diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..a4cccb44d8a6fc93bf6c5bbd8a67c0d08e2f9fd3 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,6 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +*.gif filter=lfs diff=lfs merge=lfs -text +*.txt filter=lfs diff=lfs merge=lfs -text +*.png filter=lfs diff=lfs merge=lfs -text diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..8287dfb7abe54576874d022409d3dced0598eb2e --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +__pycache__ +*.ipynb_checkpoints +.DS_Store +work_dir diff --git a/Makefile b/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..024fd495d72062bebffa3d0c87e361602497d036 --- /dev/null +++ b/Makefile @@ -0,0 +1,10 @@ +default_target: main +.PHONY : default_target + +$(VERBOSE).SILENT: + +SHELL = /bin/sh + +sam_2D_example: + python3 medsam.py -c dataloader/yaml_data/buid_sam.yml -2D +.PHONY: sam_2D_example \ No newline at end of file diff --git a/README.md b/README.md index 154df8298fab5ecf322016157858e08cd1bccbe1..ffc8725334264042ca11b6faff715a5378d9d7cf 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,323 @@ ---- -license: apache-2.0 ---- +## LVM-Med: Learning Large-Scale Self-Supervised Vision Models for Medical Imaging via Second-order Graph Matching (Neurips 2023). +We release [LVM-Med](https://arxiv.org/abs/2306.11925)'s pre-trained models and demonstrate downstream tasks on 2D-3D segmentations, linear/fully finetuning image classification, and object detection. + +LVM-Med was trained with ~ 1.3 million medical images collected from 55 datasets using a second-order graph matching formulation unifying +current contrastive and instance-based SSL. + +

+<p align="center">
+  <img src="assets/lvm_med_teaser.gif" alt="drawing"/>
+</p>
+
+<p align="center">
+  <img src="assets/lvm_med_compare.jpg" alt="drawing"/>
+</p>

+ +## Table of contents +* [News](#news) +* [LVM-Med Pretrained Models](#lvm-med-pretrained-models) +* [Further Training LVM-Med on Large Dataset](#further-training-lvm-med-on-large-dataset) +* [Prerequisites](#prerequisites) +* [Preparing Dataset](#preparing-datasets) +* [Downstream Tasks](#downstream-tasks) + * [Segmentation](#segmentation) + * [Image Classification](#image-classification) + * [Object Detection](#object-detection) +* [Citation](#citation) +* [Related Work](#related-work) +* [License](#license) + +## News +- :mega: **14/12/2023**: The LVM-Med training algorithm is ready to be released! Please send us an email to request! +- :pencil2: If you want to have other architecture, send us a request by email or create an Issue. If the requests are enough, we will train them. +- :construction: Coming soon: [ConvNext](https://arxiv.org/abs/2201.03545) architecture trained by LVM-Med. +- :construction: Coming soon: ViT architectures for end-to-end segmentation with better performance reported in the paper. +- **31/07/2023**: Release ONNX support for LVM-Med ResNet50 and LVM-Med ViT as backbones in [`onnx_model`](/onnx_model/) folder. +- **26/07/2023**: We release ViT architectures (**ViT-B** and **ViT-H**) initialized from LVM-Med and further training on the LIVECell dataset with 1.6 million high-quality cells. See at this [table](#further-training-lvm-med-on-large-dataset). +- **25/06/2023**: We release two pre-trained models of LVM-Med: ResNet-50 and ViT-B. Providing scripts for downstream tasks. + +## LVM-Med Pretrained Models + + + + + + + + + + + + + + + + + + + + + + +
+| Arch | Params (M) | 2D Segmentation (Dice) | 3D Segmentation (3D IoU) | Weights |
+|------|------------|-------------------------|---------------------------|----------|
+| ResNet-50 | 25.5M | 83.05 | 79.02 | backbone |
+| ViT-B | 86.0M | 85.80 | 80.90 | backbone |
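+For orientation, below is a minimal sketch (not the repo's training entry point) of loading the released ResNet-50 backbone into a standard `torchvision` model, mirroring the loading logic in `classification_R50/train_R50_classification.py`; the path assumes the checkpoint sits in the `lvm_med_weights` folder described next:
+
+```python
+import torch
+import torchvision
+
+# Minimal sketch: load the LVM-Med ResNet-50 backbone released above.
+device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+model = torchvision.models.resnet50(pretrained=True)
+state_dict = torch.load("./lvm_med_weights/lvmmed_resnet.torch", map_location=device)
+# strict=False: the checkpoint holds encoder weights only, so the fc head keeps
+# its torchvision initialization (replace it for your own downstream task).
+model.load_state_dict(state_dict, strict=False)
+model = model.to(device)
+```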
+ +After downloading the pre-trained models, please place them in [`lvm_med_weights`](/lvm_med_weights/) folder to use. + +- For **Resnet-50**, we demo **end-to-end** segmentation/classification/object detection. +- For **ViT-B**, we demo **prompt-based** segmentation using bounding-boxes. + +**Important Note:** please check[```dataset.md```](https://github.com/duyhominhnguyen/LVM-Med/blob/main/lvm-med-training-data/README.md) to avoid potential leaking testing data when using our model. + +**Segment Anything Model-related Experiments** +- For all experiments using [SAM](https://github.com/facebookresearch/segment-anything) model, we use the base architecture of SAM which is `sam_vit_b`. You could browse the [`original repo`](https://github.com/facebookresearch/segment-anything) for this pre-trained weight and put it in [`./working_dir/sam_vit_b_01ec64.pth`](./working_dir/) folder to use yaml properly. + +## Further Training LVM-Med on Large Dataset +We release some further pre-trained weight on other large datasets as mentioned in the Table below. + + + + + + + + + + + + + + + + + + + + + + +
+| Arch | Params (M) | Dataset Name | Weights | Descriptions |
+|------|------------|--------------|---------|--------------|
+| ViT-B | 86.0M | LIVECell | backbone | Link |
+| ViT-H | 632M | LIVECell | backbone | Link |
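+In this repo the LVM-Med ViT weights serve as SAM image encoders (see the prompt-based segmentation section below). A minimal sketch, assuming the released `.pth` stores an image-encoder state dict, of plugging the ViT-B weights into SAM via the original `segment_anything` package; `medsam.py` drives the same swap through its `-lvm_encoder` flag:
+
+```python
+import torch
+from segment_anything import sam_model_registry  # original SAM package
+
+# Build SAM ViT-B from the official checkpoint, then swap in LVM-Med encoder weights.
+sam = sam_model_registry["vit_b"](checkpoint="./working_dir/sam_vit_b_01ec64.pth")
+encoder_state = torch.load("./lvm_med_weights/lvmmed_vit.pth", map_location="cpu")
+# Assumption: the checkpoint holds ViT image-encoder weights; strict=False
+# tolerates any extra or missing keys.
+sam.image_encoder.load_state_dict(encoder_state, strict=False)
+```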
+ + + +## Prerequisites + +The code requires `python>=3.8`, as well as `pytorch>=1.7` and `torchvision>=0.8`. Please follow the instructions [here](https://pytorch.org/get-started/locally/) to install both PyTorch and TorchVision dependencies. Installing both PyTorch and TorchVision with CUDA support is strongly recommended. + +To set up our project, run the following command: + +```bash +git clone https://github.com/duyhominhnguyen/LVM-Med +cd LVM-Med +conda env create -f lvm_med.yml +conda activate lvm_med +``` + +To **fine-tune for [Segmentation](#segmentation) using ResNet-50**, we utilize U-Net from `segmentation-models-pytorch` package. To install this library, you can do the following ones: + +```bash +git clone https://github.com/qubvel/segmentation_models.pytorch.git +cd segmentation_models.pytorch +pip install -e +cd .. +mv segmentation_models_pytorch_example/encoders/__init__.py segmentation_models.pytorch/segmentation_models_pytorch/__init__.py +mv segmentation_models_pytorch_example/encoders/resnet.py segmentation_models.pytorch/segmentation_models_pytorch/resnet.py +``` + + + +## Preparing datasets +### For the Brain Tumor Dataset +You could download the `Brain` dataset via Kaggle's [`Brain Tumor Classification (MRI)`](https://www.kaggle.com/datasets/sartajbhuvaji/brain-tumor-classification-mri) and change the name into ```BRAIN```. + +### For VinDr +You can download the dataset from this link [`VinDr`](https://www.kaggle.com/datasets/awsaf49/vinbigdata-512-image-dataset) and put the folder ```vinbigdata``` into the folder ```object_detection```. To build the dataset, after downloading the dataset, you can run script ```convert_to_coco.py``` inside the folder object_detection. +```bash +python convert_to_coco.py # Note, please check links inside the code in lines 146 and 158 to build dataset correctly +``` +More information can be found in [```object_detection```](./object_detection). + +### Others +First you should download the respective dataset that you need to run to the [`dataset_demo`](/dataset_demo/) folder. To get as close results as your work as possible, you could prepare some of our specific dataset (which are not pre-distributed) the same way as we do: +```bash +python prepare_dataset.py -ds [dataset_name] +``` +such that: `dataset_name` is the name of dataset that you would like to prepare. After that, you should change paths to your loaded dataset on our pre-defined yaml file in [`dataloader/yaml_data`](/dataloader/yaml_data/). + +Currently support for `Kvasir`, `BUID`, `FGADR`, `MMWHS_MR_Heart` and `MMWHS_CT_Heart`. + +**Note:** You should change your dataset name into the correct format (i.e., Kvasir, BUID) as our current support dataset name. Or else it won't work as expected. + +## Downstream Tasks +### Segmentation +### 1. End-to-End Segmentation +**a) Training Phase:** + +**Fine-tune for downstream tasks using ResNet-50** + +```bash +python train_segmentation.py -c ./dataloader/yaml_data/buid_endtoend_R50.yml +``` +Changing name of dataset in ``.yml`` configs in [```./dataloader/yaml_data/```](./dataloader/yaml_data/) for other experiments. + +**Note**: to apply segmentation models (2D or 3D) using ResNet-50, we suggest normalizing gradient for stable training phases by set: + +```bash +clip_value = 1 +torch.nn.utils.clip_grad_norm_(net.parameters(), clip_value) +``` +See examples in file [```/segmentation_2d/train_R50_seg_adam_optimizer_2d.py```](./segmentation_2d/train_R50_seg_adam_optimizer_2d.py) lines 129-130. 
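+For clarity, the clipping call goes between `loss.backward()` and `optimizer.step()`. A minimal, illustrative sketch (argument names are placeholders; the repo's actual loop is in the file referenced above):
+
+```python
+import torch
+
+def train_one_epoch(net, loader, criterion, optimizer, device, clip_value=1.0):
+    """Illustrative training step showing where gradient-norm clipping sits."""
+    net.train()
+    for images, masks in loader:
+        images, masks = images.to(device), masks.to(device)
+        optimizer.zero_grad()
+        loss = criterion(net(images), masks)
+        loss.backward()
+        # Clip the global gradient norm before the parameter update, as suggested above.
+        torch.nn.utils.clip_grad_norm_(net.parameters(), clip_value)
+        optimizer.step()
+```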
+ +[//]: # (#### Fine-tune for downstream tasks using SAM's VIT) + +[//]: # (```bash) + +[//]: # (python train_segmentation.py -c ./dataloader/yaml_data/buid_endtoend_SAM_VIT.yml) + +[//]: # (```) +**b) Inference:** +#### ResNet-50 version + +```bash +python train_segmentation.py -c ./dataloader/yaml_data/buid_endtoend_R50.yml -test +``` +For the end-to-end version using SAM's ViT, we will soon release a better version than the reported results in the paper. + +[//]: # (#### SAM's ViT version) + +[//]: # (```bash) + +[//]: # (python train_segmentation.py -c ./dataloader/yaml_data/buid_endtoend_SAM_VIT.yml -test) + +[//]: # (```) + +### 2. Prompt-based Segmentation with ViT-B +**a. Prompt-based segmentation with fine-tuned decoder of SAM ([MedSAM](https://github.com/bowang-lab/MedSAM)).** + +We run the MedSAM baseline to compare performance by: +#### Train +```bash +python3 medsam.py -c dataloader/yaml_data/buid_sam.yml +``` +#### Inference +```bash +python3 medsam.py -c dataloader/yaml_data/buid_sam.yml -test +``` + +**b. Prompt-based segmentation as [MedSAM](https://github.com/bowang-lab/MedSAM) but using LVM-Med's Encoder.** + +The training script is similar as MedSAM case but specify the weight model by ```-lvm_encoder```. +#### Train +```bash +python3 medsam.py -c dataloader/yaml_data/buid_lvm_med_sam.yml -lvm_encoder ./lvm_med_weights/lvmmed_vit.pth +``` + +#### Test +```bash +python3 medsam.py -c dataloader/yaml_data/buid_lvm_med_sam.yml -lvm_encoder ./lvm_med_weights/lvmmed_vit.pth -test +``` + +You could also check our example notebook [`Prompt_Demo.ipynb`](/notebook/Prompt_Demo.ipynb) for results visualization using prompt-based MedSAM and prompt-based SAM with LVM-Med's encoder. The pre-trained weights for each SAM decoder model in the demo are [here](https://drive.google.com/drive/u/0/folders/1tjrkyEozE-98HAGEtyHboCT2YHBSW15U). Please download trained models of LVM-Med and MedSAM and put them into [`working_dir/checkpoints`](./working_dir/checkpoints/) folder for running the aforementioned notebook file. + +**c. Zero-shot prompt-based segmentation with Segment Anything Model (SAM) for downstream tasks** + +The SAM model without any finetuning using bounding box-based prompts can be done by: +```bash +python3 zero_shot_segmentation.py -c dataloader/yaml_data/buid_sam.yml +``` +### Image Classification +We provide training and testing scripts using LVM-Med's ResNet-50 models for Brain Tumor Classification and Diabetic Retinopathy Grading in FGADR dataset (Table 5 in main paper and Table 12 in Appendix). The version with ViT models will be updated soon. + +**a. Training with FGADR** +```bash +# Fully fine-tuned with 1 FCN +python train_classification.py -c ./dataloader/yaml_data/fgadr_endtoend_R50_non_frozen_1_fcn.yml + +# Fully fine-tuned with multiple FCNs +python train_classification.py -c ./dataloader/yaml_data/fgadr_endtoend_R50_non_frozen_fcns.yml + +# Freeze all and fine-tune 1-layer FCN only +python train_classification.py -c ./dataloader/yaml_data/fgadr_endtoend_R50_frozen_1_fcn.yml + +# Freeze all and fine-tune multi-layer FCN only +python train_classification.py -c ./dataloader/yaml_data/fgadr_endtoend_R50_frozen_fcns.yml +``` +To run for ```Brain dataset```, choose other config files ```brain_xyz.yml```in folder [`./dataloader/yaml_data/`](/dataloader/yaml_data). + +**b. 
Inference with FGADR** +```bash +# Fully fine-tuned with 1 FCN +python train_classification.py -c ./dataloader/yaml_data/fgadr_endtoend_R50_non_frozen_1_fcn.yml -test + +# Fully fine-tuned with multiple FCNs +python train_classification.py -c ./dataloader/yaml_data/fgadr_endtoend_R50_non_frozen_fcns.yml -test + +# Freeze all and fine-tune 1-layer FCN only +python train_classification.py -c ./dataloader/yaml_data/fgadr_endtoend_R50_frozen_1_fcn.yml -test + +# Freeze all and fine-tune multi-layer FCN only +python train_classification.py -c ./dataloader/yaml_data/fgadr_endtoend_R50_frozen_fcns.yml -test +``` +### Object Detection +We demonstrate using LVM-Med ResNet-50 for object detection with Vin-Dr dataset. We use Faster-RCNN for the network backbone. +You can access [`object_detection`](./object_detection) folder for more details. + +## Citation +Please cite this paper if it helps your research: +```bibtex +@article{nguyen2023lvm, + title={LVM-Med: Learning Large-Scale Self-Supervised Vision Models for Medical Imaging via Second-order Graph Matching}, + author={Nguyen, Duy MH and Nguyen, Hoang and Diep, Nghiem T and Pham, Tan N and Cao, Tri and Nguyen, Binh T and Swoboda, Paul and Ho, Nhat and Albarqouni, Shadi and Xie, Pengtao and others}, + journal={arXiv preprint arXiv:2306.11925}, + year={2023} +} +``` + +## Related Work +We use and modify codes from [SAM](https://github.com/facebookresearch/segment-anything) and [MedSAM](https://github.com/bowang-lab/MedSAM) for prompt-based segmentation settings. A part of LVM-Med algorithm adopt data transformations from [Vicregl](https://github.com/facebookresearch/VICRegL), [Deepcluster-v2](https://github.com/facebookresearch/swav?utm_source=catalyzex.com). We also utilize [vissl](https://github.com/facebookresearch/vissl) framework to train 2D self-supervised methods in our collected data. Thank the authors for their great work! + +## License +Licensed under the [CC BY-NC-ND 2.0](https://creativecommons.org/licenses/by-nc-nd/2.0/) (**Attribution-NonCommercial-NoDerivs 2.0 Generic**). The code is released for academic research use only. For commercial use, please contact [Ho_Minh_Duy.Nguyen@dfki.de](Ho_Minh_Duy.Nguyen@dfki.de) + +[//]: # (### f. 
LVM-Med ) + +[//]: # (#### Training Phase) + +[//]: # (#### Fine-tune for downstream tasks using ResNet-50) + +[//]: # () +[//]: # (```bash) + +[//]: # (python train_segmentation.py -c ./dataloader/yaml_data/buid_endtoend_R50.yml) + +[//]: # (```) + +[//]: # (#### Fine-tune for downstream tasks using SAM's VIT) + +[//]: # (```bash) + +[//]: # (python train_segmentation.py -c ./dataloader/yaml_data/buid_endtoend_SAM_VIT.yml) + +[//]: # (```) + +[//]: # (#### Inference) + +[//]: # (#### Downstream tasks using ResNet-50) + +[//]: # () +[//]: # (```bash) + +[//]: # (python train_segmentation.py -c ./dataloader/yaml_data/buid_endtoend_R50.yml -test) + +[//]: # (```) + +[//]: # (#### Downstream tasks using SAM's VIT) + +[//]: # (```bash) + +[//]: # (python train_segmentation.py -c ./dataloader/yaml_data/buid_endtoend_SAM_VIT.yml -test) + +[//]: # (```) diff --git a/assets/__init__.py b/assets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/assets/body_lvm_med.jpg b/assets/body_lvm_med.jpg new file mode 100644 index 0000000000000000000000000000000000000000..8d6d56fc3d6cd7b536d4b38ad226f221b0be72be Binary files /dev/null and b/assets/body_lvm_med.jpg differ diff --git a/assets/lvm_med_compare.jpg b/assets/lvm_med_compare.jpg new file mode 100644 index 0000000000000000000000000000000000000000..1df1260dc08a5ecd0ec22dc939293f173e13404a Binary files /dev/null and b/assets/lvm_med_compare.jpg differ diff --git a/assets/lvm_med_teaser.gif b/assets/lvm_med_teaser.gif new file mode 100644 index 0000000000000000000000000000000000000000..42153d569dd65258210d61cb8e5399cbb76aaa66 --- /dev/null +++ b/assets/lvm_med_teaser.gif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bc91c309ae76648d8c2c8725ae1e95d9d9ef90bcf3cc754e3f24fc1c65c4057d +size 12460151 diff --git a/classification_R50/__init__.py b/classification_R50/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/classification_R50/train_R50_classification.py b/classification_R50/train_R50_classification.py new file mode 100644 index 0000000000000000000000000000000000000000..6bfb297f8ea18b155f7c46103e934bc271727e7a --- /dev/null +++ b/classification_R50/train_R50_classification.py @@ -0,0 +1,349 @@ +""" +Linear and non-linear image classification tasks with and w/o frozen image encoders +""" +import torch +import torch.nn as nn +import torchvision +from torchvision import transforms +from torch.utils.data import DataLoader, Dataset +from PIL import Image +import numpy as np +from sklearn.model_selection import train_test_split +import torchvision.datasets as datasets +import argparse +from tqdm import tqdm +import os +import random +import torch.nn.functional as F +seed = 42 +torch.manual_seed(seed) +np.random.seed(seed) +random.seed(seed) + + + +weight_collections = { + "resnet50": { + "lvm-med-resnet": "./lvm_med_weights/lvmmed_resnet.torch", + } +} + +def eval(net, dataloader, device, criterion, num_samples): + # Evaluate the model on the validation set + val_loss = 0.0 + val_acc = 0.0 + + net.eval() + with tqdm(total=len(dataloader), desc='Validation round', unit=' img') as pbar: + for inputs, labels in dataloader: + inputs = inputs.to(device) + labels = labels.to(device) + with torch.no_grad(): + outputs = net(inputs) + loss = criterion(outputs, labels) + val_loss += loss.item() * inputs.size(0) + _, preds = torch.max(outputs, 1) + val_acc += torch.sum(preds == 
labels.data) + pbar.update(inputs.shape[0]) + val_loss /= num_samples + val_acc /= num_samples + net.train() + return val_loss, val_acc + +def TrainingTesting(cfg, numtry, pretrained_weight_name, data_path, num_classes, data_tranform, device, solver, name_weights, + frozen_encoder, architecture_type, number_epoch=50, learning_rate=0.001, batch_size=32, test_mode='best_valid', + valid_rate=0.2): + # Load the datasets + train_dir = data_path + "/Training" + test_dir = data_path + "/Testing" + if frozen_encoder: + checkpoint_dir = cfg.base.best_valid_model_checkpoint + cfg.base.dataset_name + "_" + architecture_type + "_" + name_weights + "_frozen/" + else: + checkpoint_dir = cfg.base.best_valid_model_checkpoint + cfg.base.dataset_name + "_" + architecture_type + "_" + name_weights + "_non_frozen/" + + CHECK_FOLDER = os.path.isdir(checkpoint_dir) + + if not CHECK_FOLDER: + os.makedirs(checkpoint_dir) + print("created folder: ", checkpoint_dir) + + train_dataset = datasets.ImageFolder(root=train_dir, transform=data_tranform) + test_dataset = datasets.ImageFolder(root=test_dir, transform=data_tranform) + + print ("valid size is {}".format(valid_rate)) + # Split the training dataset into training and validation sets + train_indices, val_indices = train_test_split(list(range(len(train_dataset))), test_size=valid_rate, random_state=42) + train_sampler = torch.utils.data.sampler.SubsetRandomSampler(train_indices) + val_sampler = torch.utils.data.sampler.SubsetRandomSampler(val_indices) + + loader_args = dict(num_workers=10, pin_memory=True) + train_loader = DataLoader(train_dataset, batch_size=batch_size, sampler=train_sampler, **loader_args) + val_loader = DataLoader(train_dataset, batch_size=batch_size, sampler=val_sampler, **loader_args) + test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=True, **loader_args) + + n_train = len(train_indices) + + # Define the ResNet50 model + model = torchvision.models.resnet50(pretrained=True) + + # Freeze the layers of the ResNet50 model + if frozen_encoder: + print ("Frozen encoder") + for param in model.parameters(): + param.requires_grad = False + + num_ftrs = model.fc.in_features + + # define linear or non-linear architecture + if architecture_type == '1-fcn': + print ("Using single fully-connected layer") + model.fc = nn.Linear(num_ftrs, num_classes) + elif architecture_type == "fcns": + print("Using several fully-connected layers") + if cfg.base.dataset_name == 'brain': + model.fc = nn.Sequential( + nn.Linear(num_ftrs, 512), + nn.ReLU(), + nn.Linear(512, 256), + nn.ReLU(), + nn.Linear(256, num_classes)) + elif cfg.base.dataset_name == 'fgadr': + model.fc = nn.Sequential( + nn.Linear(num_ftrs, 512), + nn.ReLU(), + nn.Linear(512, 128), + nn.ReLU(), + nn.Linear(128, num_classes)) + else: + print(">>> Not implemented for selected datasets") + exit() + else: + print (">>> No available option for achitecture. Please check 'help' with --linear option") + exit() + + pretrained_weight = torch.load(weight_collections["resnet50"][pretrained_weight_name], map_location=device) + model.load_state_dict(pretrained_weight, strict=False) + print("Loaded pretrained-weight of ", pretrained_weight_name) + + model = model.to(device) + + criterion = nn.CrossEntropyLoss() + + if solver == "sgd": + optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate, momentum=0.9) + elif solver == "adam": + optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate, betas=(0.9, 0.999), eps=1e-08, + weight_decay=0.) 
+ if cfg.base.dataset_name == 'fgadr': + optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate, betas=(0.9, 0.999), eps=1e-08, + weight_decay=0.005) + else: + print ("Non-available solver") + exit() + + ## ------------ Train the model ------------ + print(" ------------ Training ------------ ") + num_epochs = number_epoch + best_acc_val = 0. + + for epoch in range(num_epochs): + print(f"Epoch {epoch + 1}:") + train_loss = 0.0 + train_acc = 0.0 + + # Train the model on the training set + model.train() + with tqdm(total=n_train, desc=f'Epoch {epoch + 1}/{num_epochs}', unit='img') as pbar: + for inputs, labels in train_loader: + inputs = inputs.to(device) + labels = labels.to(device) + optimizer.zero_grad() + outputs = model(inputs) + loss = criterion(outputs, labels) + loss.backward() + optimizer.step() + train_loss += loss.item() * inputs.size(0) + _, preds = torch.max(outputs, 1) + train_acc += torch.sum(preds == labels.data) + + # updating progressing bar + pbar.update(inputs.shape[0]) + pbar.set_postfix(**{'loss (batch)': loss.item()}) + + # Print the results for this epoch + train_loss /= len(train_indices) + train_acc /= len(train_indices) + num_samples = len(val_indices) + num_samples_test = len(test_dataset) + + print(" \n >>> Evaluation ") + val_loss, val_acc = eval(model, val_loader, device, criterion, num_samples) + + if val_acc >= best_acc_val: + checkpoint = { + 'epoch': epoch + 1, + 'state_dict': model.state_dict(), + 'optimizer': optimizer.state_dict() + } + torch.save(checkpoint, checkpoint_dir + + name_weights + "_" + pretrained_weight_name + "_" + str(numtry) + ".pth") + print("Saved checkpoint at epoch ", epoch + 1) + best_acc_val = val_acc + + print(f"Training Loss: {train_loss:.4f}\t Training Accuracy: {train_acc:.4f}") + print(f"Val Loss: {val_loss:.4f}\tVal Accuracy: {val_acc:.5f}") + + # print model at last epochs + checkpoint = { + 'epoch': epoch + 1, + 'state_dict': model.state_dict(), + 'optimizer': optimizer.state_dict() + } + torch.save(checkpoint, checkpoint_dir + + name_weights + "_" + pretrained_weight_name + "_last_" + str(numtry) + ".pth") + print("Saved checkpoint at last epoch ", epoch + 1) + + ## ------------ Test the model ------------ + print("------ Testing ------") + if test_mode == "best_valid": + print("Loading best models at {}".format(checkpoint_dir)) + ckp = torch.load(checkpoint_dir + + name_weights + "_" + pretrained_weight_name + "_" + str(numtry) + ".pth") + else: + print("Loading models at last epochs {}".format(checkpoint_dir)) + ckp = torch.load(checkpoint_dir + + name_weights + "_" + pretrained_weight_name + "_last_" + str(numtry) + ".pth") + model.load_state_dict(ckp['state_dict']) + num_samples_test = len(test_dataset) + test_loss, test_acc = eval(model, test_loader, device, criterion, num_samples_test) + print(f"Test Loss: {test_loss:.4f}\tTest Accuracy: {test_acc:.5f}") + return test_acc + +def inference(numtry, device, cfg, data_path, data_tranform, name_weights, pretrained_weight_name, + frozen_encoder, architecture_type, num_classes): + if frozen_encoder: + checkpoint_dir = cfg.base.best_valid_model_checkpoint + cfg.base.dataset_name + "_" + architecture_type + "_" + name_weights + "_frozen/" + else: + checkpoint_dir = cfg.base.best_valid_model_checkpoint + cfg.base.dataset_name + "_" + architecture_type + "_" + name_weights + "_non_frozen/" + loader_args = dict(num_workers=10, pin_memory=True) + test_dir = data_path + "/Testing" + test_dataset = datasets.ImageFolder(root=test_dir, transform=data_tranform) + test_loader 
= DataLoader(test_dataset, batch_size=32, shuffle=False, **loader_args) + + model = torchvision.models.resnet50(pretrained=True) + # Freeze the layers of the ResNet50 model + if frozen_encoder: + print ("Frozen encoder") + for param in model.parameters(): + param.requires_grad = False + + num_ftrs = model.fc.in_features + if architecture_type == '1-fcn': + print ("Using single fully-connected layer") + model.fc = nn.Linear(num_ftrs, num_classes) + elif architecture_type == "fcns": + print("Using several fully-connected layers") + if cfg.base.dataset_name == 'brain': + model.fc = nn.Sequential( + nn.Linear(num_ftrs, 512), + nn.ReLU(), + nn.Linear(512, 256), + nn.ReLU(), + nn.Linear(256, num_classes)) + elif cfg.base.dataset_name == 'fgadr': + model.fc = nn.Sequential( + nn.Linear(num_ftrs, 512), + nn.ReLU(), + nn.Linear(512, 128), + nn.ReLU(), + nn.Linear(128, num_classes)) + else: + print(">>> Not implemented for selected datasets") + exit() + else: + print (">>> No available option for achitecture. Please check 'help' with --linear option") + exit() + + model = model.to(device) + print("Loading best models at {}".format(checkpoint_dir)) + ckp = torch.load(checkpoint_dir + + name_weights + "_" + pretrained_weight_name + "_" + str(numtry) + ".pth") + model.load_state_dict(ckp['state_dict']) + num_samples_test = len(test_dataset) + criterion = nn.CrossEntropyLoss() + test_loss, test_acc = eval(model, test_loader, device, criterion, num_samples_test) + print(f"Test Loss: {test_loss:.4f}\tTest Accuracy: {test_acc:.5f}") + return test_acc + +def str2bool(v): + if isinstance(v, bool): + return v + if v.lower() in ('yes', 'true', 't', 'y', '1'): + return True + elif v.lower() in ('no', 'false', 'f', 'n', '0'): + return False + else: + raise argparse.ArgumentTypeError('Boolean value expected.') + + +def train_R50(yml_args, cfg): + + if cfg.base.dataset_name == 'brain': + data_path = cfg.dataloader.data_path + num_classes = 4 + data_transforms = transforms.Compose([ + transforms.Resize((224, 224)), + transforms.ToTensor(), + transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) + ]) + elif cfg.base.dataset_name == 'fgadr': + data_path = cfg.dataloader.data_path + num_classes = 5 + data_transforms = transforms.Compose([ + transforms.RandomCrop(size=(480, 480)), + transforms.RandomHorizontalFlip(p=0.5), + transforms.RandomAutocontrast(p=0.2), + transforms.ToTensor(), + transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) + ]) + else: + print (">>> No available datasets") + exit() + + print ("Using dataset {}".format(cfg.base.dataset_name)) + list_acc = [] + + name_weight = cfg.base.original_checkpoint + "_output" + cuda_string = 'cuda:' + cfg.base.gpu_id + devices = torch.device(cuda_string if torch.cuda.is_available() else 'cpu') + + if not yml_args.use_test_mode: + # Training model with three trial times + for numtry in range(3): + print ("*****"*3 + "\n" + "Trial", numtry) + test_acc = TrainingTesting(cfg = cfg, numtry=numtry, pretrained_weight_name=cfg.base.original_checkpoint, data_path = data_path, + num_classes = num_classes, + data_tranform = data_transforms, + device=devices, + solver=cfg.train.solver, + name_weights=name_weight, frozen_encoder=cfg.base.frozen_eval, + number_epoch=cfg.train.num_epochs, architecture_type=cfg.base.model, + learning_rate=cfg.train.learning_rate, batch_size=cfg.train.train_batch_size, + test_mode=cfg.base.test_mode, + valid_rate = cfg.base.valid_rate) + list_acc.append(test_acc.to('cpu')) + 
print("==============================================================================") + print ("*****"*3 + "\n") + print("Mean Accuracy: ", np.mean(list_acc)) + print("Standard Deviation: ", np.std(list_acc)) + else: + # Evaluate model with three weights + for numtry in range(3): + print ("*****"*3 + "\n" + "weight", numtry+1) + test_acc = inference(numtry = numtry, device = devices, cfg = cfg, data_path = data_path, data_tranform=data_transforms, + name_weights=name_weight, pretrained_weight_name=cfg.base.original_checkpoint, + frozen_encoder=cfg.base.frozen_eval, architecture_type=cfg.base.model, num_classes=num_classes) + list_acc.append(test_acc.to('cpu')) + print ("*****"*3 + "\n") + print("Mean Accuracy: ", np.mean(list_acc)) + print("Standard Deviation: ", np.std(list_acc)) diff --git a/dataloader/__init__.py b/dataloader/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..05fba0cd880af087066ab4b1c5dc0d7dde0635db --- /dev/null +++ b/dataloader/__init__.py @@ -0,0 +1,3 @@ +from .sam_transforms import ( + ResizeLongestSide +) diff --git a/dataloader/dataloader.py b/dataloader/dataloader.py new file mode 100644 index 0000000000000000000000000000000000000000..837750272e44589b6d39c8468ebba8dc62392f26 --- /dev/null +++ b/dataloader/dataloader.py @@ -0,0 +1,55 @@ +from torch.utils.data import ( + DataLoader +) +from dataloader.dataset import ( + SegmentationDataset, + AugmentedSegmentationDataset +) + +def sam_dataloader(cfg): + loader_args = dict(num_workers=cfg.base.num_workers, + pin_memory=cfg.base.pin_memory) + """ + Since the output of SAM's mask decoder is 256 by default (without using a postprocessing step), + hence, we chose to resize the mask ground truth into 256x256 for a better output (prediction without postprocessing). 
+ """ + if cfg.base.dataset_name in ["buidnewprocess", "kvasir", "isiconlytrain", "drive"]: + train_dataset = SegmentationDataset(cfg.base.dataset_name, + cfg.dataloader.train_dir_img, + cfg.dataloader.train_dir_mask, + scale=(1024, 256)) + elif cfg.base.dataset_name in ["bts", "las_mri", "las_ct"]: + train_dataset = AugmentedSegmentationDataset(cfg.base.dataset_name, + cfg.dataloader.train_dir_img, + cfg.dataloader.train_dir_mask, + scale=(1024, 256)) + else: + raise NameError(f"[Error] Dataset {cfg.base.dataset_name} is either in wrong format or not yet implemented!") + + val_dataset = SegmentationDataset(cfg.base.dataset_name, + cfg.dataloader.valid_dir_img, + cfg.dataloader.valid_dir_mask, + scale=(1024, 256)) + test_dataset = SegmentationDataset(cfg.base.dataset_name, + cfg.dataloader.test_dir_img, + cfg.dataloader.test_dir_mask, + scale=(1024, 256)) + train_loader = DataLoader(train_dataset, + shuffle=True, + batch_size=cfg.train.train_batch_size, + multiprocessing_context="fork", + **loader_args) + val_loader = DataLoader(val_dataset, + shuffle=False, + drop_last=True, + batch_size=cfg.train.valid_batch_size, + multiprocessing_context="fork", + **loader_args) + test_loader = DataLoader(test_dataset, + shuffle=False, + batch_size=cfg.train.test_batch_size, + drop_last=True, + multiprocessing_context="fork", + **loader_args) + + return train_loader, val_loader, test_loader, val_dataset, test_dataset \ No newline at end of file diff --git a/dataloader/dataset.py b/dataloader/dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..961ed7fe424d3545b468fd90067bec8e2325c5f1 --- /dev/null +++ b/dataloader/dataset.py @@ -0,0 +1,434 @@ +import logging +import os +import numpy as np +import torch +import cv2 +from skimage.transform import resize +from torch.utils.data import Dataset + +def get_bbox_from_mask(mask): + '''Returns a bounding box from a mask''' + bbox = [] + + if len(mask.shape) == 2: #(H, W) + if np.all(mask == 0): + y_indices, x_indices = np.random.normal(0, 1024, 2) + x_min, x_max = np.min(x_indices), np.max(x_indices) + y_min, y_max = np.min(y_indices), np.max(y_indices) + x_min = max(0, x_min - np.random.randint(0, 20)) + x_max = min(1024, x_max + np.random.randint(0, 20)) + y_min = max(0, y_min - np.random.randint(0, 20)) + y_max = min(1024, y_max + np.random.randint(0, 20)) + else: + y_indices, x_indices = np.where(mask > 0) + x_min, x_max = np.min(x_indices), np.max(x_indices) + y_min, y_max = np.min(y_indices), np.max(y_indices) + H, W = mask.shape + x_min = max(0, x_min - np.random.randint(0, 20)) + x_max = min(W, x_max + np.random.randint(0, 20)) + y_min = max(0, y_min - np.random.randint(0, 20)) + y_max = min(H, y_max + np.random.randint(0, 20)) + return np.array([x_min, y_min, x_max, y_max]) + + for i in range(mask.shape[0]): + if np.all(mask[i] == 0): + y_indices, x_indices = np.random.normal(0, 1024, 2) + x_min, x_max = np.min(x_indices), np.max(x_indices) + y_min, y_max = np.min(y_indices), np.max(y_indices) + x_min = max(0, x_min - np.random.randint(0, 20)) + x_max = min(1024, x_max + np.random.randint(0, 20)) + y_min = max(0, y_min - np.random.randint(0, 20)) + y_max = min(1024, y_max + np.random.randint(0, 20)) + else: + y_indices, x_indices = np.where(mask[i] > 0) + x_min, x_max = np.min(x_indices), np.max(x_indices) + y_min, y_max = np.min(y_indices), np.max(y_indices) + H, W = mask[i].shape + x_min = max(0, x_min - np.random.randint(0, 20)) + x_max = min(W, x_max + np.random.randint(0, 20)) + y_min = max(0, y_min - np.random.randint(0, 
20)) + y_max = min(H, y_max + np.random.randint(0, 20)) + bbox.append([x_min, y_min, x_max, y_max]) + return np.array(bbox) + +class SegmentationDataset_train(Dataset): + def __init__(self, nonlabel_path: str, havelabel_path: str, dataset: str, scale = (224, 224)): + self.nonlabel_path = nonlabel_path + self.havelabel_path = havelabel_path + self.name_dataset = dataset + self.scale = scale + + with open(self.nonlabel_path, 'r') as nlf: + lines = nlf.readlines() + non_label_lines = [line.strip().split(' ')[:2] for line in lines] + + with open(self.havelabel_path, 'r') as hlf: + lines = hlf.readlines() + have_label_lines = [line.strip().split(' ')[:2] for line in lines] + + if len(non_label_lines) == 0: + self.ids = np.array(have_label_lines, dtype= object) + else: + choose_non_lable_lines = np.random.choice(len(non_label_lines), size = len(have_label_lines)) + non_label_lines = np.array(non_label_lines, dtype= object) + have_label_lines = np.array(have_label_lines, dtype= object) + self.ids = np.concatenate([non_label_lines[choose_non_lable_lines], have_label_lines], axis= 0) + # self.ids = os.listdir(images_dir) #[splitext(file)[0] for file in listdir(images_dir) if not file.startswith('.') and image_type in file] + # print(len(self.ids)) + # if datasetname == "las_mri": + # self.ids = [f for f in self.ids if image_type in f] + if len(self.ids) == 0: + raise RuntimeError(f'No input file found in {self.images_dir}, make sure you put your images there') + logging.info(f'Creating dataset with {len(self.ids)} examples') + self.cache = {} + + def __len__(self): + return len(self.ids) + + @classmethod + def preprocess(self, img, scale, is_mask): + img = resize(img, + (scale[0], scale[0]), + order=0, + preserve_range=True, + anti_aliasing=False).astype('uint8') + img = np.asarray(img) + if not is_mask: + img = ((img - img.min()) * (1/(0.01 + img.max() - img.min()) * 255)).astype('uint8') + if is_mask: + img = resize(img, + (scale[1], scale[1]), + order=0, + preserve_range=True, + anti_aliasing=False).astype('uint8') + return img + + @classmethod + def load(self, filename, is_mask=False): + if is_mask: + return cv2.imread(filename, 0) + else: + return cv2.imread(filename) + + def __getitem__(self, idx): + if idx in self.cache: + return self.cache[idx] + + img_file = self.ids[idx][0] + mask_file = self.ids[idx][1] + # print(img_file) + #start = time.time() + mask = self.load(mask_file, is_mask=True) + img = self.load(img_file, is_mask=False) + + assert mask is not None, mask_file + assert img is not None, img_file + + img = self.preprocess(img, self.scale, is_mask=False) + mask = self.preprocess(mask, self.scale, is_mask=True) + + if self.name_dataset in ["kvasir", "buidnewprocess"]: + mask[mask < 50] = 0 + mask[mask > 200] = 1 + elif self.name_dataset == "isiconlytrain": + mask[mask > 1] = 1 + elif self.name_dataset.startswith("las"): + mask[mask == 30] = 1 + mask[mask == 60] = 2 # main predict + mask[mask == 90] = 3 + mask[mask == 120] = 4 + mask[mask == 150] = 5 + mask[mask == 180] = 6 + mask[mask == 210] = 7 + mask[mask > 7] = 0 + else: + mask[mask>0] = 1 + + bboxes = get_bbox_from_mask(mask) + + data = { + 'image': torch.as_tensor(img.copy()).permute(2, 0, 1).float().contiguous(), + 'mask': torch.tensor(mask[None, :, :]).long(), + 'mask_ete': torch.as_tensor(mask.copy().astype(int)).long().contiguous(), + 'bboxes' : torch.tensor(bboxes).float(), + 'mask_file' : mask_file, + 'img_file' : img_file + } + self.cache[idx] = data + return data + + def get_3d_iter(self): + from itertools import 
groupby + keyf = lambda idx : self.ids[idx].split("_frame_")[0] + sorted_ids = sorted(range(len(self.ids)), key=lambda i : self.ids[i]) + for _, items in groupby(sorted_ids, key=keyf): + images = [] + masks = [] + masks_ete = [] + bboxes = [] + for idx in items: + d = self.__getitem__(idx) + images.append(d['image']) + masks.append(d['mask']) + masks_ete.append(d['mask_ete']) + bboxes.append(d['bboxes']) + # store third dimension in image channels + images = torch.stack(images, dim=0) + masks = torch.stack(masks, dim=0) + masks_ete = torch.stack(masks_ete, dim=0) + bboxes = torch.stack(bboxes, dim=0) + _3d_data = {'image': images, 'mask': masks, 'mask_ete': masks_ete, 'bboxes': bboxes} + yield _3d_data + + +class SegmentationDataset(Dataset): + def __init__(self, name_dataset: str, images_dir: str, masks_dir: str, scale = (1024, 256)): + self.images_dir = images_dir + self.masks_dir = masks_dir + self.scale = scale + self.name_dataset = name_dataset + self.ids = os.listdir(images_dir) + if len(self.ids) == 0: + raise RuntimeError(f'No input file found in {self.images_dir}, make sure you put your images there') + logging.info(f'Creating dataset with {len(self.ids)} examples') + self.cache = {} + + def __len__(self): + return len(self.ids) + + @classmethod + def preprocess(self, img, scale, is_mask): + img = resize(img, + (scale[0], scale[0]), + order=0, + preserve_range=True, + anti_aliasing=False).astype('uint8') + img = np.asarray(img) + if not is_mask: + img = ((img - img.min()) * (1/(0.01 + img.max() - img.min()) * 255)).astype('uint8') + if is_mask: + img = resize(img, + (scale[1], scale[1]), + order=0, + preserve_range=True, + anti_aliasing=False).astype('uint8') + return img + + @classmethod + def load(self, filename, is_mask=False): + if is_mask: + return cv2.imread(filename, 0) + else: + return cv2.imread(filename) + + def __getitem__(self, idx): + if idx in self.cache: + return self.cache[idx] + + name = self.ids[idx] + + if self.name_dataset == "isiconlytrain": + mask_file = os.path.join(self.masks_dir, name).split(".jpg")[0] + mask_file = mask_file + "_segmentation.png" + elif self.name_dataset == "buidnewprocess": + mask_file = os.path.join(self.masks_dir, name) + elif self.name_dataset == "kvasir": + mask_file = os.path.join(self.masks_dir, name) + elif self.name_dataset == "drive": + mask_file = os.path.join(self.masks_dir, name).replace("training", "manual1") + elif self.name_dataset == "bts": + mask_file = os.path.join(self.masks_dir, name).replace(self.image_type, "_seg_") + elif self.name_dataset in ["las_mri", "las_ct"]: + mask_file = os.path.join(self.masks_dir, name).replace("image", "label") + else: + mask_file = os.path.join(self.masks_dir, name) + + img_file = os.path.join(self.images_dir, name) + + mask = self.load(mask_file, is_mask=True) + img = self.load(img_file, is_mask=False) + + assert mask is not None, mask_file + assert img is not None, img_file + + img = self.preprocess(img, self.scale, is_mask=False) + mask = self.preprocess(mask, self.scale, is_mask=True) + + if self.name_dataset in ["kvasir", "buidnewprocess"]: + mask[mask < 50] = 0 + mask[mask > 200] = 1 + elif self.name_dataset == "isiconlytrain": + mask[mask > 1] = 1 + elif self.name_dataset.startswith("las"): + mask[mask == 30] = 1 + mask[mask == 60] = 2 # main predict + mask[mask == 90] = 3 + mask[mask == 120] = 4 + mask[mask == 150] = 5 + mask[mask == 180] = 6 + mask[mask == 210] = 7 + mask[mask > 7] = 0 + else: + mask[mask>0] = 1 + + bboxes = get_bbox_from_mask(mask) + + data = { + 'image': 
torch.as_tensor(img.copy()).permute(2, 0, 1).float().contiguous(), + 'mask': torch.tensor(mask[None, :, :]).long(), + 'mask_ete': torch.as_tensor(mask.copy().astype(int)).long().contiguous(), + 'bboxes' : torch.tensor(bboxes).float(), + 'mask_file' : mask_file, + 'img_file' : img_file + } + self.cache[idx] = data + return data + + def get_3d_iter(self): + from itertools import groupby + keyf = lambda idx : self.ids[idx].split("_frame_")[0] + sorted_ids = sorted(range(len(self.ids)), key=lambda i : self.ids[i]) + for _, items in groupby(sorted_ids, key=keyf): + images = [] + masks = [] + masks_ete = [] + bboxes = [] + for idx in items: + d = self.__getitem__(idx) + images.append(d['image']) + masks.append(d['mask']) + masks_ete.append(d['mask_ete']) + bboxes.append(d['bboxes']) + # store third dimension in image channels + images = torch.stack(images, dim=0) + masks = torch.stack(masks, dim=0) + masks_ete = torch.stack(masks_ete, dim=0) + bboxes = torch.stack(bboxes, dim=0) + _3d_data = {'image': images, 'mask': masks, 'mask_ete': masks_ete, 'bboxes': bboxes} + yield _3d_data + +class AugmentedSegmentationDataset(Dataset): + def __init__(self, name_dataset: str, images_dir: str, masks_dir: str, scale = (1024, 256), transform=True): + + self.images_dir = images_dir + self.masks_dir = masks_dir + self.scale = scale + self.transform = transform + self.name_dataset = name_dataset + self.ids = os.listdir(images_dir) + if len(self.ids) == 0: + raise RuntimeError(f'No input file found in {self.images_dir}, make sure you put your images there') + logging.info(f'Creating dataset with {len(self.ids)} examples') + self.cache = {} + + def __len__(self): + return len(self.ids) + + @classmethod + def preprocess(self, img, scale, is_mask, transform): + img = resize(img, + (scale[0], scale[0]), + order=0, + preserve_range=True, + anti_aliasing=False).astype('uint8') + img = np.asarray(img) + if (not is_mask) and transform: + img = ((img - img.min()) * (1/(0.01 + img.max() - img.min()) * 255)).astype('uint8') + if is_mask: + img = resize(img, + (scale[1], scale[1]), + order=0, + preserve_range=True, + anti_aliasing=False).astype('uint8') + return img + + @classmethod + def preprocess_non_expand(self, img, scale, is_mask, transform): + img = resize(img, + (scale[0], scale[0]), + order=0, + preserve_range=True, + anti_aliasing=False).astype('uint8') + img = np.asarray(img) + if (not is_mask) and transform: + img = ((img - img.min()) * (1/(0.01 + img.max() - img.min()) * 255)).astype('uint8') + if is_mask: + img = resize(img, + (scale[1], scale[1]), + order=0, + preserve_range=True, + anti_aliasing=False).astype('uint8') + return img + + @classmethod + def load(self, filename, is_mask=False): + if is_mask: + return cv2.imread(filename, 0) + else: + return cv2.imread(filename) + + def __getitem__(self, idx): + if idx in self.cache: + return self.cache[idx] + + name = self.ids[idx] + + if self.name_dataset == "bts": + mask_file = os.path.join(self.masks_dir, name).replace(self.image_type, "_seg_") + elif self.name_dataset in ["las_mri", "las_ct"]: + mask_file = os.path.join(self.masks_dir, name).replace("image", "label") + + img_file = os.path.join(self.images_dir, name) + + mask = self.load(mask_file, is_mask=True) + img = self.load(img_file, is_mask=False) + + assert mask is not None, mask_file + assert img is not None, img_file + + img = self.preprocess_non_expand(img, self.scale, False, self.transform) + mask = self.preprocess(mask, self.scale, True, self.transform) + + if 
self.name_dataset.startswith("las"): + mask[mask == 30] = 1 + mask[mask == 60] = 2 # main predict + mask[mask == 90] = 3 + mask[mask == 120] = 4 + mask[mask == 150] = 5 + mask[mask == 180] = 6 + mask[mask == 210] = 7 + mask[mask > 7] = 0 + else: + mask[mask>0]=1 + + bboxes = get_bbox_from_mask(mask) + + data = { + 'image': torch.as_tensor(img.copy()).permute(2, 0, 1).float().contiguous(), + 'mask': torch.tensor(mask[None, :, :]).long(), + 'bboxes' : torch.tensor(bboxes).float(), + 'mask_file' : mask_file, + 'img_file' : img_file + } + self.cache[idx] = data + return data + + def get_3d_iter(self): + from itertools import groupby + keyf = lambda idx : self.ids[idx].split("_frame_")[0] + sorted_ids = sorted(range(len(self.ids)), key=lambda i : self.ids[i]) + for _, items in groupby(sorted_ids, key=keyf): + images = [] + masks = [] + bboxes = [] + for idx in items: + d = self.__getitem__(idx) + images.append(d['image']) + masks.append(d['mask']) + bboxes.append(d['bboxes']) + # store third dimension in image channels + images = torch.stack(images, dim=0) + masks = torch.stack(masks, dim=0) + bboxes = torch.stack(bboxes, dim=0) + _3d_data = {'image': images, 'mask': masks, 'bboxes': bboxes} + yield _3d_data \ No newline at end of file diff --git a/dataloader/dataset_ete.py b/dataloader/dataset_ete.py new file mode 100644 index 0000000000000000000000000000000000000000..3d4c8c83efc7e9b3c33b17ef334da5a98879801b --- /dev/null +++ b/dataloader/dataset_ete.py @@ -0,0 +1,267 @@ +import logging +import os +import numpy as np +import torch +import cv2 +from skimage.transform import resize +from torch.utils.data import Dataset + + + +class SegmentationDataset_train(Dataset): + def __init__(self, nonlabel_path: str, havelabel_path: str, dataset: str, scale = (224, 224)): + self.nonlabel_path = nonlabel_path + self.havelabel_path = havelabel_path + self.name_dataset = dataset + self.scale = scale + + with open(self.nonlabel_path, 'r') as nlf: + lines = nlf.readlines() + non_label_lines = [line.strip().split(' ')[:2] for line in lines] + + with open(self.havelabel_path, 'r') as hlf: + lines = hlf.readlines() + have_label_lines = [line.strip().split(' ')[:2] for line in lines] + + if len(non_label_lines) == 0: + self.ids = np.array(have_label_lines, dtype= object) + else: + choose_non_lable_lines = np.random.choice(len(non_label_lines), size = len(have_label_lines)) + non_label_lines = np.array(non_label_lines, dtype= object) + have_label_lines = np.array(have_label_lines, dtype= object) + self.ids = np.concatenate([non_label_lines[choose_non_lable_lines], have_label_lines], axis= 0) + # self.ids = os.listdir(images_dir) #[splitext(file)[0] for file in listdir(images_dir) if not file.startswith('.') and image_type in file] + # print(len(self.ids)) + # if datasetname == "las_mri": + # self.ids = [f for f in self.ids if image_type in f] + if len(self.ids) == 0: + raise RuntimeError(f'No input file found in {self.images_dir}, make sure you put your images there') + logging.info(f'Creating dataset with {len(self.ids)} examples') + self.cache = {} + + def __len__(self): + return len(self.ids) + + @classmethod + def preprocess(self, img, scale, is_mask): + img = resize(img, + (scale[0], scale[0]), + order=0, + preserve_range=True, + anti_aliasing=False).astype('uint8') + img = np.asarray(img) + if not is_mask: + img = ((img - img.min()) * (1/(0.01 + img.max() - img.min()) * 255)).astype('uint8') + if len(img.shape) != 3: + img = np.expand_dims(img, axis=2) #(1, 224, 224) + if is_mask: + img = resize(img, + 
(scale[1], scale[1]), + order=0, + preserve_range=True, + anti_aliasing=False).astype('uint8') + return img + + @classmethod + def load(self, filename, name_dataset, is_mask=False): + if name_dataset.startswith("las"): + if is_mask: + return cv2.imread(filename, cv2.IMREAD_UNCHANGED) + else: + img = cv2.imread(filename, 0) + return img + else: + if is_mask: + return cv2.imread(filename, 0) + else: + return cv2.imread(filename) + + def __getitem__(self, idx): + if idx in self.cache: + return self.cache[idx] + + img_file = self.ids[idx][0] + mask_file = self.ids[idx][1] + # print(img_file) + #start = time.time() + mask = self.load(mask_file, self.name_dataset, is_mask=True) + img = self.load(img_file, self.name_dataset, is_mask=False) + + assert mask is not None, mask_file + assert img is not None, img_file + + + if self.name_dataset in ["kvasir", "buidnewprocess"]: + mask[mask < 50] = 0 + mask[mask > 200] = 1 + elif self.name_dataset == "isiconlytrain": + mask[mask > 1] = 1 + elif self.name_dataset.startswith("las"): + mask[mask == 30] = 1 + mask[mask == 60] = 2 # main predict + mask[mask == 90] = 3 + mask[mask == 120] = 4 + mask[mask == 150] = 5 + mask[mask == 180] = 6 + mask[mask == 210] = 7 + mask[mask > 7] = 0 + else: + mask[mask>0] = 1 + + img = self.preprocess(img, self.scale, is_mask=False) + mask = self.preprocess(mask, self.scale, is_mask=True) + + data = { + 'image': torch.as_tensor(img.copy()).permute(2, 0, 1).float().contiguous(), + 'mask_ete': torch.as_tensor(mask.copy().astype(int)).long().contiguous(), + 'mask_file' : mask_file, + 'img_file' : img_file + } + self.cache[idx] = data + return data + + def get_3d_iter(self): + from itertools import groupby + keyf = lambda idx : self.ids[idx].split("_frame_")[0] + sorted_ids = sorted(range(len(self.ids)), key=lambda i : self.ids[i]) + for _, items in groupby(sorted_ids, key=keyf): + images = [] + masks_ete = [] + for idx in items: + d = self.__getitem__(idx) + images.append(d['image']) + masks_ete.append(d['mask_ete']) + # store third dimension in image channels + images = torch.stack(images, dim=0) + masks_ete = torch.stack(masks_ete, dim=0) + _3d_data = {'image': images, 'mask_ete': masks_ete} + yield _3d_data + + +class SegmentationDataset(Dataset): + def __init__(self, name_dataset: str, images_dir: str, masks_dir: str, scale = (1024, 256)): + self.images_dir = images_dir + self.masks_dir = masks_dir + self.scale = scale + self.name_dataset = name_dataset + self.ids = os.listdir(images_dir) + if len(self.ids) == 0: + raise RuntimeError(f'No input file found in {self.images_dir}, make sure you put your images there') + logging.info(f'Creating dataset with {len(self.ids)} examples') + self.cache = {} + + def __len__(self): + return len(self.ids) + + @classmethod + def preprocess(self, img, scale, is_mask): + img = resize(img, + (scale[0], scale[0]), + order=0, + preserve_range=True, + anti_aliasing=False).astype('uint8') + img = np.asarray(img) + if not is_mask: + img = ((img - img.min()) * (1/(img.max() - img.min()) * 255)).astype('uint8') + if len(img.shape) != 3: + img = np.expand_dims(img, axis=2) #(1, 224, 224) + if is_mask: + img = resize(img, + (scale[1], scale[1]), + order=0, + preserve_range=True, + anti_aliasing=False).astype('uint8') + return img + + @classmethod + def load(self, filename, name_dataset, is_mask=False): + if name_dataset.startswith("las"): + if is_mask: + return cv2.imread(filename, cv2.IMREAD_UNCHANGED) + else: + img = cv2.imread(filename, 0) + return img + else: + if is_mask: + return 
cv2.imread(filename, 0) + else: + return cv2.imread(filename) + + def __getitem__(self, idx): + if idx in self.cache: + return self.cache[idx] + + name = self.ids[idx] + + if self.name_dataset == "isiconlytrain": + mask_file = os.path.join(self.masks_dir, name).split(".jpg")[0] + mask_file = mask_file + "_segmentation.png" + elif self.name_dataset == "buidnewprocess": + mask_file = os.path.join(self.masks_dir, name) + elif self.name_dataset == "kvasir": + mask_file = os.path.join(self.masks_dir, name) + elif self.name_dataset == "drive": + mask_file = os.path.join(self.masks_dir, name).replace("training", "manual1") + elif self.name_dataset == "bts": + mask_file = os.path.join(self.masks_dir, name).replace(self.image_type, "_seg_") + elif self.name_dataset in ["las_mri", "las_ct"]: + mask_file = os.path.join(self.masks_dir, name).replace("image", "label") + else: + mask_file = os.path.join(self.masks_dir, name) + + img_file = os.path.join(self.images_dir, name) + + mask = self.load(mask_file, self.name_dataset, is_mask=True) + img = self.load(img_file, self.name_dataset, is_mask=False) + + assert mask is not None, mask_file + assert img is not None, img_file + + + + if self.name_dataset in ["kvasir", "buidnewprocess"]: + mask[mask < 50] = 0 + mask[mask > 200] = 1 + elif self.name_dataset == "isiconlytrain": + mask[mask > 1] = 1 + elif self.name_dataset.startswith("las"): + mask[mask == 30] = 1 + mask[mask == 60] = 2 # main predict + mask[mask == 90] = 3 + mask[mask == 120] = 4 + mask[mask == 150] = 5 + mask[mask == 180] = 6 + mask[mask == 210] = 7 + mask[mask > 7] = 0 + else: + mask[mask>0] = 1 + + img = self.preprocess(img, self.scale, is_mask=False) + mask = self.preprocess(mask, self.scale, is_mask=True) + + data = { + 'image': torch.as_tensor(img.copy()).permute(2, 0, 1).float().contiguous(), + 'mask_ete': torch.as_tensor(mask.copy().astype(int)).long().contiguous(), + 'mask_file' : mask_file, + 'img_file' : img_file + } + self.cache[idx] = data + return data + + def get_3d_iter(self): + from itertools import groupby + keyf = lambda idx : self.ids[idx].split("_frame_")[0] + sorted_ids = sorted(range(len(self.ids)), key=lambda i : self.ids[i]) + for _, items in groupby(sorted_ids, key=keyf): + images = [] + masks_ete = [] + for idx in items: + d = self.__getitem__(idx) + images.append(d['image']) + masks_ete.append(d['mask_ete']) + # store third dimension in image channels + images = torch.stack(images, dim=0) + masks_ete = torch.stack(masks_ete, dim=0) + _3d_data = {'image': images, 'mask_ete': masks_ete} + yield _3d_data + diff --git a/dataloader/sam_transforms.py b/dataloader/sam_transforms.py new file mode 100644 index 0000000000000000000000000000000000000000..c08ba1e3db751f3a5483a003be38c69c2cf2df85 --- /dev/null +++ b/dataloader/sam_transforms.py @@ -0,0 +1,102 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import numpy as np +import torch +from torch.nn import functional as F +from torchvision.transforms.functional import resize, to_pil_image # type: ignore + +from copy import deepcopy +from typing import Tuple + + +class ResizeLongestSide: + """ + Resizes images to the longest side 'target_length', as well as provides + methods for resizing coordinates and boxes. Provides methods for + transforming both numpy array and batched torch tensors. 
+ """ + + def __init__(self, target_length: int) -> None: + self.target_length = target_length + + def apply_image(self, image: np.ndarray) -> np.ndarray: + """ + Expects a numpy array with shape HxWxC in uint8 format. + """ + target_size = self.get_preprocess_shape(image.shape[0], image.shape[1], self.target_length) + return np.array(resize(to_pil_image(image), target_size)) + + def apply_coords(self, coords: np.ndarray, original_size: Tuple[int, ...]) -> np.ndarray: + """ + Expects a numpy array of length 2 in the final dimension. Requires the + original image size in (H, W) format. + """ + old_h, old_w = original_size + new_h, new_w = self.get_preprocess_shape( + original_size[0], original_size[1], self.target_length + ) + coords = deepcopy(coords).astype(float) + coords[..., 0] = coords[..., 0] * (new_w / old_w) + coords[..., 1] = coords[..., 1] * (new_h / old_h) + return coords + + def apply_boxes(self, boxes: np.ndarray, original_size: Tuple[int, ...]) -> np.ndarray: + """ + Expects a numpy array shape Bx4. Requires the original image size + in (H, W) format. + """ + boxes = self.apply_coords(boxes.reshape(-1, 2, 2), original_size) + return boxes.reshape(-1, 4) + + def apply_image_torch(self, image: torch.Tensor) -> torch.Tensor: + """ + Expects batched images with shape BxCxHxW and float format. This + transformation may not exactly match apply_image. apply_image is + the transformation expected by the model. + """ + # Expects an image in BCHW format. May not exactly match apply_image. + target_size = self.get_preprocess_shape(image.shape[2], image.shape[3], self.target_length) + return F.interpolate( + image, target_size, mode="bilinear", align_corners=False, antialias=True + ) + + def apply_coords_torch( + self, coords: torch.Tensor, original_size: Tuple[int, ...] + ) -> torch.Tensor: + """ + Expects a torch tensor with length 2 in the last dimension. Requires the + original image size in (H, W) format. + """ + old_h, old_w = original_size + new_h, new_w = self.get_preprocess_shape( + original_size[0], original_size[1], self.target_length + ) + coords = deepcopy(coords).to(torch.float) + coords[..., 0] = coords[..., 0] * (new_w / old_w) + coords[..., 1] = coords[..., 1] * (new_h / old_h) + return coords + + def apply_boxes_torch( + self, boxes: torch.Tensor, original_size: Tuple[int, ...] + ) -> torch.Tensor: + """ + Expects a torch tensor with shape Bx4. Requires the original image + size in (H, W) format. + """ + boxes = self.apply_coords_torch(boxes.reshape(-1, 2, 2), original_size) + return boxes.reshape(-1, 4) + + @staticmethod + def get_preprocess_shape(oldh: int, oldw: int, long_side_length: int) -> Tuple[int, int]: + """ + Compute the output size given input size and target long side length. 
+ """ + scale = long_side_length * 1.0 / max(oldh, oldw) + newh, neww = oldh * scale, oldw * scale + neww = int(neww + 0.5) + newh = int(newh + 0.5) + return (newh, neww) diff --git a/dataloader/yaml_data/brain_endtoend_R50_frozen_1_fcn.yml b/dataloader/yaml_data/brain_endtoend_R50_frozen_1_fcn.yml new file mode 100644 index 0000000000000000000000000000000000000000..7612fa040e018b969dd14683ada887eb5c17ae02 --- /dev/null +++ b/dataloader/yaml_data/brain_endtoend_R50_frozen_1_fcn.yml @@ -0,0 +1,24 @@ +base: + is_R50: 1 + is_SAMVIT: 0 + dataset_name: brain + gpu_id: '3' + original_checkpoint: 'lvm-med-resnet' + best_valid_model_checkpoint: './working_dir/checkpoint/' + frozen_eval: 1 + model: '1-fcn' + test_mode: 'best_valid' + valid_rate: 0.3 + +train: + train_batch_size: 64 + valid_batch_size: 1 + test_batch_size: 1 + learning_rate: 0.0005 + num_epochs: 20 + linear_eval: 0 + solver: 'adam' + +dataloader: + data_path: './dataset_demo/BRAIN' + \ No newline at end of file diff --git a/dataloader/yaml_data/brain_endtoend_R50_frozen_fcns.yml b/dataloader/yaml_data/brain_endtoend_R50_frozen_fcns.yml new file mode 100644 index 0000000000000000000000000000000000000000..3e67a60e2109115e1698f76c33860ea1b0b10288 --- /dev/null +++ b/dataloader/yaml_data/brain_endtoend_R50_frozen_fcns.yml @@ -0,0 +1,24 @@ +base: + is_R50: 1 + is_SAMVIT: 0 + dataset_name: brain + gpu_id: '3' + original_checkpoint: 'lvm-med-resnet' + best_valid_model_checkpoint: './working_dir/checkpoint/' + frozen_eval: 1 + model: 'fcns' + test_mode: 'best_valid' + valid_rate: 0.3 + +train: + train_batch_size: 64 + valid_batch_size: 1 + test_batch_size: 1 + learning_rate: 0.0005 + num_epochs: 20 + linear_eval: 0 + solver: 'adam' + +dataloader: + data_path: './dataset_demo/BRAIN' + \ No newline at end of file diff --git a/dataloader/yaml_data/brain_endtoend_R50_non_frozen_1_fcn.yml b/dataloader/yaml_data/brain_endtoend_R50_non_frozen_1_fcn.yml new file mode 100644 index 0000000000000000000000000000000000000000..b46c8f5bd6b5e78dc47ff188c63d5fc95f89cd2c --- /dev/null +++ b/dataloader/yaml_data/brain_endtoend_R50_non_frozen_1_fcn.yml @@ -0,0 +1,24 @@ +base: + is_R50: 1 + is_SAMVIT: 0 + dataset_name: brain + gpu_id: '3' + original_checkpoint: 'lvm-med-resnet' + best_valid_model_checkpoint: './working_dir/checkpoint/' + frozen_eval: 0 + model: '1-fcn' + test_mode: 'best_valid' + valid_rate: 0.3 + +train: + train_batch_size: 64 + valid_batch_size: 1 + test_batch_size: 1 + learning_rate: 0.0005 + num_epochs: 30 + linear_eval: 0 + solver: 'adam' + +dataloader: + data_path: './dataset_demo/BRAIN' + \ No newline at end of file diff --git a/dataloader/yaml_data/brain_endtoend_R50_non_frozen_fcns.yml b/dataloader/yaml_data/brain_endtoend_R50_non_frozen_fcns.yml new file mode 100644 index 0000000000000000000000000000000000000000..68640e5141cc8712fc69b5dcf14c1c8896cdc461 --- /dev/null +++ b/dataloader/yaml_data/brain_endtoend_R50_non_frozen_fcns.yml @@ -0,0 +1,24 @@ +base: + is_R50: 1 + is_SAMVIT: 0 + dataset_name: brain + gpu_id: '3' + original_checkpoint: 'lvm-med-resnet' + best_valid_model_checkpoint: './working_dir/checkpoint/' + frozen_eval: 0 + model: 'fcns' + test_mode: 'best_valid' + valid_rate: 0.3 + +train: + train_batch_size: 64 + valid_batch_size: 1 + test_batch_size: 1 + learning_rate: 0.0005 + num_epochs: 30 + linear_eval: 0 + solver: 'adam' + +dataloader: + data_path: './dataset_demo/BRAIN' + \ No newline at end of file diff --git a/dataloader/yaml_data/buid_endtoend_R50.yml b/dataloader/yaml_data/buid_endtoend_R50.yml new file mode 
100644 index 0000000000000000000000000000000000000000..1c4a996c77714108e1a5e8c7abf09f72369ace6c --- /dev/null +++ b/dataloader/yaml_data/buid_endtoend_R50.yml @@ -0,0 +1,32 @@ +base: + is_2D: 1 + is_3D: 0 + is_R50: 1 + is_SAMVIT: 0 + dataset_name: buidnewprocess + gpu_id: '5' + original_checkpoint: 'lvm-med-resnet' # change checkpoints here + best_valid_model_checkpoint: './working_dir/checkpoint/BUID_R50' + image_shape: 256 + +train: + train_batch_size: 8 + valid_batch_size: 1 + test_batch_size: 1 + optimizer: 'adam' + learning_rate: 0.0001 + num_epochs: 50 + beta1: 0.9 + beta2: 0.999 + weight_decay: 0 + scheduler: 0 + +dataloader: + train_dir_img: './dataset_demo/BUID/train' + train_dir_mask: './dataset_demo/BUID/train_labels' + valid_dir_img: './dataset_demo/BUID/valid' + valid_dir_mask: './dataset_demo/BUID/valid_labels' + test_dir_img: './dataset_demo/BUID/test' + test_dir_mask: './dataset_demo/BUID/test_labels' + have_label: './dataset_demo/BUID/have_label.txt' + non_label: './dataset_demo/BUID/non_label.txt' diff --git a/dataloader/yaml_data/buid_lvm_med_sam.yml b/dataloader/yaml_data/buid_lvm_med_sam.yml new file mode 100644 index 0000000000000000000000000000000000000000..8de3e0909164ce0b670c5dbc265f838e3429d141 --- /dev/null +++ b/dataloader/yaml_data/buid_lvm_med_sam.yml @@ -0,0 +1,28 @@ +base: + is_2D: True + is_3D: False + dataset_name: buidnewprocess + random_seed: 100 + gpu_id: 3 + num_workers: 40 + pin_memory: True + original_checkpoint: './working_dir/sam_vit_b_01ec64.pth' # Original checkpoint from segment-anything project + best_valid_model_checkpoint: 'lvm_med_sam_model_best_dice_original_buidnewprocess_seed' # I just change the path to save model here + image_shape: (3, 1024) + +train: + task: sam + train_batch_size: 20 + valid_batch_size: 1 + test_batch_size: 1 + optimizer: 'adam' + learning_rate: 1e-4 + num_epochs: 20 + +dataloader: + train_dir_img: './dataset_demo/BUID/train' + train_dir_mask: './dataset_demo/BUID/train_labels' + valid_dir_img: './dataset_demo/BUID/valid' + valid_dir_mask: './dataset_demo/BUID/valid_labels' + test_dir_img: './dataset_demo/BUID/test' + test_dir_mask: './dataset_demo/BUID/test_labels' \ No newline at end of file diff --git a/dataloader/yaml_data/buid_sam.yml b/dataloader/yaml_data/buid_sam.yml new file mode 100644 index 0000000000000000000000000000000000000000..09b8abe60d19036584df27222b13dbb30d42f129 --- /dev/null +++ b/dataloader/yaml_data/buid_sam.yml @@ -0,0 +1,28 @@ +base: + is_2D: True + is_3D: False + dataset_name: buidnewprocess + random_seed: 100 + gpu_id: 3 + num_workers: 40 + pin_memory: True + original_checkpoint: './working_dir/sam_vit_b_01ec64.pth' # Original checkpoint from segment-anything project + best_valid_model_checkpoint: 'sam_model_best_dice_original_buidnewprocess_seed' + image_shape: (3, 1024) + +train: + task: sam + train_batch_size: 20 + valid_batch_size: 1 + test_batch_size: 1 + optimizer: 'adam' + learning_rate: 1e-4 + num_epochs: 20 + +dataloader: + train_dir_img: './dataset_demo/BUID/train' + train_dir_mask: './dataset_demo/BUID/train_labels' + valid_dir_img: './dataset_demo/BUID/valid' + valid_dir_mask: './dataset_demo/BUID/valid_labels' + test_dir_img: './dataset_demo/BUID/test' + test_dir_mask: './dataset_demo/BUID/test_labels' \ No newline at end of file diff --git a/dataloader/yaml_data/fgadr_endtoend_R50_frozen_1_fcn.yml b/dataloader/yaml_data/fgadr_endtoend_R50_frozen_1_fcn.yml new file mode 100644 index 0000000000000000000000000000000000000000..aa5a9c5eab4834a1a1969621ce6d4f6df8269b2c --- 
/dev/null +++ b/dataloader/yaml_data/fgadr_endtoend_R50_frozen_1_fcn.yml @@ -0,0 +1,24 @@ +base: + is_R50: 1 + is_SAMVIT: 0 + dataset_name: fgadr + gpu_id: '3' + original_checkpoint: 'lvm-med-resnet' + best_valid_model_checkpoint: './working_dir/checkpoint/' + frozen_eval: 1 + model: '1-fcn' + test_mode: 'best_valid' + valid_rate: 0.2 + +train: + train_batch_size: 32 + valid_batch_size: 1 + test_batch_size: 1 + learning_rate: 0.0005 + num_epochs: 20 + linear_eval: 0 + solver: 'adam' + +dataloader: + data_path: './dataset_demo/FGADR' + \ No newline at end of file diff --git a/dataloader/yaml_data/fgadr_endtoend_R50_frozen_fcns.yml b/dataloader/yaml_data/fgadr_endtoend_R50_frozen_fcns.yml new file mode 100644 index 0000000000000000000000000000000000000000..f6fb921df57b63e4d1aed40dad1ca2248e96be7e --- /dev/null +++ b/dataloader/yaml_data/fgadr_endtoend_R50_frozen_fcns.yml @@ -0,0 +1,24 @@ +base: + is_R50: 1 + is_SAMVIT: 0 + dataset_name: fgadr + gpu_id: '3' + original_checkpoint: 'lvm-med-resnet' + best_valid_model_checkpoint: './working_dir/checkpoint/' + frozen_eval: 1 + model: 'fcns' + test_mode: 'best_valid' + valid_rate: 0.2 + +train: + train_batch_size: 32 + valid_batch_size: 1 + test_batch_size: 1 + learning_rate: 0.0005 + num_epochs: 20 + linear_eval: 0 + solver: 'adam' + +dataloader: + data_path: './dataset_demo/FGADR' + \ No newline at end of file diff --git a/dataloader/yaml_data/fgadr_endtoend_R50_non_frozen_1_fcn.yml b/dataloader/yaml_data/fgadr_endtoend_R50_non_frozen_1_fcn.yml new file mode 100644 index 0000000000000000000000000000000000000000..0dd18d3624c15ad4b63d8b65d74df0354efb6ad3 --- /dev/null +++ b/dataloader/yaml_data/fgadr_endtoend_R50_non_frozen_1_fcn.yml @@ -0,0 +1,24 @@ +base: + is_R50: 1 + is_SAMVIT: 0 + dataset_name: fgadr + gpu_id: '3' + original_checkpoint: 'lvm-med-resnet' + best_valid_model_checkpoint: './working_dir/checkpoint/' + frozen_eval: 0 + model: '1-fcn' + test_mode: 'best_valid' + valid_rate: 0.2 + +train: + train_batch_size: 32 + valid_batch_size: 1 + test_batch_size: 1 + learning_rate: 0.0001 + num_epochs: 30 + linear_eval: 0 + solver: 'adam' + +dataloader: + data_path: './dataset_demo/FGADR' + \ No newline at end of file diff --git a/dataloader/yaml_data/fgadr_endtoend_R50_non_frozen_fcns.yml b/dataloader/yaml_data/fgadr_endtoend_R50_non_frozen_fcns.yml new file mode 100644 index 0000000000000000000000000000000000000000..19f293130325ac862428676cc00a25e1ffe4e20f --- /dev/null +++ b/dataloader/yaml_data/fgadr_endtoend_R50_non_frozen_fcns.yml @@ -0,0 +1,24 @@ +base: + is_R50: 1 + is_SAMVIT: 0 + dataset_name: fgadr + gpu_id: '3' + original_checkpoint: 'lvm-med-resnet' + best_valid_model_checkpoint: './working_dir/checkpoint/' + frozen_eval: 0 + model: 'fcns' + test_mode: 'best_valid' + valid_rate: 0.2 + +train: + train_batch_size: 32 + valid_batch_size: 1 + test_batch_size: 1 + learning_rate: 0.0001 + num_epochs: 30 + linear_eval: 0 + solver: 'adam' + +dataloader: + data_path: './dataset_demo/FGADR' + \ No newline at end of file diff --git a/dataloader/yaml_data/kvasir_endtoend_R50.yml b/dataloader/yaml_data/kvasir_endtoend_R50.yml new file mode 100644 index 0000000000000000000000000000000000000000..39b4f3d7295abe5e242dfad3258d0f31e5821977 --- /dev/null +++ b/dataloader/yaml_data/kvasir_endtoend_R50.yml @@ -0,0 +1,32 @@ +base: + is_2D: 1 + is_3D: 0 + is_R50: 1 + is_SAMVIT: 0 + dataset_name: kvasir + gpu_id: '1' + original_checkpoint: 'lvm-med-resnet' # Original checkpoint from segment-anything project + best_valid_model_checkpoint: 
'./working_dir/checkpoint/Kvasir_R50' + image_shape: 224 + +train: + train_batch_size: 64 + valid_batch_size: 1 + test_batch_size: 1 + optimizer: 'adam' + learning_rate: 0.001 + num_epochs: 35 + beta1: 0.9 + beta2: 0.999 + weight_decay: 0 + scheduler: 0 + +dataloader: + train_dir_img: './dataset_demo/Kvasir/train' + train_dir_mask: './dataset_demo/Kvasir/train_labels' + valid_dir_img: './dataset_demo/Kvasir/valid' + valid_dir_mask: './dataset_demo/Kvasir/valid_labels' + test_dir_img: './dataset_demo/Kvasir/test' + test_dir_mask: './dataset_demo/Kvasir/test_labels' + have_label: './dataset_demo/Kvasir/have_label.txt' + non_label: './dataset_demo/Kvasir/non_label.txt' \ No newline at end of file diff --git a/dataloader/yaml_data/kvasir_sam.yml b/dataloader/yaml_data/kvasir_sam.yml new file mode 100644 index 0000000000000000000000000000000000000000..9c0ea613b30a4e466c5f4eb55040cb1098301cc5 --- /dev/null +++ b/dataloader/yaml_data/kvasir_sam.yml @@ -0,0 +1,28 @@ +base: + is_2D: True + is_3D: False + dataset_name: kvasir + random_seed: 100 + gpu_id: 3 + num_workers: 40 + pin_memory: True + original_checkpoint: ./working_dir/sam_vit_b_01ec64.pth' # Original checkpoint from segment-anything project + best_valid_model_checkpoint: 'sam_model_best_dice_original_kvasir_seed' + image_shape: (3, 1024) + +train: + task: sam + train_batch_size: 20 + valid_batch_size: 1 + test_batch_size: 1 + optimizer: 'adam' + learning_rate: 3e-4 + num_epochs: 20 + +dataloader: + train_dir_img: './dataset_demo/Kvasir/train' + train_dir_mask: './dataset_demo/Kvasir/train_labels' + valid_dir_img: './dataset_demo/Kvasir/valid' + valid_dir_mask: './dataset_demo/Kvasir/valid_labels' + test_dir_img: './dataset_demo/Kvasir/test' + test_dir_mask: './dataset_demo/Kvasir/test_labels' \ No newline at end of file diff --git a/dataloader/yaml_data/mmwhs_ct_endtoend_R50.yml b/dataloader/yaml_data/mmwhs_ct_endtoend_R50.yml new file mode 100644 index 0000000000000000000000000000000000000000..75852cd16651f56a62fd040da1aed692cd1b4d5c --- /dev/null +++ b/dataloader/yaml_data/mmwhs_ct_endtoend_R50.yml @@ -0,0 +1,32 @@ +base: + is_2D: 0 + is_3D: 1 + is_R50: 1 + is_SAMVIT: 0 + dataset_name: las_ct + gpu_id: '2' + original_checkpoint: 'lvm-med-resnet' # Original checkpoint from segment-anything project + best_valid_model_checkpoint: './working_dir/checkpoint/MMWHS_CT_R50' + image_shape: 224 + +train: + train_batch_size: 64 + valid_batch_size: 1 + test_batch_size: 1 + optimizer: 'adam' + learning_rate: 0.001 + num_epochs: 20 + beta1: 0.9 + beta2: 0.999 + weight_decay: 0 + scheduler: 1 + +dataloader: + train_dir_img: './dataset_demo/MMWHS_CT_Heart/train' + train_dir_mask: './dataset_demo/MMWHS_CT_Heart/train_labels' + valid_dir_img: './dataset_demo/MMWHS_CT_Heart/valid' + valid_dir_mask: './dataset_demo/MMWHS_CT_Heart/valid_labels' + test_dir_img: './dataset_demo/MMWHS_CT_Heart/test' + test_dir_mask: './dataset_demo/MMWHS_CT_Heart/test_labels' + have_label: './dataset_demo/MMWHS_CT_Heart/have_label.txt' + non_label: './dataset_demo/MMWHS_CT_Heart/non_label.txt' \ No newline at end of file diff --git a/dataloader/yaml_data/mmwhs_ct_sam.yml b/dataloader/yaml_data/mmwhs_ct_sam.yml new file mode 100644 index 0000000000000000000000000000000000000000..e32bd90e02aa6232224231f4011e824c89b95553 --- /dev/null +++ b/dataloader/yaml_data/mmwhs_ct_sam.yml @@ -0,0 +1,28 @@ +base: + is_2D: False + is_3D: True + dataset_name: las_ct + random_seed: 100 + gpu_id: 3 + num_workers: 40 + pin_memory: True + original_checkpoint: './working_dir/sam_vit_b_01ec64.pth' # 
Original checkpoint from segment-anything project + best_valid_model_checkpoint: 'sam_model_best_original_las_ct_seed' + image_shape: (3, 1024) + +train: + task: sam + train_batch_size: 20 + valid_batch_size: 1 + test_batch_size: 1 + optimizer: 'adam' + learning_rate: 3e-5 + num_epochs: 30 + +dataloader: + train_dir_img: './dataset_demo/MMWHS_CT_Heart/train' + train_dir_mask: './dataset_demo/MMWHS_CT_Heart/train_labels' + valid_dir_img: './dataset_demo/MMWHS_CT_Heart/valid' + valid_dir_mask: './dataset_demo/MMWHS_CT_Heart/valid_labels' + test_dir_img: './dataset_demo/MMWHS_CT_Heart/test' + test_dir_mask: './dataset_demo/MMWHS_CT_Heart/test_labels' \ No newline at end of file diff --git a/dataloader/yaml_data/mmwhs_mr_endtoend_R50.yml b/dataloader/yaml_data/mmwhs_mr_endtoend_R50.yml new file mode 100644 index 0000000000000000000000000000000000000000..cdec05f65faa19e80495d12d17832861f6a59a7b --- /dev/null +++ b/dataloader/yaml_data/mmwhs_mr_endtoend_R50.yml @@ -0,0 +1,32 @@ +base: + is_2D: 0 + is_3D: 1 + is_R50: 1 + is_SAMVIT: 0 + dataset_name: las_mri + gpu_id: '4' + original_checkpoint: 'lvm-med-resnet' # Original checkpoint from segment-anything project + best_valid_model_checkpoint: './working_dir/checkpoint/MMWHS_MR_R50' + image_shape: 224 + +train: + train_batch_size: 64 + valid_batch_size: 32 + test_batch_size: 1 + optimizer: 'adam' + learning_rate: 0.0015 + num_epochs: 30 + beta1: 0.9 + beta2: 0.999 + weight_decay: 0 + scheduler: 1 + +dataloader: + train_dir_img: './dataset_demo/MMWHS_MR_Heart/train' + train_dir_mask: './dataset_demo/MMWHS_MR_Heart/train_labels' + valid_dir_img: './dataset_demo/MMWHS_MR_Heart/valid' + valid_dir_mask: './dataset_demo/MMWHS_MR_Heart/valid_labels' + test_dir_img: './dataset_demo/MMWHS_MR_Heart/test' + test_dir_mask: './dataset_demo/MMWHS_MR_Heart/test_labels' + have_label: './dataset_demo/MMWHS_MR_Heart/have_label.txt' + non_label: './dataset_demo/MMWHS_MR_Heart/non_label.txt' \ No newline at end of file diff --git a/dataloader/yaml_data/mmwhs_mri_sam.yml b/dataloader/yaml_data/mmwhs_mri_sam.yml new file mode 100644 index 0000000000000000000000000000000000000000..5745fbca6ca21df10f5fc56aa2deb3e8de92f32d --- /dev/null +++ b/dataloader/yaml_data/mmwhs_mri_sam.yml @@ -0,0 +1,28 @@ +base: + is_2D: False + is_3D: True + dataset_name: las_mri + random_seed: 100 + gpu_id: 3 + num_workers: 40 + pin_memory: True + original_checkpoint: './working_dir/sam_vit_b_01ec64.pth' # Original checkpoint from segment-anything project + best_valid_model_checkpoint: 'sam_model_best_original_las_mri_seed' + image_shape: (3, 1024) + +train: + task: sam + train_batch_size: 20 + valid_batch_size: 1 + test_batch_size: 1 + optimizer: 'adam' + learning_rate: 5e-5 + num_epochs: 30 + +dataloader: + train_dir_img: './dataset_demo/MMWHS_MR_Heart/train' + train_dir_mask: './dataset_demo/MMWHS_MR_Heart/train_labels' + valid_dir_img: './dataset_demo/MMWHS_MR_Heart/valid' + valid_dir_mask: './dataset_demo/MMWHS_MR_Heart/valid_labels' + test_dir_img: './dataset_demo/MMWHS_MR_Heart/test' + test_dir_mask: './dataset_demo/MMWHS_MR_Heart/test_labels' \ No newline at end of file diff --git a/dataset_demo/__init__.py b/dataset_demo/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc --- /dev/null +++ b/dataset_demo/__init__.py @@ -0,0 +1 @@ + diff --git a/datasets_split/BUID_split.py b/datasets_split/BUID_split.py new file mode 100644 index 0000000000000000000000000000000000000000..d29f6c17ecec1482a2065324332ef69d656260e0 --- 
/dev/null +++ b/datasets_split/BUID_split.py @@ -0,0 +1,73 @@ +import numpy as np +import torch +import shutil +import os +import matplotlib.pyplot as plt +import cv2 +import json +from PIL import Image +import pickle +from skimage.transform import resize +from utils.dataset_prepare import split_data, save_fileLabel + +def BUID_split(): + dataset_name = "BUID" + file = "./dataset_demo/BUID" + if os.path.exists(os.path.join(file, "normal")): + shutil.rmtree(os.path.join(file, "normal")) + + dir_data = "./dataset_demo/BUID/images" + dir_label = "./dataset_demo/BUID/labels" + + if os.path.exists(dir_data): + shutil.rmtree(dir_data) + os.mkdir(dir_data) + + if os.path.exists(dir_label): + shutil.rmtree(dir_label) + os.mkdir(dir_label) + + for i in os.listdir(file): + if i == "labels" or i == "images" or "label" in i: + continue + file_label = os.path.join(file, i) + for img in os.listdir(file_label): + img_file = os.path.join(file_label, img) + if "mask" in img: + shutil.copy(img_file, os.path.join(dir_label, img)) + else: + shutil.copy(img_file, os.path.join(dir_data, img)) + + file = os.listdir(dir_label) + label_uni = -1 + check = False + b = 0 + for i in os.listdir(dir_data): + for k in range(10): + if k == 0: + mask = "_mask" + else: + mask = "_mask_" + str(k) + a = i.replace(".png", mask+".png") + + if a in file: + b = k + if not check: + label_uni = cv2.imread(os.path.join(dir_label, a)) + check = True + else: + img = cv2.imread(os.path.join(dir_label, a)) + label_uni = label_uni + img + os.remove(os.path.join(dir_label, a)) + else: + check = False + break + + #print(i) + cv2.imwrite(os.path.join(dir_label, i), label_uni) + label_uni = -1 + check = False + b = 0 + + split_data(dataset_name) + save_fileLabel(dataset_name) \ No newline at end of file diff --git a/datasets_split/FGADR_split.py b/datasets_split/FGADR_split.py new file mode 100644 index 0000000000000000000000000000000000000000..95f886b8f119d10421a133f61bdd185bdd3ca332 --- /dev/null +++ b/datasets_split/FGADR_split.py @@ -0,0 +1,38 @@ +import numpy as np +import torch +import shutil +import os +import matplotlib.pyplot as plt +import cv2 +import json +from PIL import Image +import pickle +from skimage.transform import resize +from utils.dataset_prepare import split_data, save_fileLabel + +def FGADR_split(): + pkl_path = './files_split/fgadr_pkl_file.pkl' # change your path here + path = "./dataset_demo/FGADR" + f = open(pkl_path, 'rb') + a = pickle.load(f) + a_key = a.keys() + B = ["train", "test"] + C = ["Training", "Testing"] + for index, i in enumerate(B): + print(i) + print(len(a[i])) + folder_type = os.path.join(path, i) + if os.path.exists(folder_type.replace(i, C[index])): + shutil.rmtree(os.path.join(path, C[index])) + os.mkdir(os.path.join(path, C[index])) + for j in a[i]: + folder_class = os.path.join(folder_type, str(j[1])) + if not os.path.exists(folder_class.replace(i, C[index])): + os.mkdir(folder_class.replace(i, C[index])) + file = j[0].replace("/mnt/sda/haal02-data/FGADR-Seg-Set", "./dataset_demo/FGADR") + img = cv2.imread(file) + img = resize(img, (512, 512), order=0, preserve_range=True, anti_aliasing=False).astype('uint8') + #/home/caduser/Foundmed_Experiment/Classification/FGADR/Seg-set/Original_Images/0001_2.png + name_img = file.split("/")[-1] + #print(os.path.join(folder_class.replace(i, C[index]))) + cv2.imwrite(os.path.join(folder_class.replace(i, C[index]), name_img), img) \ No newline at end of file diff --git a/datasets_split/Kvasir_split.py b/datasets_split/Kvasir_split.py new file mode 100644 
index 0000000000000000000000000000000000000000..48fda674fb94b8ad4d3c1a2e7264c722cc0c799e --- /dev/null +++ b/datasets_split/Kvasir_split.py @@ -0,0 +1,19 @@ +import numpy as np +import torch +import shutil +import os +import matplotlib.pyplot as plt +import cv2 +import json +from PIL import Image +import pickle +from skimage.transform import resize +from utils.dataset_prepare import split_data, save_fileLabel + +def Kvasir_split(): + dataset_name = "Kvasir" + file = "./dataset_demo/Kvasir" + if os.path.exists(os.path.join(file, "masks")): + os.rename(os.path.join(file, "masks"), os.path.join(file, "labels")) + split_data(dataset_name) + save_fileLabel(dataset_name) \ No newline at end of file diff --git a/datasets_split/MMWHS_CT_Heart_split.py b/datasets_split/MMWHS_CT_Heart_split.py new file mode 100644 index 0000000000000000000000000000000000000000..e7d17d1ca528f2339676fd9df0a34788c838252f --- /dev/null +++ b/datasets_split/MMWHS_CT_Heart_split.py @@ -0,0 +1,127 @@ +import math +import os +import random +import numpy as np +from torch.utils.data import Dataset +import nibabel +from scipy import ndimage +import glob +from skimage.io import imsave +from utils.dataset_prepare import split_data, save_fileLabel_3D + +# input image is the volume +def __itensity_normalize_one_volume__(image): + # normalization following Med3D + top_per = np.percentile(image, 99.5) + bot_per = np.percentile(image, 0.5) + image[image > top_per] = top_per + image[image < bot_per] = bot_per + image = (image - np.mean(image)) / np.std(image) + image = image / 10.0 + image[image < 0] = 0.0 + image[image > 1] = 1.0 + return image + + +def __training_data_process__(data, label): + # crop data according net input size + data = data.get_fdata() + label = label.get_fdata() + + # normalization datas + data = __itensity_normalize_one_volume__(data) + + # changing label values + label[label == 205] = 30 + label[label == 420] = 60 + label[label == 500] = 90 + label[label == 550] = 120 + label[label == 600] = 150 + label[label == 820] = 180 + label[label == 850] = 210 + + return data, label + + +def preprocess_vol(img_name, label_name): + + assert os.path.isfile(img_name) + assert os.path.isfile(label_name) + + img = nibabel.load(img_name) # We have transposed the data from WHD format to DHW + assert img is not None + mask = nibabel.load(label_name) + assert mask is not None + + img_array, mask_array = __training_data_process__(img, mask) + assert img_array.shape == mask_array.shape, "img shape:{} is not equal to mask shape:{}".format(img_array.shape, mask_array.shape) + + return (img_array*255).astype('uint8'), mask_array.astype('uint8') + +#if __name__ == '__main__': +def MMWHS_CT_Heart_split(): + dataset_name = "MMWHS_CT_Heart" + ### Training set + data_dir = './dataset_demo/MMWHS_CT_Heart/Raw/train/' + img_fold_list = os.listdir(data_dir) + dest_dir = './dataset_demo/MMWHS_CT_Heart/train/' # dir for saving train images + dest_dir_label = './dataset_demo/MMWHS_CT_Heart/train_labels/' + if not os.path.exists(dest_dir): + os.makedirs(dest_dir) + if not os.path.exists(dest_dir_label): + os.makedirs(dest_dir_label) + + for vol_name in img_fold_list: + if 'label' in vol_name: + continue + mask_name = os.path.join(data_dir, vol_name).replace('image','label') + img_flair, mask = preprocess_vol(os.path.join(data_dir, vol_name), mask_name) + print(img_flair.shape, mask.shape) + # img_array.shape[2] is the length of depth dimension + for depth in range(0, img_flair.shape[2]): + imsave(os.path.join(dest_dir, vol_name.split('.')[0] + 
'_frame_' + str(depth).zfill(3) + '.png'), img_flair[:, :, depth], check_contrast=False) + imsave(os.path.join(dest_dir_label, vol_name.replace('image','label').split('.')[0] + '_frame_' + str(depth).zfill(3) + '.png'), mask[:, :, depth], check_contrast=False) + + ### Validation set + data_dir = './dataset_demo/MMWHS_CT_Heart/Raw/valid/' + img_fold_list = os.listdir(data_dir) + dest_dir = './dataset_demo/MMWHS_CT_Heart/valid/' + dest_dir_label = './dataset_demo/MMWHS_CT_Heart/valid_labels/' + if not os.path.exists(dest_dir): + os.makedirs(dest_dir) + if not os.path.exists(dest_dir_label): + os.makedirs(dest_dir_label) + + for vol_name in img_fold_list: + if 'label' in vol_name: + continue + mask_name = os.path.join(data_dir, vol_name).replace('image','label') + img_flair, mask = preprocess_vol(os.path.join(data_dir, vol_name), mask_name) + print(img_flair.shape, mask.shape) + # img_array.shape[2] is the length of depth dimension + for depth in range(0, img_flair.shape[2]): + imsave(os.path.join(dest_dir, vol_name.split('.')[0] + '_frame_' + str(depth).zfill(3) + '.png'), img_flair[:, :, depth], check_contrast=False) + imsave(os.path.join(dest_dir_label, vol_name.replace('image','label').split('.')[0] + '_frame_' + str(depth).zfill(3) + '.png'), mask[:, :, depth], check_contrast=False) + + ### Testing set + data_dir = './dataset_demo/MMWHS_CT_Heart/Raw/test/' + img_fold_list = os.listdir(data_dir) + dest_dir = './dataset_demo/MMWHS_CT_Heart/test/' + dest_dir_label = './dataset_demo/MMWHS_CT_Heart/test_labels/' + if not os.path.exists(dest_dir): + os.makedirs(dest_dir) + if not os.path.exists(dest_dir_label): + os.makedirs(dest_dir_label) + + for vol_name in img_fold_list: + if 'label' in vol_name: + continue + mask_name = os.path.join(data_dir, vol_name).replace('image','label') + img_flair, mask = preprocess_vol(os.path.join(data_dir, vol_name), mask_name) + print(img_flair.shape, mask.shape) + # img_array.shape[2] is the length of depth dimension + for depth in range(0, img_flair.shape[2]): + imsave(os.path.join(dest_dir, vol_name.split('.')[0] + '_frame_' + str(depth).zfill(3) + '.png'), img_flair[:, :, depth], check_contrast=False) + imsave(os.path.join(dest_dir_label, vol_name.replace('image','label').split('.')[0] + '_frame_' + str(depth).zfill(3) + '.png'), mask[:, :, depth], check_contrast=False) + + save_fileLabel_3D(dataset_name) \ No newline at end of file diff --git a/datasets_split/MMWHS_MR_Heart_split.py b/datasets_split/MMWHS_MR_Heart_split.py new file mode 100644 index 0000000000000000000000000000000000000000..d1508248815567ca245ab644f70037807518bf97 --- /dev/null +++ b/datasets_split/MMWHS_MR_Heart_split.py @@ -0,0 +1,128 @@ +import math +import os +import random +import numpy as np +from torch.utils.data import Dataset +import nibabel +from scipy import ndimage +import glob +from skimage.io import imsave +from utils.dataset_prepare import split_data, save_fileLabel_3D + +# input image is the volume +def __itensity_normalize_one_volume__(image): + # normalization following Med3D + top_per = np.percentile(image, 99.5) + bot_per = np.percentile(image, 0.5) + image[image > top_per] = top_per + image[image < bot_per] = bot_per + image = (image - np.mean(image)) / np.std(image) + image = image / 10.0 + image[image < 0] = 0.0 + image[image > 1] = 1.0 + return image + + +def __training_data_process__(data, label): + # crop data according net input size + data = data.get_fdata() + label = label.get_fdata() + + # normalization datas + data = __itensity_normalize_one_volume__(data) + 
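+ # Raw MMWHS label codes (205, 420, ..., 850) are remapped to multiples of 30 so every class fits in the uint8 PNG mask slices saved below; the 2D dataloader later maps 30-210 back to classes 1-7.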
+ # changing label values + label[label == 205] = 30 + label[label == 420] = 60 + label[label == 500] = 90 + label[label == 550] = 120 + label[label == 600] = 150 + label[label == 820] = 180 + label[label == 850] = 210 + + return data, label + + +def preprocess_vol(img_name, label_name): + + assert os.path.isfile(img_name) + assert os.path.isfile(label_name) + + img = nibabel.load(img_name) # We have transposed the data from WHD format to DHW + assert img is not None + mask = nibabel.load(label_name) + assert mask is not None + + img_array, mask_array = __training_data_process__(img, mask) + assert img_array.shape == mask_array.shape, "img shape:{} is not equal to mask shape:{}".format(img_array.shape, mask_array.shape) + + return (img_array*255).astype('uint8'), mask_array.astype('uint8') + +#if __name__ == '__main__': +def MMWHS_MR_Heart_split(): + dataset_name = "MMWHS_MR_Heart" + ### Training set + data_dir = './dataset_demo/MMWHS_MR_Heart/Raw/train/' + img_fold_list = os.listdir(data_dir) + dest_dir = './dataset_demo/MMWHS_MR_Heart/train/' # dir for saving train images + dest_dir_label = './dataset_demo/MMWHS_MR_Heart/train_labels/' + if not os.path.exists(dest_dir): + os.makedirs(dest_dir) + if not os.path.exists(dest_dir_label): + os.makedirs(dest_dir_label) + + for vol_name in img_fold_list: + if 'label' in vol_name: + continue + mask_name = os.path.join(data_dir, vol_name).replace('image','label') + img_flair, mask = preprocess_vol(os.path.join(data_dir, vol_name), mask_name) + print(img_flair.shape, mask.shape) + # img_array.shape[2] is the length of depth dimension + for depth in range(0, img_flair.shape[2]): + imsave(os.path.join(dest_dir, vol_name.split('.')[0] + '_frame_' + str(depth).zfill(3) + '.png'), img_flair[:, :, depth], check_contrast=False) + imsave(os.path.join(dest_dir_label, vol_name.replace('image','label').split('.')[0] + '_frame_' + str(depth).zfill(3) + '.png'), mask[:, :, depth], check_contrast=False) + + ### Validation set + data_dir = './dataset_demo/MMWHS_MR_Heart/Raw/valid/' + img_fold_list = os.listdir(data_dir) + dest_dir = './dataset_demo/MMWHS_MR_Heart/valid/' + dest_dir_label = './dataset_demo/MMWHS_MR_Heart/valid_labels/' + if not os.path.exists(dest_dir): + os.makedirs(dest_dir) + if not os.path.exists(dest_dir_label): + os.makedirs(dest_dir_label) + + for vol_name in img_fold_list: + if 'label' in vol_name: + continue + mask_name = os.path.join(data_dir, vol_name).replace('image','label') + img_flair, mask = preprocess_vol(os.path.join(data_dir, vol_name), mask_name) + print(img_flair.shape, mask.shape) + # img_array.shape[2] is the length of depth dimension + for depth in range(0, img_flair.shape[2]): + imsave(os.path.join(dest_dir, vol_name.split('.')[0] + '_frame_' + str(depth).zfill(3) + '.png'), img_flair[:, :, depth], check_contrast=False) + imsave(os.path.join(dest_dir_label, vol_name.replace('image','label').split('.')[0] + '_frame_' + str(depth).zfill(3) + '.png'), mask[:, :, depth], check_contrast=False) + + ### Testing set + data_dir = './dataset_demo/MMWHS_MR_Heart/Raw/test/' + img_fold_list = os.listdir(data_dir) + dest_dir = './dataset_demo/MMWHS_MR_Heart/test/' + dest_dir_label = './dataset_demo/MMWHS_MR_Heart/test_labels/' + if not os.path.exists(dest_dir): + os.makedirs(dest_dir) + if not os.path.exists(dest_dir_label): + os.makedirs(dest_dir_label) + + for vol_name in img_fold_list: + if 'label' in vol_name: + continue + mask_name = os.path.join(data_dir, vol_name).replace('image','label') + img_flair, mask = 
preprocess_vol(os.path.join(data_dir, vol_name), mask_name) + print(img_flair.shape, mask.shape) + # img_array.shape[2] is the length of depth dimension + for depth in range(0, img_flair.shape[2]): + imsave(os.path.join(dest_dir, vol_name.split('.')[0] + '_frame_' + str(depth).zfill(3) + '.png'), img_flair[:, :, depth], check_contrast=False) + imsave(os.path.join(dest_dir_label, vol_name.replace('image','label').split('.')[0] + '_frame_' + str(depth).zfill(3) + '.png'), mask[:, :, depth], check_contrast=False) + + + save_fileLabel_3D(dataset_name) \ No newline at end of file diff --git a/datasets_split/__init__.py b/datasets_split/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/evaluate.py b/evaluate.py new file mode 100644 index 0000000000000000000000000000000000000000..d50345fccc7c1876c6141127e988b88af849aa7d --- /dev/null +++ b/evaluate.py @@ -0,0 +1,136 @@ +import torch +import torch.nn.functional as F +from tqdm import tqdm + +from utils.endtoend import multiclass_dice_coeff, multiclass_iou +num_classes = 8 + +def evaluate(net, dataloader, device, eval_class): + net.eval() + num_val_batches = len(dataloader) + dice_score = 0 + iou_score = 0 + # iterate over the validation set + for batch in tqdm(dataloader, desc='Validation round', unit='batch', leave=False): + image, mask_true = batch['image'], batch['mask_ete'] + # move images and labels to correct device and type + image = image.to(device=device, dtype=torch.float32) + mask_true = mask_true.to(device=device, dtype=torch.long) + #mask_true[mask_true == 4] = 3 + #mask_true[mask_true > 4] = 0 + mask_true_vector = F.one_hot(mask_true, num_classes).permute(0, 3 , 1, 2).float() + with torch.no_grad(): + # predict the mask + mask_pred = net(image) + mask_pred = mask_pred.argmax(dim=1) + mask_pred_vector = F.one_hot(mask_pred, num_classes).permute(0, 3 , 1, 2).float() + # compute the Dice score, ignoring background + dice_score += multiclass_dice_coeff(mask_pred_vector[:, eval_class, ...], mask_true_vector[:, eval_class, ...], + reduce_batch_first=False) + iou_score += multiclass_iou(mask_pred_vector[:,eval_class, ...], mask_true_vector[:, eval_class, ...]) + + net.train() + return dice_score / num_val_batches, iou_score/ num_val_batches + + +def evaluate_3d_iou(net, dataset, device, eval_class): + net.eval() + iou_score = 0 + # iterate over the validation set + num_items = 0 + for image_3d in tqdm(dataset.get_3d_iter(), desc='3D Evaluation', unit='image(s)', leave=False): + image, mask_true = image_3d['image'], image_3d['mask_ete'] + num_items += 1 + # move images and labels to correct device and type + + image = image.to(device=device, dtype=torch.float32) + mask_true = mask_true.to(device=device, dtype=torch.long) + mask_true_vector = F.one_hot(mask_true, num_classes).permute(0, 3, 1, 2).float() + + with torch.no_grad(): + # predict the mask + mask_pred = net(image) + mask_pred = mask_pred.argmax(dim=1) + mask_pred_vector = F.one_hot(mask_pred, num_classes).permute(0, 3, 1, 2).float() + iou_score += multiclass_iou(mask_pred_vector[:, eval_class, ...], mask_true_vector[:, eval_class, ...], reduce_batch_first=True) + net.train() + return iou_score/num_items + +def evaluate_3d_iou_large(net, dataset, device, eval_class): + net.eval() + iou_score = 0 + # iterate over the validation set + num_items = 0 + for image_3d in tqdm(dataset.get_3d_iter(), desc='3D Evaluation', unit='image(s)', leave=False): + image, mask_true = image_3d['image'], image_3d['mask'] 
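+ # note: unlike evaluate_3d_iou, this variant reads the 'mask' key rather than 'mask_ete', so it presumably expects a 3D iterator that yields masks under 'mask'.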
+ num_items += 1 + # move images and labels to correct device and type + + image = image.to(device=device) + mask_true = mask_true.to(device=device) + mask_true_vector = F.one_hot(mask_true, num_classes).permute(0, 3, 1, 2).float() + + net.to(device=device) + with torch.no_grad(): + # predict the mask + mask_pred = net(image) + mask_pred = mask_pred.argmax(dim=1) + mask_pred_vector = F.one_hot(mask_pred, num_classes).permute(0, 3, 1, 2).float() + iou_score += multiclass_iou(mask_pred_vector[:, eval_class, ...], mask_true_vector[:, eval_class, ...], reduce_batch_first=True) + net.train() + return iou_score/num_items + +def evaluate_3d_iou_fast(net, dataset, device, eval_class): + """ + This function is similar as evaluate_3d_iou but get a batch size in shape [batch_size, dimension, W, H] + :param net: + :param dataset: + :param device: + :param eval_class: + :return: + """ + net.eval() + iou_score = 0 + # iterate over the validation set + num_items = 0 + for image_3d in tqdm(dataset, desc='3D Evaluation', unit='image(s)', leave=False): + image, mask_true = image_3d['image'][0], image_3d['mask'][0] + # print ("Image and mask shapes in 3D evaluation are {}, {}".format(image.shape, mask_true.shape)) + num_items += 1 + # move images and labels to correct device and type + + image = image.to(device=device, dtype=torch.float32) + mask_true = mask_true.to(device=device, dtype=torch.long) + mask_true_vector = F.one_hot(mask_true, num_classes).permute(0, 3, 1, 2).float() + + with torch.no_grad(): + # predict the mask + mask_pred = net(image) + mask_pred = mask_pred.argmax(dim=1) + mask_pred_vector = F.one_hot(mask_pred, num_classes).permute(0, 3, 1, 2).float() + iou_score += multiclass_iou(mask_pred_vector[:, eval_class, ...], mask_true_vector[:, eval_class, ...], reduce_batch_first=True) + net.train() + return iou_score/num_items + +def evaluate_3d_dice(net, dataset, device, eval_class): + net.eval() + dice_score = 0 + # iterate over the validation set + num_items = 0 + for image_3d in tqdm(dataset.get_3d_iter(), desc='3D Evaluation', unit='image(s)', leave=False): + image, mask_true = image_3d['image'], image_3d['mask_ete'] + num_items += 1 + # move images and labels to correct device and type + + image = image.to(device=device, dtype=torch.float32) + mask_true = mask_true.to(device=device, dtype=torch.long) + mask_true_vector = F.one_hot(mask_true, num_classes).permute(0, 3, 1, 2).float() + + with torch.no_grad(): + # predict the mask + mask_pred = net(image) + mask_pred = mask_pred.argmax(dim=1) + mask_pred_vector = F.one_hot(mask_pred, num_classes).permute(0, 3, 1, 2).float() + dice_score += multiclass_dice_coeff(mask_pred_vector[:, eval_class, ...], mask_true_vector[:, eval_class, ...], reduce_batch_first=True) + net.train() + return dice_score/num_items diff --git a/files_split/BUID.json b/files_split/BUID.json new file mode 100644 index 0000000000000000000000000000000000000000..a27f57409a67c3a8b04db0ffb7949290914d6a15 --- /dev/null +++ b/files_split/BUID.json @@ -0,0 +1 @@ +{"train": ["benign (298).png", "benign (420).png", "benign (167).png", "benign (102).png", "benign (256).png", "benign (128).png", "benign (405).png", "benign (273).png", "benign (158).png", "benign (265).png", "malignant (113).png", "benign (41).png", "benign (152).png", "benign (304).png", "benign (151).png", "malignant (191).png", "benign (91).png", "benign (46).png", "benign (105).png", "malignant (170).png", "benign (396).png", "benign (261).png", "benign (412).png", "benign (284).png", "benign (71).png", "malignant 
(116).png", "benign (222).png", "malignant (3).png", "benign (28).png", "malignant (77).png", "benign (185).png", "benign (1).png", "benign (309).png", "benign (361).png", "benign (154).png", "malignant (86).png", "benign (172).png", "benign (372).png", "benign (225).png", "malignant (42).png", "malignant (201).png", "malignant (27).png", "benign (124).png", "malignant (104).png", "benign (43).png", "benign (98).png", "malignant (19).png", "benign (97).png", "malignant (92).png", "benign (136).png", "malignant (8).png", "malignant (74).png", "malignant (136).png", "benign (419).png", "malignant (97).png", "malignant (68).png", "malignant (51).png", "malignant (203).png", "benign (409).png", "malignant (38).png", "benign (242).png", "benign (354).png", "benign (291).png", "benign (162).png", "benign (392).png", "malignant (141).png", "malignant (40).png", "benign (79).png", "malignant (210).png", "benign (266).png", "benign (64).png", "malignant (165).png", "malignant (21).png", "benign (140).png", "benign (369).png", "benign (88).png", "benign (181).png", "malignant (174).png", "malignant (144).png", "benign (399).png", "malignant (87).png", "benign (13).png", "benign (57).png", "benign (130).png", "benign (293).png", "benign (302).png", "malignant (150).png", "benign (76).png", "malignant (186).png", "malignant (151).png", "benign (259).png", "benign (16).png", "malignant (1).png", "benign (320).png", "malignant (16).png", "malignant (70).png", "benign (104).png", "malignant (71).png", "malignant (13).png", "benign (112).png", "benign (192).png", "benign (183).png", "malignant (47).png", "malignant (125).png", "malignant (34).png", "benign (4).png", "benign (282).png", "malignant (188).png", "benign (50).png", "benign (285).png", "malignant (168).png", "benign (235).png", "benign (286).png", "benign (166).png", "malignant (115).png", "malignant (196).png", "benign (237).png", "benign (281).png", "malignant (156).png", "malignant (57).png", "benign (327).png", "benign (338).png", "benign (290).png", "benign (306).png", "malignant (95).png", "malignant (7).png", "malignant (145).png", "benign (328).png", "malignant (181).png", "benign (375).png", "benign (398).png", "benign (173).png", "benign (278).png", "benign (20).png", "benign (109).png", "malignant (157).png", "benign (288).png", "malignant (62).png", "benign (312).png", "benign (153).png", "benign (171).png", "malignant (140).png", "benign (305).png", "benign (418).png", "benign (268).png", "malignant (162).png", "malignant (41).png", "benign (344).png", "malignant (106).png", "malignant (52).png", "benign (160).png", "benign (395).png", "benign (86).png", "malignant (190).png", "benign (100).png", "benign (114).png", "benign (429).png", "benign (201).png", "malignant (6).png", "benign (23).png", "benign (211).png", "benign (299).png", "malignant (207).png", "benign (143).png", "benign (18).png", "benign (287).png", "malignant (209).png", "malignant (202).png", "benign (127).png", "benign (123).png", "benign (103).png", "malignant (14).png", "benign (422).png", "benign (255).png", "benign (49).png", "malignant (114).png", "benign (332).png", "benign (189).png", "benign (209).png", "malignant (131).png", "benign (414).png", "benign (415).png", "benign (31).png", "benign (246).png", "benign (56).png", "malignant (194).png", "benign (339).png", "benign (121).png", "malignant (46).png", "malignant (130).png", "malignant (81).png", "benign (413).png", "benign (406).png", "benign (208).png", "benign (176).png", "malignant (45).png", 
"benign (221).png", "benign (303).png", "benign (436).png", "benign (257).png", "benign (250).png", "malignant (103).png", "malignant (142).png", "malignant (88).png", "benign (371).png", "benign (424).png", "benign (417).png", "benign (294).png", "benign (411).png", "benign (384).png", "benign (334).png", "benign (126).png", "malignant (180).png", "malignant (25).png", "benign (81).png", "malignant (169).png", "benign (69).png", "benign (270).png", "malignant (96).png", "malignant (155).png", "benign (315).png", "benign (131).png", "benign (25).png", "benign (199).png", "benign (67).png", "malignant (84).png", "malignant (206).png", "benign (2).png", "benign (39).png", "benign (99).png", "benign (343).png", "benign (358).png", "benign (7).png", "benign (226).png", "benign (212).png", "benign (425).png", "benign (427).png", "benign (279).png", "malignant (161).png", "benign (89).png", "benign (219).png", "malignant (152).png", "benign (317).png", "benign (231).png", "benign (8).png", "malignant (138).png", "benign (17).png", "benign (170).png", "benign (32).png", "benign (297).png", "benign (117).png", "benign (416).png", "malignant (5).png", "benign (397).png", "benign (108).png", "benign (434).png", "benign (355).png", "malignant (184).png", "benign (408).png", "benign (230).png", "benign (323).png", "malignant (65).png", "benign (421).png", "benign (80).png", "benign (83).png", "benign (274).png", "benign (94).png", "benign (179).png", "malignant (54).png", "benign (352).png", "benign (391).png", "benign (65).png", "benign (296).png", "malignant (189).png", "benign (129).png", "benign (85).png", "benign (254).png", "malignant (83).png", "malignant (60).png", "benign (163).png", "malignant (35).png", "benign (161).png", "benign (340).png", "benign (262).png", "benign (356).png", "malignant (39).png", "benign (264).png", "benign (370).png", "benign (349).png", "benign (142).png", "benign (321).png", "benign (435).png", "malignant (199).png", "benign (74).png", "malignant (166).png", "malignant (59).png", "benign (14).png", "benign (203).png", "benign (324).png", "benign (55).png", "benign (186).png", "malignant (171).png", "malignant (99).png", "malignant (111).png", "malignant (107).png", "malignant (153).png", "benign (150).png", "malignant (26).png", "benign (267).png", "malignant (56).png", "malignant (73).png", "malignant (11).png", "malignant (90).png", "benign (47).png", "malignant (37).png", "malignant (101).png", "benign (386).png", "benign (301).png", "benign (336).png", "malignant (124).png", "benign (195).png", "benign (24).png", "malignant (167).png", "malignant (94).png", "malignant (204).png", "benign (59).png", "benign (138).png", "benign (190).png", "benign (144).png", "malignant (160).png", "benign (238).png", "malignant (159).png", "malignant (175).png", "malignant (67).png", "benign (70).png", "malignant (43).png", "benign (44).png", "benign (373).png", "benign (118).png", "benign (248).png", "malignant (102).png", "benign (178).png", "benign (93).png", "benign (45).png", "benign (394).png", "malignant (18).png", "malignant (55).png", "benign (271).png", "benign (229).png", "benign (58).png", "benign (300).png", "benign (423).png", "malignant (28).png", "malignant (183).png", "benign (27).png", "benign (381).png", "benign (132).png", "benign (430).png", "benign (342).png", "malignant (128).png", "malignant (164).png", "malignant (4).png", "benign (310).png", "malignant (64).png", "benign (29).png", "malignant (89).png", "benign (137).png", "benign (390).png", "benign 
(330).png", "benign (387).png", "malignant (72).png", "malignant (121).png", "benign (364).png", "malignant (20).png", "benign (116).png", "benign (359).png", "malignant (147).png", "benign (251).png", "benign (164).png", "benign (146).png", "benign (35).png", "benign (107).png", "malignant (137).png", "malignant (78).png", "benign (345).png", "benign (62).png", "benign (15).png", "benign (84).png", "benign (252).png", "benign (42).png", "benign (311).png", "benign (34).png", "benign (210).png", "malignant (143).png", "benign (33).png", "benign (68).png", "malignant (200).png", "benign (77).png", "malignant (33).png", "benign (245).png", "benign (96).png", "benign (110).png", "benign (407).png", "benign (234).png", "malignant (100).png", "benign (403).png", "benign (9).png", "malignant (22).png", "benign (223).png", "benign (61).png", "benign (63).png", "benign (155).png", "benign (240).png"], "valid": ["benign (205).png", "benign (115).png", "benign (382).png", "benign (214).png", "malignant (50).png", "malignant (172).png", "benign (433).png", "benign (37).png", "benign (215).png", "malignant (61).png", "benign (182).png", "benign (308).png", "malignant (176).png", "benign (125).png", "malignant (17).png", "benign (159).png", "benign (12).png", "malignant (135).png", "benign (277).png", "malignant (119).png", "benign (187).png", "malignant (192).png", "benign (53).png", "malignant (105).png", "benign (202).png", "benign (147).png", "benign (21).png", "benign (206).png", "benign (329).png", "benign (101).png", "benign (200).png", "benign (404).png", "malignant (126).png", "benign (236).png", "malignant (44).png", "benign (360).png", "benign (135).png", "malignant (98).png", "benign (184).png", "benign (307).png", "benign (60).png", "malignant (146).png", "benign (145).png", "benign (36).png", "benign (243).png", "malignant (80).png", "benign (289).png", "benign (11).png", "malignant (49).png", "benign (95).png", "benign (48).png", "benign (260).png", "malignant (69).png", "malignant (197).png", "malignant (66).png", "malignant (129).png", "benign (52).png", "malignant (29).png", "benign (353).png", "benign (139).png", "benign (314).png", "benign (224).png", "benign (428).png", "benign (175).png", "malignant (10).png", "benign (180).png", "benign (122).png", "benign (220).png", "malignant (117).png", "malignant (53).png", "benign (51).png", "malignant (63).png", "malignant (9).png", "benign (362).png", "benign (87).png", "malignant (108).png", "benign (377).png", "malignant (12).png", "benign (233).png", "malignant (149).png", "benign (383).png", "malignant (177).png", "benign (295).png", "benign (258).png", "malignant (139).png", "malignant (112).png", "benign (432).png", "benign (319).png", "malignant (173).png", "malignant (122).png", "malignant (127).png", "malignant (195).png", "benign (196).png", "benign (111).png", "malignant (123).png", "benign (40).png", "benign (241).png", "benign (325).png", "benign (204).png", "malignant (75).png", "benign (197).png", "benign (272).png", "benign (400).png", "benign (3).png"], "test": ["malignant (109).png", "benign (275).png", "benign (188).png", "benign (90).png", "benign (431).png", "malignant (2).png", "benign (141).png", "benign (263).png", "benign (75).png", "malignant (179).png", "benign (10).png", "malignant (133).png", "benign (244).png", "benign (385).png", "malignant (118).png", "malignant (163).png", "benign (388).png", "benign (376).png", "benign (276).png", "benign (156).png", "benign (26).png", "malignant (198).png", "benign 
(78).png", "benign (380).png", "benign (326).png", "benign (38).png", "benign (357).png", "benign (227).png", "benign (217).png", "benign (351).png", "malignant (110).png", "benign (393).png", "benign (366).png", "malignant (32).png", "malignant (148).png", "benign (347).png", "benign (368).png", "benign (113).png", "benign (149).png", "benign (216).png", "malignant (79).png", "benign (247).png", "benign (6).png", "malignant (178).png", "benign (280).png", "benign (316).png", "benign (66).png", "benign (378).png", "malignant (15).png", "benign (92).png", "benign (341).png", "benign (249).png", "benign (269).png", "benign (198).png", "malignant (23).png", "benign (333).png", "benign (374).png", "benign (22).png", "benign (318).png", "benign (30).png", "benign (133).png", "benign (213).png", "benign (120).png", "benign (19).png", "benign (218).png", "benign (191).png", "benign (350).png", "malignant (58).png", "benign (207).png", "benign (348).png", "malignant (82).png", "benign (379).png", "malignant (31).png", "benign (157).png", "benign (401).png", "malignant (24).png", "malignant (85).png", "benign (363).png", "malignant (36).png", "benign (193).png", "benign (313).png", "malignant (120).png", "benign (5).png", "benign (335).png", "benign (426).png", "benign (174).png", "benign (410).png", "benign (148).png", "benign (253).png", "benign (346).png", "benign (331).png", "malignant (30).png", "benign (194).png", "benign (134).png", "malignant (91).png", "malignant (187).png", "benign (54).png", "benign (389).png", "benign (402).png", "malignant (205).png", "benign (73).png", "malignant (154).png", "benign (232).png", "benign (169).png", "benign (437).png", "malignant (93).png", "benign (168).png", "benign (365).png", "benign (106).png", "benign (292).png", "malignant (208).png", "benign (367).png", "malignant (48).png", "malignant (158).png", "benign (82).png", "benign (239).png", "malignant (185).png", "benign (119).png", "malignant (182).png", "malignant (132).png", "benign (283).png", "benign (322).png", "malignant (193).png", "malignant (134).png", "benign (177).png", "benign (337).png", "benign (228).png", "benign (165).png", "benign (72).png", "malignant (76).png"], "train_labels": ["benign (298).png", "benign (420).png", "benign (167).png", "benign (102).png", "benign (256).png", "benign (128).png", "benign (405).png", "benign (273).png", "benign (158).png", "benign (265).png", "malignant (113).png", "benign (41).png", "benign (152).png", "benign (304).png", "benign (151).png", "malignant (191).png", "benign (91).png", "benign (46).png", "benign (105).png", "malignant (170).png", "benign (396).png", "benign (261).png", "benign (412).png", "benign (284).png", "benign (71).png", "malignant (116).png", "benign (222).png", "malignant (3).png", "benign (28).png", "malignant (77).png", "benign (185).png", "benign (1).png", "benign (309).png", "benign (361).png", "benign (154).png", "malignant (86).png", "benign (172).png", "benign (372).png", "benign (225).png", "malignant (42).png", "malignant (201).png", "malignant (27).png", "benign (124).png", "malignant (104).png", "benign (43).png", "benign (98).png", "malignant (19).png", "benign (97).png", "malignant (92).png", "benign (136).png", "malignant (8).png", "malignant (74).png", "malignant (136).png", "benign (419).png", "malignant (97).png", "malignant (68).png", "malignant (51).png", "malignant (203).png", "benign (409).png", "malignant (38).png", "benign (242).png", "benign (354).png", "benign (291).png", "benign (162).png", "benign 
(392).png", "malignant (141).png", "malignant (40).png", "benign (79).png", "malignant (210).png", "benign (266).png", "benign (64).png", "malignant (165).png", "malignant (21).png", "benign (140).png", "benign (369).png", "benign (88).png", "benign (181).png", "malignant (174).png", "malignant (144).png", "benign (399).png", "malignant (87).png", "benign (13).png", "benign (57).png", "benign (130).png", "benign (293).png", "benign (302).png", "malignant (150).png", "benign (76).png", "malignant (186).png", "malignant (151).png", "benign (259).png", "benign (16).png", "malignant (1).png", "benign (320).png", "malignant (16).png", "malignant (70).png", "benign (104).png", "malignant (71).png", "malignant (13).png", "benign (112).png", "benign (192).png", "benign (183).png", "malignant (47).png", "malignant (125).png", "malignant (34).png", "benign (4).png", "benign (282).png", "malignant (188).png", "benign (50).png", "benign (285).png", "malignant (168).png", "benign (235).png", "benign (286).png", "benign (166).png", "malignant (115).png", "malignant (196).png", "benign (237).png", "benign (281).png", "malignant (156).png", "malignant (57).png", "benign (327).png", "benign (338).png", "benign (290).png", "benign (306).png", "malignant (95).png", "malignant (7).png", "malignant (145).png", "benign (328).png", "malignant (181).png", "benign (375).png", "benign (398).png", "benign (173).png", "benign (278).png", "benign (20).png", "benign (109).png", "malignant (157).png", "benign (288).png", "malignant (62).png", "benign (312).png", "benign (153).png", "benign (171).png", "malignant (140).png", "benign (305).png", "benign (418).png", "benign (268).png", "malignant (162).png", "malignant (41).png", "benign (344).png", "malignant (106).png", "malignant (52).png", "benign (160).png", "benign (395).png", "benign (86).png", "malignant (190).png", "benign (100).png", "benign (114).png", "benign (429).png", "benign (201).png", "malignant (6).png", "benign (23).png", "benign (211).png", "benign (299).png", "malignant (207).png", "benign (143).png", "benign (18).png", "benign (287).png", "malignant (209).png", "malignant (202).png", "benign (127).png", "benign (123).png", "benign (103).png", "malignant (14).png", "benign (422).png", "benign (255).png", "benign (49).png", "malignant (114).png", "benign (332).png", "benign (189).png", "benign (209).png", "malignant (131).png", "benign (414).png", "benign (415).png", "benign (31).png", "benign (246).png", "benign (56).png", "malignant (194).png", "benign (339).png", "benign (121).png", "malignant (46).png", "malignant (130).png", "malignant (81).png", "benign (413).png", "benign (406).png", "benign (208).png", "benign (176).png", "malignant (45).png", "benign (221).png", "benign (303).png", "benign (436).png", "benign (257).png", "benign (250).png", "malignant (103).png", "malignant (142).png", "malignant (88).png", "benign (371).png", "benign (424).png", "benign (417).png", "benign (294).png", "benign (411).png", "benign (384).png", "benign (334).png", "benign (126).png", "malignant (180).png", "malignant (25).png", "benign (81).png", "malignant (169).png", "benign (69).png", "benign (270).png", "malignant (96).png", "malignant (155).png", "benign (315).png", "benign (131).png", "benign (25).png", "benign (199).png", "benign (67).png", "malignant (84).png", "malignant (206).png", "benign (2).png", "benign (39).png", "benign (99).png", "benign (343).png", "benign (358).png", "benign (7).png", "benign (226).png", "benign (212).png", "benign (425).png", 
"benign (427).png", "benign (279).png", "malignant (161).png", "benign (89).png", "benign (219).png", "malignant (152).png", "benign (317).png", "benign (231).png", "benign (8).png", "malignant (138).png", "benign (17).png", "benign (170).png", "benign (32).png", "benign (297).png", "benign (117).png", "benign (416).png", "malignant (5).png", "benign (397).png", "benign (108).png", "benign (434).png", "benign (355).png", "malignant (184).png", "benign (408).png", "benign (230).png", "benign (323).png", "malignant (65).png", "benign (421).png", "benign (80).png", "benign (83).png", "benign (274).png", "benign (94).png", "benign (179).png", "malignant (54).png", "benign (352).png", "benign (391).png", "benign (65).png", "benign (296).png", "malignant (189).png", "benign (129).png", "benign (85).png", "benign (254).png", "malignant (83).png", "malignant (60).png", "benign (163).png", "malignant (35).png", "benign (161).png", "benign (340).png", "benign (262).png", "benign (356).png", "malignant (39).png", "benign (264).png", "benign (370).png", "benign (349).png", "benign (142).png", "benign (321).png", "benign (435).png", "malignant (199).png", "benign (74).png", "malignant (166).png", "malignant (59).png", "benign (14).png", "benign (203).png", "benign (324).png", "benign (55).png", "benign (186).png", "malignant (171).png", "malignant (99).png", "malignant (111).png", "malignant (107).png", "malignant (153).png", "benign (150).png", "malignant (26).png", "benign (267).png", "malignant (56).png", "malignant (73).png", "malignant (11).png", "malignant (90).png", "benign (47).png", "malignant (37).png", "malignant (101).png", "benign (386).png", "benign (301).png", "benign (336).png", "malignant (124).png", "benign (195).png", "benign (24).png", "malignant (167).png", "malignant (94).png", "malignant (204).png", "benign (59).png", "benign (138).png", "benign (190).png", "benign (144).png", "malignant (160).png", "benign (238).png", "malignant (159).png", "malignant (175).png", "malignant (67).png", "benign (70).png", "malignant (43).png", "benign (44).png", "benign (373).png", "benign (118).png", "benign (248).png", "malignant (102).png", "benign (178).png", "benign (93).png", "benign (45).png", "benign (394).png", "malignant (18).png", "malignant (55).png", "benign (271).png", "benign (229).png", "benign (58).png", "benign (300).png", "benign (423).png", "malignant (28).png", "malignant (183).png", "benign (27).png", "benign (381).png", "benign (132).png", "benign (430).png", "benign (342).png", "malignant (128).png", "malignant (164).png", "malignant (4).png", "benign (310).png", "malignant (64).png", "benign (29).png", "malignant (89).png", "benign (137).png", "benign (390).png", "benign (330).png", "benign (387).png", "malignant (72).png", "malignant (121).png", "benign (364).png", "malignant (20).png", "benign (116).png", "benign (359).png", "malignant (147).png", "benign (251).png", "benign (164).png", "benign (146).png", "benign (35).png", "benign (107).png", "malignant (137).png", "malignant (78).png", "benign (345).png", "benign (62).png", "benign (15).png", "benign (84).png", "benign (252).png", "benign (42).png", "benign (311).png", "benign (34).png", "benign (210).png", "malignant (143).png", "benign (33).png", "benign (68).png", "malignant (200).png", "benign (77).png", "malignant (33).png", "benign (245).png", "benign (96).png", "benign (110).png", "benign (407).png", "benign (234).png", "malignant (100).png", "benign (403).png", "benign (9).png", "malignant (22).png", "benign 
(223).png", "benign (61).png", "benign (63).png", "benign (155).png", "benign (240).png"], "valid_labels": ["benign (205).png", "benign (115).png", "benign (382).png", "benign (214).png", "malignant (50).png", "malignant (172).png", "benign (433).png", "benign (37).png", "benign (215).png", "malignant (61).png", "benign (182).png", "benign (308).png", "malignant (176).png", "benign (125).png", "malignant (17).png", "benign (159).png", "benign (12).png", "malignant (135).png", "benign (277).png", "malignant (119).png", "benign (187).png", "malignant (192).png", "benign (53).png", "malignant (105).png", "benign (202).png", "benign (147).png", "benign (21).png", "benign (206).png", "benign (329).png", "benign (101).png", "benign (200).png", "benign (404).png", "malignant (126).png", "benign (236).png", "malignant (44).png", "benign (360).png", "benign (135).png", "malignant (98).png", "benign (184).png", "benign (307).png", "benign (60).png", "malignant (146).png", "benign (145).png", "benign (36).png", "benign (243).png", "malignant (80).png", "benign (289).png", "benign (11).png", "malignant (49).png", "benign (95).png", "benign (48).png", "benign (260).png", "malignant (69).png", "malignant (197).png", "malignant (66).png", "malignant (129).png", "benign (52).png", "malignant (29).png", "benign (353).png", "benign (139).png", "benign (314).png", "benign (224).png", "benign (428).png", "benign (175).png", "malignant (10).png", "benign (180).png", "benign (122).png", "benign (220).png", "malignant (117).png", "malignant (53).png", "benign (51).png", "malignant (63).png", "malignant (9).png", "benign (362).png", "benign (87).png", "malignant (108).png", "benign (377).png", "malignant (12).png", "benign (233).png", "malignant (149).png", "benign (383).png", "malignant (177).png", "benign (295).png", "benign (258).png", "malignant (139).png", "malignant (112).png", "benign (432).png", "benign (319).png", "malignant (173).png", "malignant (122).png", "malignant (127).png", "malignant (195).png", "benign (196).png", "benign (111).png", "malignant (123).png", "benign (40).png", "benign (241).png", "benign (325).png", "benign (204).png", "malignant (75).png", "benign (197).png", "benign (272).png", "benign (400).png", "benign (3).png"], "test_labels": ["malignant (109).png", "benign (275).png", "benign (188).png", "benign (90).png", "benign (431).png", "malignant (2).png", "benign (141).png", "benign (263).png", "benign (75).png", "malignant (179).png", "benign (10).png", "malignant (133).png", "benign (244).png", "benign (385).png", "malignant (118).png", "malignant (163).png", "benign (388).png", "benign (376).png", "benign (276).png", "benign (156).png", "benign (26).png", "malignant (198).png", "benign (78).png", "benign (380).png", "benign (326).png", "benign (38).png", "benign (357).png", "benign (227).png", "benign (217).png", "benign (351).png", "malignant (110).png", "benign (393).png", "benign (366).png", "malignant (32).png", "malignant (148).png", "benign (347).png", "benign (368).png", "benign (113).png", "benign (149).png", "benign (216).png", "malignant (79).png", "benign (247).png", "benign (6).png", "malignant (178).png", "benign (280).png", "benign (316).png", "benign (66).png", "benign (378).png", "malignant (15).png", "benign (92).png", "benign (341).png", "benign (249).png", "benign (269).png", "benign (198).png", "malignant (23).png", "benign (333).png", "benign (374).png", "benign (22).png", "benign (318).png", "benign (30).png", "benign (133).png", "benign (213).png", 
"benign (120).png", "benign (19).png", "benign (218).png", "benign (191).png", "benign (350).png", "malignant (58).png", "benign (207).png", "benign (348).png", "malignant (82).png", "benign (379).png", "malignant (31).png", "benign (157).png", "benign (401).png", "malignant (24).png", "malignant (85).png", "benign (363).png", "malignant (36).png", "benign (193).png", "benign (313).png", "malignant (120).png", "benign (5).png", "benign (335).png", "benign (426).png", "benign (174).png", "benign (410).png", "benign (148).png", "benign (253).png", "benign (346).png", "benign (331).png", "malignant (30).png", "benign (194).png", "benign (134).png", "malignant (91).png", "malignant (187).png", "benign (54).png", "benign (389).png", "benign (402).png", "malignant (205).png", "benign (73).png", "malignant (154).png", "benign (232).png", "benign (169).png", "benign (437).png", "malignant (93).png", "benign (168).png", "benign (365).png", "benign (106).png", "benign (292).png", "malignant (208).png", "benign (367).png", "malignant (48).png", "malignant (158).png", "benign (82).png", "benign (239).png", "malignant (185).png", "benign (119).png", "malignant (182).png", "malignant (132).png", "benign (283).png", "benign (322).png", "malignant (193).png", "malignant (134).png", "benign (177).png", "benign (337).png", "benign (228).png", "benign (165).png", "benign (72).png", "malignant (76).png"]} \ No newline at end of file diff --git a/files_split/Kvasir.json b/files_split/Kvasir.json new file mode 100644 index 0000000000000000000000000000000000000000..1c7dfacded836f9bcd65c68a5e081bc8369bf8ef --- /dev/null +++ b/files_split/Kvasir.json @@ -0,0 +1 @@ +{"train": ["cju5wphwwlu3m0987hh3ltg88.jpg", "cju8aj01yqeqm0850lhdz3xdw.jpg", "cju2lz8vqktne0993fuym6drw.jpg", "cju2rxm8rpbaf0993o3qr2oph.jpg", "cju3xga12iixg0817dijbvjxw.jpg", "cju5fb86jd1jp0755b1ukbhq5.jpg", "cju2nsmwjlzyl0993jl80chvz.jpg", "cju7fq7mm2pw508176uk5ugtx.jpg", "cju18kevfrojc0835bn90f1in.jpg", "cju2tvrvm53ws0801a0jfjdxg.jpg", "cju2qqn5ys4uo0988ewrt2ip2.jpg", "cju306x7w05nb0835cunv799x.jpg", "cju5c7oijaqmq09878qwgqv8n.jpg", "cju35oyvd3y850988km12hdz1.jpg", "cju2yljr0yzhw0988ecf271ly.jpg", "cju8c9akjsdjj0850s67uzlxq.jpg", "cju5ccpvqash50850kb4bs22k.jpg", "cju7db7lp2f400755tntd1ohf.jpg", "cju6vgdmivcvb08018fra5lnv.jpg", "cju2syxa93yw40799x2iuwabz.jpg", "cju1cvkfwqrec0993wbp1jlzm.jpg", "cju320gyvbch60801v2amdi2g.jpg", "cju7emdni2py40871ivhxjtut.jpg", "cju5f8hxdcxxn08188obby0ea.jpg", "cju2lcyfgkf5809932fn9gucn.jpg", "cju5i5oh2efg60987ez6cpf72.jpg", "cjyzkmjy8evns070165gf9dmq.jpg", "cju31y80qbawn0801twwm2l5s.jpg", "cju5bwhapakm90987c1v4z46a.jpg", "cju1hyolc7aqu0878rrkfn1lr.jpg", "cju0qx73cjw570799j4n5cjze.jpg", "cju7dbppn28nx085097654msi.jpg", "cju2i6acqvo6l0799u20fift8.jpg", "cju2raxlosl630988jdbfy9b0.jpg", "cju30xqmh0ni00835ix3batv1.jpg", "cju1alwgo30z60855fm3y23sm.jpg", "cju7fpfzq2wyf0818xxd1oziv.jpg", "cju8bk8oirjhw0817hgkua2w8.jpg", "cju34i3qvcyog0855qiejxx5w.jpg", "cju2qozsk20cq0855ugrg3cri.jpg", "cju84kplnl1y30755ropua1b0.jpg", "cju40taxlkrho0987smigg0x0.jpg", "cju18849rrsgr0988p90hkygb.jpg", "cju1cyjb5qtie0993njqne9m3.jpg", "cju33o12x2jm50988944mxq0v.jpg", "cju7cq6su27qv075574dir0r3.jpg", "cju0qoxqj9q6s0835b43399p4.jpg", "cju2igw4gvxds0878808qj398.jpg", "cju2zxja9w1eh09933609ho9z.jpg", "cju2iatlki5u309930zmgkv6h.jpg", "cju83rcnzkbsj0755x5anfrcg.jpg", "cju2xs6na81t20878pt6nkfip.jpg", "cju1fmsyf6gxb0801cimx2gle.jpg", "cju2u73dj53oz0878486k8k4b.jpg", "cju7efffp2ivf0817etg3jehl.jpg", "cju8a1jtvpt9m081712iwkca7.jpg", 
"cju85je7vlht70817c9jcjwi4.jpg", "cju5wcc90lu020850mjrxppv6.jpg", "cju1f320ewfyu0988ndz6blh5.jpg", "cju2zwg05a0oy0801yr73ig7g.jpg", "cju32phw2bv130801yj7bkouq.jpg", "cju32zhbnc1oy0801iyv1ix6p.jpg", "cju887ftknop008177nnjt46y.jpg", "cju32gzs6xo8x0993r8tedbpb.jpg", "cju0rx1idathl0835detmsp84.jpg", "cju2r7h21sj9608354gzks3ae.jpg", "cju5jz5fff8c50871hbe6108f.jpg", "cju35ldepdtlm0801yv79y8vu.jpg", "cju34uhepd3dd0799hs8782ad.jpg", "cju2otvvv0l7z0855x7we8cb0.jpg", "cju7ajnbo1gvm098749rdouk0.jpg", "cju1dnz61vfp40988e78bkjga.jpg", "cju846ec0kj7z08012o10klrb.jpg", "cju5eyfe9cpk90987laa7tsl3.jpg", "cju2zgbj9zmrw0835nnlzxj4c.jpg", "cju7dizi82h2i0755doucgnt3.jpg", "cju7dhpsc2dnn0818025m6857.jpg", "cju1dia8hvc6v098827mgffnm.jpg", "cju6x35ervu2808015c7eoqe4.jpg", "cju2upu4evw7g08358guwozxv.jpg", "cju77196iyshb0850ycbto50a.jpg", "cju87xn2snfmv0987sc3d9xnq.jpg", "cjyzl833ndne80838pzuq6ila.jpg", "cju8bj2ssrmlm0871gc2ug2rs.jpg", "cju7d5m0p23kn09871rk7pu3v.jpg", "cju8432cmkgq90871cxe4iptl.jpg", "cju34xspwzenf0993cyzajv9n.jpg", "cju5eftctcdbj08712gdp989f.jpg", "cju772304yw5t0818vbw8kkjf.jpg", "cju83u9ftk3ni0987qnhlcinv.jpg", "cju2tqfgw4oat0799rn0g5b2z.jpg", "cju2qfie4rvz508357kad9z5o.jpg", "cju2sszfq3uye0878sucelzk2.jpg", "cju1drnhbrb9409935wi7vkhg.jpg", "cju2i03ptvkiu0799xbbd4det.jpg", "cju2oo0wh0bqy0878biujeyhe.jpg", "cju1haab178i70799tk9z8y8x.jpg", "cju5nyu31gv8e0871zpk74a2n.jpg", "cju3v0fl3gwce0755qkjhzmd4.jpg", "cju2r2obh2bjm08553kng0rh7.jpg", "cju2srvy5440s0801y1ba9akr.jpg", "cju8bm24yrrdp081829mbo8ic.jpg", "cju8bh8surexp0987o5pzklk1.jpg", "cju2okvco06xc0799kxe5n1qh.jpg", "cju8ceacrsqkr0755hdz145es.jpg", "ck2bxpfgxu2mk0748gsh7xelu.jpg", "cju8cdeazsm8h0801jxifmzur.jpg", "cju2hw5gjlr5h0988so2qqres.jpg", "cju2suk42469908015ngmq6f2.jpg", "cju2p4ddkmzxj0993p94o62av.jpg", "cju88vx2uoocy075531lc63n3.jpg", "cju77re6fz5bb0817vp9redjg.jpg", "cju5wmvsdlx1j0871npgj8j4b.jpg", "cju787jnjzjuj0871p94nck9g.jpg", "cju5wkonqlrl409877y8zvnub.jpg", "cju1ffnjn6ctm08015perkg37.jpg", "cju2rpa30t07b0835im0erql0.jpg", "cju5ekty5ckzf07550c9u3ckk.jpg", "cju7awzmu1ncs0871hziy65zx.jpg", "cju7el6xv2k520817qxx9wdr5.jpg", "cju1gv7106qd008784gk603mg.jpg", "cju1dfeupuzlw0835gnxip369.jpg", "cju6xmqd9w0250817l5kxfnsk.jpg", "cju42py9mlqyd0818u3d1d7ga.jpg", "cju7eotqi2qea0871y8yc7tqh.jpg", "cju2ricdv2iys0878sv1adh0u.jpg", "cju7f4sc62xqj075597xpmuoy.jpg", "cju2zdhsczmn50988z64qwg2q.jpg", "cju2s9g11pnra0993gn4eh793.jpg", "cju88k75inzyb0850ccv5x3vk.jpg", "cju2saez63gxl08559ucjq3kt.jpg", "cju3yht87j83m08507yk1u1fg.jpg", "cju336l68y7if0993wf092166.jpg", "cjyztzaqtrv430848l8xgcerw.jpg", "cju8b8yair65w09878pyqtr96.jpg", "cju2z6ez69g4u0801qwt088lw.jpg", "cju1c0qb4tzi308355wtsnp0y.jpg", "cju5ht88gedbu0755xrcuddcx.jpg", "cju7d2q1k27nf08715zshsckt.jpg", "cju7epwj82koz098713apjnzo.jpg", "cju2rzpsmtb0f0835jabkbao1.jpg", "cju1d31sp4d4k0878r3fr02ul.jpg", "cju8bop5jrsid08716i24fqda.jpg", "cju2wve9v7esz0878mxsdcy04.jpg", "cju1b0y2e396p08558ois175d.jpg", "cju2top2ruxxy0988p1svx36g.jpg", "cju1erep75us208553i4ofwwe.jpg", "cju6v4szov55u0871qmqz3v8n.jpg", "cju76erapykj30871x5eaxh4q.jpg", "cju7ey10f2rvf0871bwbi9x82.jpg", "cju6v6g6kvdw007552x6mb0po.jpg", "cju85dx63lic408017f0l0400.jpg", "cju2hjrqcvi2j0801bx1i6gxg.jpg", "cju5huurrecm70801y680y13m.jpg", "cju2trbpkv0c00988hxla5dzz.jpg", "cju16ach3m1da0993r1dq3sn2.jpg", "cju6vifjlv55z0987un6y4zdo.jpg", "cju2qs32r1vys07999conmbvx.jpg", "cju32csyfblyh080170aa3x5p.jpg", "cju32fhnhbds40799broyoptc.jpg", "cju8bljw9rqk20801kr54akrl.jpg", "cju7fmvpk2q170987v6i3ola8.jpg", "cju7d6ux323ze0987xos3srkx.jpg", 
"cju85l4yjlops0801fvmnwptf.jpg", "cju1egh885m1l0855ci1lt37c.jpg", "cju6v5ilsv8hk0850rb5sgh6o.jpg", "cju8bzzy2s66m08016z6mouqt.jpg", "cju1djtprvd7b0988thwwrg09.jpg", "cju7aifyo1p3n07552nxjx51f.jpg", "cju2ulk385h170799rlklxob0.jpg", "cju424hy5lckr085073fva1ok.jpg", "cju5knbbqfipk080128cggukq.jpg", "cju5bf6hxa6m50817rbwettgu.jpg", "cju2tjrog4jy30878pawyazqc.jpg", "cju88rl5eo94l0850kf5wtrm1.jpg", "cju300m3s04fg0988uzupuf7z.jpg", "cju3u1c8tfyqx08503iedc3mx.jpg", "cju43mkj9m8wb0871qiadahub.jpg", "cju18ibp219ub08783i6o98g7.jpg", "cju5fydrud94708507vo6oy21.jpg", "cju8ashhnquqr0801rwduzt7d.jpg", "cju45ty6zn9oz0850qy4qnck1.jpg", "cju2y40d8ulqo0993q0adtgtb.jpg", "cju16whaj0e7n0855q7b6cjkm.jpg", "cju2wxv0hxs2f09884w48v8fi.jpg", "cju1hmff8tkp809931jps6fbr.jpg", "cju857ad0l88m0817qx4cwxnf.jpg", "cju7ekbo32pft0871fv7kzwb9.jpg", "cju8bff9nrfi10850fmfzbf8v.jpg", "cju85plp7lmkw0850rx42jdpf.jpg", "cju1ewnoh5z030855vpex9uzt.jpg", "cju5tenjojp1j0755ms4949h2.jpg", "cju2ntxtdzlvu0799xl3j9pan.jpg", "cju2zm0axztpe0988r8s9twjr.jpg", "cju3ykamdj9u208503pygyuc8.jpg", "cju15czxqp3lv0835jvhgzurz.jpg", "cju88v2f9oi8w0871hx9auh01.jpg", "cju8828oxnool0801qno9luhr.jpg", "cju2sxf3iqbpv09937iksn8ep.jpg", "cju0sr5ghl0nd08789uzf1raf.jpg", "cju43gfosm63n08714rpih8pe.jpg", "cju3ya7goj6at0818v2l5ay7f.jpg", "cju5hjxaae3i40850h5z2laf5.jpg", "cju30u1hbakn808019g15nb8b.jpg", "cju8bbznkrf5g0871jncffynk.jpg", "cju85citjlnfm0755i4rk5tqj.jpg", "cju7dqcwi2dz00850gcmr2ert.jpg", "cju87q6yoneim0871dl4phvkd.jpg", "cju35c4wzdhow0799h6eq4sgs.jpg", "cju7b1ygu1msd0801hywhy0mc.jpg", "cju8c5223s8j80850b4kealt4.jpg", "cju1bhnfitmge0835ynls0l6b.jpg", "cju5x7iskmad90818frchyfwd.jpg", "cju83vvmik9wa08710yeh7cuk.jpg", "cju87nkyrnb970801q84m47yt.jpg", "cju2oi8sq0i2y0801mektzvw8.jpg", "cju40wto5kxwi0755it190f2k.jpg", "cju5jx7jzf7c90871c2i9aiov.jpg", "cju6vucxvvlda0755j7msqnya.jpg", "cju3xtufwiv9c0818djsc4cqd.jpg", "cju84ffdzkrjn08183jh1fxmb.jpg", "cju8auylgqx0z0871u4o4db7o.jpg", "cju2nbdpmlmcj0993s1cht0dz.jpg", "cju6yywx1whbb0871ksgfgf9f.jpg", "cju5ft6mcd5q40987rhjgbrr6.jpg", "cju7dymur2od30755eg8yv2ht.jpg", "cju14g8o4xui30878gkgbrvqj.jpg", "cju83syhdk6gs0801rf1rekdl.jpg", "cju7do8c72dbo0801vxfzxdc4.jpg", "cju7alcgr1lsr0871riqk84z7.jpg", "cju330ofbc2l30801th5g3hw6.jpg", "cju3xzvnzj0hd0755xprz39nj.jpg", "cju2rga4psq9n09881z519xx0.jpg", "cju18gzrq18zw0878wbf4ftw6.jpg", "cju8apjewqrk00801k5d71gky.jpg", "cju43b8daly4408170e5ev06g.jpg", "cju3xiic0ilzp0850lusrb42j.jpg", "cju3v11mrgwwb0755u242ygye.jpg", "cju77b3wyz4160755qis4ljsb.jpg", "cju3uz4o6gr9z0850lhxyxvsj.jpg", "cju5o1vu9gz8a0818eyy92bns.jpg", "cju8ayeq7r1fb0818z1junacy.jpg", "cjyzk8qieoboa0848ogj51wwm.jpg", "cju3128yi0rpu0988o4oo5n8n.jpg", "cju1d50a94qf50855wsowacrc.jpg", "cju6x0yqbvxqt0755dhxislgb.jpg", "cju5buy2bal250818ipl6fqwv.jpg", "cju7ff97z2ow40817u2r83my5.jpg", "cju1g4nsb6ngy0799l4ezm8ab.jpg", "cju34ds2531520988qjpqt6e3.jpg", "cju6wjm81vgsc0987enk9n3pr.jpg", "cju8dic9mtppa0987swn23wbc.jpg", "cju6vta3kvazg0817qbeppjtm.jpg", "cju15wdt3zla10801odjiw7sy.jpg", "cju8aeei7q8k308173n9y4klv.jpg", "cju7aez2x1jtj0871ztezs3oi.jpg", "cju1f5x1164xv08555654c24r.jpg", "cju8chxndsre008015uisl4si.jpg", "cju5wtdu4m0im0871mix0yvc0.jpg", "cju85c2d4ln1b0755zz1z3onx.jpg", "cju7g7ba42z310987bqzbi2bq.jpg", "cju7f5ghb2r5s0801chwkxxh9.jpg", "cju16jgnyzp970878melv7r25.jpg", "cju5v8pgplg6k0755rvi2t63h.jpg", "cju1euuc65wm00799m4sjdnnn.jpg", "cju1871y11d6r0799k6cw4yze.jpg", "cju8bqxxurs6i0850mu7mtef9.jpg", "cju7ecl9i2i060987xawjp4l0.jpg", "cju7d3oc82cho0755dajlwldz.jpg", "cju326h4v1gxw08352px40p7r.jpg", 
"cju8bafgqrf4x0818twisk3ea.jpg", "cju84aoa3ktwn0755pfl4gfwd.jpg", "cju6xa0qmvzun0818xjukgncj.jpg", "cju85qefyln6v0850szeb9byi.jpg", "cju8c5zcbsdfz0801o5t6jag1.jpg", "cjyzul1qggwwj07216mhiv5sy.jpg", "cju40jl7skiuo0817p0smlgg8.jpg", "cju2hos57llxm08359g92p6jj.jpg", "cju7dubap2g0w0801fgl42mg9.jpg", "cju85nr8elly209872w9n5m0s.jpg", "cju42wamblrqn098798r2yyok.jpg", "cju7f0ec32txj08184asb8w5f.jpg", "cju1cdxvz48hw0801i0fjwcnk.jpg", "cju3xl264ingx0850rcf0rshj.jpg", "cju17bz250pgd0799u1hqkj5u.jpg", "cju783tmkzkqu081803g7q5vk.jpg", "cju41p90plcsx08018cnzpndc.jpg", "cju32jcdabepz0878d0cznmfe.jpg", "cju1gghyjwxt80835vx0wgxw0.jpg", "cju7arvfe1ldu0850erdmphgj.jpg", "cju1hirfi7ekp0855q0vgm9qq.jpg", "cju30525w04r10835ygp257sb.jpg", "cju5vwbr4lhqn0987a1pji0ux.jpg", "cju42qet0lsq90871e50xbnuv.jpg", "cju8aw9n1qyg10801jkjlmors.jpg", "cju7787c5yy3l080159mwqsnj.jpg", "cju15ptjtppz40988odsm9azx.jpg", "cju34ymm8d6700799uop0cw33.jpg", "cju6uy20suzbl0987rzuhz7z9.jpg", "cju310f6val1v0855xo8tc3gu.jpg", "cju7atnm31if40817pqclnjer.jpg", "cju88aq6vo1ij0755c2ey7z7n.jpg", "cju8dn0c3u2v50801k8rvq02f.jpg", "cju8doa16u5gh0818w1ywda3q.jpg", "cju77t0razbvm080106o56289.jpg", "cju84hibuktj80871u519o71q.jpg", "cju7f900s2o0k08175gl1giid.jpg", "cju422cm8lfxn0818ojicxejb.jpg", "cju8567gdlcbq0801dwwyo2jt.jpg", "cju2lyynuymli0855g7fxgbhe.jpg", "cju3x5u2tiihx0818914gzxy1.jpg", "cju5vcmrqla7i0817x4sp4pqw.jpg", "cju0s2a9ekvms080138tjjpxr.jpg", "cju43kj2pm34f0850l28ahpni.jpg", "cju41lojblbs307555jdci937.jpg", "cju88trl3ogi208716qvti51b.jpg", "cju2ro5jqsy680988pi6qsujw.jpg", "cju8c5mxls96t0850wvkvsity.jpg", "cju890guyoiti098753yg6cdu.jpg", "cju5yimthmlv80850zhoc90c2.jpg", "cju1egx9pvz2n0988eoy8jp23.jpg", "cju7d4jk723eu0817bqz2n39m.jpg", "cju2qh5le1ock0878oahaql7d.jpg", "cju8b3ka8r64u0801fh18hk7l.jpg", "cju5yhgznmkzb0801cji2vi8j.jpg", "cju2nqapmzvk20801f9us40dx.jpg", "cju8djdqztu6408506pzhlo18.jpg", "cju418rckl3ur08012psrx1r1.jpg", "cju2hqt33lmra0988fr5ijv8j.jpg", "cju6vvxsev9y30987kespucdg.jpg", "cju84jdl9kv0i0871eog9b3i9.jpg", "cjz14qsk2wci60794un9ozwmw.jpg", "cju7afqon1ip40850ue2308b6.jpg", "cju8a84g0q76m0818hwiggkod.jpg", "cju2rz4k434s70855wwx3ddtx.jpg", "cju1c8ffau5770835g0g343o8.jpg", "cju41s6nbleqy0755e2mslg0b.jpg", "cju31t8xd17bk0835rnb893jk.jpg", "cju8cbsyssiqj0871gr4jedjp.jpg", "cju5klveuff6w0871wbibgh3m.jpg", "cju2sggy13na70855tbeoqgha.jpg", "cju8dm2cau2km0818jsv9eeq2.jpg", "cju7dtb1e2j0t0818deq51ib3.jpg", "cju2xyd9vyi7m098831qcucse.jpg", "cju2r11x7sdgx0988o8ule0wl.jpg", "cju85omszllp30850b6rm9mi3.jpg", "cju2nfnvxzdkd0878399axlco.jpg", "cju5bhv81abur0850ean02atv.jpg", "cju77j66ez52p08019xygi0co.jpg", "cju33yemn2qb20988wfjxximx.jpg", "cju3518w2d838079939fqztbc.jpg", "cju2t62nq45jl0799odpufwx6.jpg", "cju7fen322ou10817ziqkob4k.jpg", "cju5wqonpm0e60801z88ewmy1.jpg", "cju42romflni20817etb9a0fl.jpg", "cju3381d8bz3h07991xtl7ra0.jpg", "cju33qpdvc9g0087825jhf3s9.jpg", "cju8alhigqn2h0801zksudldd.jpg", "cju8418jhkf7d0818ga2v0xq0.jpg", "cju7fcgbe2z3p07550vaflqdb.jpg", "cju428k5fldt108177s6g6f45.jpg", "cju7fnfv02tt90801djnix9m8.jpg", "cju1cj3f0qi5n0993ut8f49rj.jpg", "cju1dq3x1vgx109889c7wyirg.jpg", "cju784jpdzeae0987q5ypq883.jpg", "cju1ats0y372e08011yazcsxm.jpg", "cju7deifq2fzn0755lc8idyh8.jpg", "cju1c6yfz42md08550zgoz3pw.jpg", "cju2zdvjn9h7r08553cp4eed5.jpg", "cju32upim1z7u0988l883nqp6.jpg", "cju0s690hkp960855tjuaqvv0.jpg", "cju7dglf226g50987ohbthl19.jpg", "cju1ejj7dvqfa0835ra184v5m.jpg", "cju83k8fyjsxr0817d6nxs6r4.jpg", "cju35a77vdj4n08556jj2lgmc.jpg", "cju45v0pungu40871acnwtmu5.jpg", "cju1hs0za7jha0855vj0mdrjt.jpg", 
"cju5woy82m07m08505dmjg7g1.jpg", "cju34repocy5208780gswillm.jpg", "cju5tgbzhjllu08174ca41eus.jpg", "cju5hqz50e7o90850e0prlpa0.jpg", "cju0t4oil7vzk099370nun5h9.jpg", "cju30qbm1ad3x0855znuhpz9u.jpg", "cju1b3zgj3d8e0801kpolea6c.jpg", "cju17r8il13910799dr2wme2e.jpg", "cju2rnkt22xep0801as160g9t.jpg", "cju88gx09o2vk0818610zody3.jpg", "cju0u2g7pmnux0801vkk47ivj.jpg", "cju5nxkujgscq0817l9gss626.jpg", "cju2hfqnmhisa0993gpleeldd.jpg", "cju5uhrdwkmsu0817ervv91l8.jpg", "cju6vrs1ov8cr098788h8gs6j.jpg", "cju353d1eda8c07992afde611.jpg", "cju6w733bveoz0817e600tw72.jpg", "cju2nyc5f02m40801ojqbtiea.jpg", "cju45rj7ln8980850a7821fov.jpg", "cju2xf8e5y2wm08359vcgk09b.jpg", "cju5yeqiwmkgl0801fzv2douc.jpg", "cju35740hzm0g0993zl5ic246.jpg", "cju13fwthn9mq0835gacxgy01.jpg", "cju76l27oyrw907551ri2a7fl.jpg", "cju7cp6dw244p0818gncdol4m.jpg", "cju8c6hnxsdvr0801wn0vrsa6.jpg", "cju43c92lm5cj0755lorsorfg.jpg", "cju45n0oxn5vu08500yfrt9jn.jpg", "cju30ywtc0oar0835bp2en7ec.jpg", "cju5waeduln160817w0agirve.jpg", "cju5clr68b48r0755cmuvponm.jpg", "cju2zpw4q9vzr0801p0lysjdl.jpg", "cju1b75x63ddl0799sdp0i2j3.jpg", "cju87tyddnnad0755bj0wxahe.jpg", "cju5ymyd8mmdc0801ry3by1xr.jpg", "cju8ando2qqdo0818ck7i1be1.jpg", "cju3tsh4lfsok0987w6x3a0v1.jpg", "cju2z2x3nvd3c099350zgty7w.jpg", "cju3uhb79gcgr0871orbrbi3x.jpg", "cju2omjpeqj5a0988pjdlb8l1.jpg", "cju15mhjczc8z0801kit5c6di.jpg", "cju2ycp1u8g2r0799jslnp7cz.jpg", "cju888fr7nveu0818r9uwtiit.jpg", "cju426tomlhll0818fc0i7nvh.jpg", "cju5ukkg6kv7u08011x2b6zl5.jpg", "ck2bxiswtxuw80838qkisqjwz.jpg", "cju5bj926aiec07559rshy4wa.jpg", "cju2zo0fwzv580988qlijd2xa.jpg", "cju1ddr6p4k5z08780uuuzit2.jpg", "cju85a8h8llwm07559wxg4t5w.jpg", "cju2htabevq9108015qjei0x7.jpg", "cju14hjh2ob2o0835ouz3r5aa.jpg", "cju88y1mwoln50871emyfny1g.jpg", "cju3xeexgii1j0817zs68tb4g.jpg", "cju6xlygpw7bs0818n691jsq4.jpg", "cju7aklv31h4309871m29l4e7.jpg", "cju6z2616wqbk07555bvnuyr1.jpg", "cju1c4fcu40hl07992b8gj0c8.jpg", "cju34eqjpcpm508788b3lhp97.jpg", "cju8b5p40r2c60987ofa0mu03.jpg", "cju77q10sz9ug0801449wu1nu.jpg", "cju6uzxk0v83p0801rcwnexdu.jpg", "cju7csvlb22fr0850lvm45n3x.jpg", "cju1cbokpuiw70988j4lq1fpi.jpg", "cju8b2rmgr52s0801p54eyflx.jpg", "cju357rxxdaz30878y2esjpjt.jpg", "cju34m7h536wq0988xz7gx79v.jpg", "cju3yb47cj1xq0817zfotbni4.jpg", "cju2u4pymvc720988wsxrmi84.jpg", "cju41nz76lcxu0755cya2qefx.jpg", "cju3xhpvvimda0987ygrpzni2.jpg", "cju8b4ja9r2s808509d45ma86.jpg", "cju88nroho44508500129f1nh.jpg", "cju1g20bdwq6u0835e16xugcd.jpg", "cju43in5fm22c08175rxziqrk.jpg", "cju8cgi2kspp308011nxdtjp6.jpg", "cju16b6ynq8e40988m8vx0xnj.jpg", "cju2qu37qobl50993aw7ghcfq.jpg", "cju2spdagu1l50835da1f46fr.jpg", "cju3v664kh0px0818y4y7wolf.jpg", "cju30lncba3ny0878jwnous8n.jpg", "cju1amqw6p8pw0993d9gc5crl.jpg", "cju2lejzcy4pc0878c9rlonot.jpg", "cju2qdj95ru8g09886gfi9rsz.jpg", "cju5eq8c8ck690850vix98hv3.jpg", "cju1fyb1d69et0878muzdak9u.jpg", "cju2wtwj87kys0855kx6mddzw.jpg", "cju88itqbny720987hxizbj5y.jpg", "cju3v3ac9gyz30755hfqwyp1i.jpg", "cju5vzjoslpj708186z2fusmz.jpg", "cju2ij9uiic2l09933ljiv6gm.jpg", "cju2uwz9f5yf1085506cfamfx.jpg", "cju2lberzkdzm09938cl40pog.jpg", "cju7cufm7298k0755j09uf3of.jpg", "cju7b9vcs1luz0987ta60j1dy.jpg", "cju40poe4kt9s0755f9cnm3h5.jpg", "cju7ap09p1kz10850ldccjebj.jpg", "cju5ew4h9cqaf0818rrczkmqh.jpg", "cju88fpm4o0tl0871w1i6a4ds.jpg", "cju7bgnvb1sf808717qa799ir.jpg", "cju2m71z2ywwv080131bcrsd3.jpg", "cju41r6v2lcww0871ps8k8pf5.jpg", "cju30ajhw09sx0988qyahx9s8.jpg", "cju87zv8lni0o0850hbbecbq6.jpg", "cju5yjq1pmlgc0801z0t24bly.jpg", "cjyzuio1qgh040763k56deohv.jpg", "cjyzufihqquiw0a46jatrbwln.jpg", 
"cju2zrojo9kcd0878ld2epejq.jpg", "cju5vxuc5loxw0818u8xgf45p.jpg", "cju2rlqdnoz9k0993cpjae3x0.jpg", "cju8402x1kcy70801t6kz6bdi.jpg", "cju1h89h6xbnx08352k2790o9.jpg", "cju8at3s1qqqx0850hcq8nmnq.jpg", "cju886ryxnsl50801r93jai7q.jpg", "ck2bxqz3evvg20794iiyv5v2m.jpg", "cju35k2fr3vc50988c85qkrwg.jpg", "cju8c1a0ws7o208181c6lbsom.jpg", "cju7dda8w2br20818zhsuz8s7.jpg", "cju1ftaji6isw0855108yqcse.jpg", "cju8c3xs7sauj0801ieyzezr5.jpg", "cju33za6l2qy70988jhrlp2ev.jpg", "cju30ov1oah920801mi8thuyg.jpg", "cju7ez7r22qbc08015xfoz2wb.jpg", "cju5o4pk9h0720755lgp9jq8m.jpg", "cju410dnfl0960755y8lu8d79.jpg", "cju334jzo261t0835yqudnfs1.jpg", "cju2yi9tz8vky0801yqip0xyl.jpg", "cju45qbf3n9sa0987oonbkly9.jpg", "cju6ut4l8va6y0755tyw3vfqq.jpg", "cju6ur9l9v9jq0755paud9uka.jpg", "cju89z6pqpqfx0817mfv8ixjc.jpg", "cju7b10ce1mnm08011c5bwyr4.jpg", "cju175facms5f0993a5tjikvt.jpg", "cju2txjfzv60w098839dcimys.jpg", "cju1csmlc4ht10799b8ymmghg.jpg", "cju3tp94kfstl08181awh6z49.jpg", "cju2p91qir00k08350ddfif0w.jpg", "cju5hyi9yegob0755ho3do8en.jpg", "cju43h43am1dy08176gwfhmnt.jpg", "cju88msmoo3470817m441j4sg.jpg", "cju13hp5rnbjx0835bf0jowgx.jpg", "cju83ipu3jwpx0801z5pvguf8.jpg", "cju33w4sdcivk0855x879zht7.jpg", "cju2tzypl4wss0799ow05oxb9.jpg", "cju6wll7wvo3y08502pagos8m.jpg", "cju7bc95p1mdm0817yqj5jc6j.jpg", "cju30j1rgadut0801vuyrsnt8.jpg", "cju35mdz73x890835eynq1h9v.jpg", "cju43lcnum9y10755bjs7z87f.jpg", "cju7etr3y2p4t0801cdzjj8ab.jpg", "cju774fmayxif0818u2g79usw.jpg", "cju1cu1u2474n0878tt7v4tdr.jpg", "cju323ypb1fbb0988gx5rzudb.jpg", "cju33x0f22peh0988g0ln7w5v.jpg", "cju2y5zas8m7f0801d34g5owq.jpg", "cju5ddda9bkkt0850enzwatb1.jpg", "cju5ufn3skquf0818dhapnhba.jpg", "cju84gpefknwm098714oq8q61.jpg", "cju2trtjf4qjd0878a2zle9v9.jpg", "cju7fazv92ywx0755xov2erga.jpg", "cju2rn0hasxri0835nfy3buay.jpg", "cju3x4blzieu30850x10uuvbm.jpg", "cju3y9difj6th0801kd1rqm3w.jpg", "cju2osuru0ki00855txo0n3uu.jpg", "cjyzlw7f9faqr070129au64sq.jpg", "cju3u39fog1bo0871lxjrabks.jpg", "cju2yo1j1v0qz09934o0e683p.jpg", "cju33231uy4gi0993qc7b1jch.jpg", "cju2xlcqxy9c60988vjacdznb.jpg", "cju77vvcwzcm50850lzoykuva.jpg", "cju8dpa89u6l80818dj6lldh9.jpg", "cju3xwpgviwlx0871rwm15q7v.jpg", "cju1h5w4wxajx0835mc954kxy.jpg", "cju303j5r062k098835zxfds5.jpg", "cju35fxqyzt5p0993vusm54qz.jpg", "cju31w6goazci0799n014ly1q.jpg", "cju3u815rg4ek0850vvhtcvcm.jpg", "cju8bpctzrqkr0850zeldv9kt.jpg", "cju5f26ebcuai0818xlwh6116.jpg", "cju5g163vd6mt0817uccuga6u.jpg", "cju2zjcvj9qma0801dk71hhi0.jpg", "cju2mfjndoz700988b9lc3zeq.jpg", "cju5uxjnol2r509871qv2yeia.jpg", "cju7dwe282dc309876rco45ts.jpg", "cju5wuhm1lwm40987vugqn3vv.jpg", "cju77u1sjz77b0817ft44r3fk.jpg", "cju2yv4imv6cz099314jveiib.jpg", "cju16d65tzw9d0799ouslsw25.jpg", "cju8b6rp0r5st0850184f79xt.jpg", "cju3umoh1geet0817cmpef5am.jpg", "cju5y7buemcw80987p0r30g9f.jpg", "cju32srle1xfq083575i3fl75.jpg", "cju0roawvklrq0799vmjorwfv.jpg", "cju7b5afm1nfw0801xqm8bf8q.jpg", "cju1fb9236a110801yvg0fwju.jpg", "cju2z45kuzf6d0988nz2c819m.jpg", "cju45lbgznahl08180xz1h7u6.jpg", "cju2tpfa5uyx408359datxqqj.jpg", "cju3v72v5h1qz0818fggilwtq.jpg", "cju2yb31a8e8u0878wdashg7o.jpg", "cju6x97w4vwua0850x0997r0a.jpg", "cju42u5bjlvi10801dc13sskp.jpg", "cju77bvg0yv4r0987yh60xmjo.jpg", "cju5wrapcm2290818jsh26ppb.jpg", "cju17v6ih0u7808783zcbg1jy.jpg", "cju8d2q30tfhs0801n7lx77xl.jpg", "cju2nd7l7z98o0799gfjvyfmw.jpg", "cju5uzmaol56l0817flxh4w9p.jpg", "cju17otoe119u0799nqcbl8n1.jpg", "cju2s16zp317h0799gr67jqc2.jpg", "cju0u82z3cuma0835wlxrnrjv.jpg", "cju2zy1e49pqk0878t6ncqn12.jpg", "cju1f15k3w4ct0835cmde6ypo.jpg", "cju8bgdmqrksy0801tozdmraa.jpg", 
"cju3521y5d5mq0878t3ezsu4p.jpg", "cju40sdwukv3k0755y99ug1k8.jpg", "cju2pjb9v0ywn0878j5g5n69j.jpg", "cju2i3hzclw3o0988rrgh911i.jpg", "cju6yxyt0wh080871sqpepu47.jpg", "cju5x28nzm7t907558ocq4bt7.jpg", "cju1hp9i2xu8e0988u2dazk7m.jpg", "cju160wshltz10993i1gmqxbe.jpg", "cju34aozyyy830993bn16u32n.jpg", "cju7dxffn2eam0817qxosfwch.jpg", "cju8dqkrqu83i0818ev74qpxq.jpg", "cju8bysfgrzkl081786jwac09.jpg", "cju5gucasds9d0801019axylx.jpg", "cju5vi4nxlc530817uoqm2m7a.jpg", "cju5wj0faly5008187n6530af.jpg", "cju30mm25a53s0799qa5wiqe8.jpg", "cju5ca9hcatkc0801jzwe7tfx.jpg", "cju7bd1qu1mx409877xjxibox.jpg", "cju2yg5ht8i4p087800js8hp4.jpg", "cju5y4hgqmk0i08180rjhbwvp.jpg", "cju1euant5l960878iqj5vvto.jpg", "cju1dg44i4z3w0801nyz4p6zf.jpg", "cju2uy8ox62jo0801g88hh42z.jpg", "cju2t16vuucaq0835xcpsivn2.jpg", "cju5u4pywk81x0817vn9pe14z.jpg", "cju8ca4geseia0850i2ru11hw.jpg", "cju83wwn1k55e0850kw6i2d81.jpg", "cju45ofd9ne1j0801ri8dup7t.jpg", "cju2uzabhs6er0993x3aaf87p.jpg", "cju8a56vxpy780850r45yu4wk.jpg", "cju45pm27n80u08174kyow1gj.jpg", "cju3u4lxmg59o0755rz42b9en.jpg", "cju1gkndf6yi10801o1qnje19.jpg", "cju30gxjq0djk0988jytm49rs.jpg", "cju45jpvfn6c809873pv1i34s.jpg", "cju7adqyj1jcx08712r1ro5gx.jpg", "cju7dmlgf2ebw0871ieqas5fh.jpg", "cju8d4jgatgpj0871q2ophhkm.jpg", "cju2wx0gh7fpz0878wwyd9ep8.jpg", "cju2x7vw87mu30878hye2ca0m.jpg", "cju1hhj6mxfp90835n3wofrap.jpg", "cju30ftgja7170855xl9bkdm0.jpg", "cju43eigtm6ev0801mv0m96t1.jpg", "cju1fjsb4sipq09931lvd8e41.jpg", "cju77g99iyxc00817zqi2ppor.jpg", "cju34zivp3fq80988opxbaqyn.jpg", "cju2t9tdwuk700835kv0ljmtl.jpg", "cju5wi6bqlxy90755bu227nvb.jpg", "cju2rmd2rsw9g09888hh1efu0.jpg"], "valid": ["cju5vgawslbe30987ndeepc1b.jpg", "ck2bxlujamu330725szlc2jdu.jpg", "cju2hx006vidl0799igm81vmh.jpg", "cju7aqkue1i2k09879uzcpt8r.jpg", "cju5uget8krjy0818kvywd0zu.jpg", "cju8dk7eztzup08182yxko5zh.jpg", "cju1c3218411b08014g9f6gig.jpg", "cju7f6cqy2ur20818t1saazbm.jpg", "cju6wi3akvn8r0801px8eligc.jpg", "cju5enq1tcn1i0755hnkon787.jpg", "cju2hewssldzx0835ep795xu0.jpg", "ck2bxskgxxzfv08386xkqtqdy.jpg", "cju2np2k9zi3v079992ypxqkn.jpg", "cju5bycdkalkb09875f7bfrvx.jpg", "cju2hlm19vjjf0801o69qnber.jpg", "cju7amjna1ly40871ugiokehb.jpg", "cju34c1xfyz920993itxkkfad.jpg", "cju88oh0po9gq0801nge4tgr1.jpg", "cju5i39mreass0817au8p22zy.jpg", "cju5f0dezct4q08183ydw11dx.jpg", "cju5cky5xb0ay0801oxet697t.jpg", "cju7ehljc2or70871261br8ai.jpg", "cju183od81ff608017ekzif89.jpg", "cju7evxt12m730987rxivne3x.jpg", "cju8cj10qsrau0871o2dr6ai1.jpg", "cju2y26c588bo07993ksd8eoz.jpg", "cju2zkpdl9h7t0799ix60teqg.jpg", "cju85bf1algsq0871y9gtlq97.jpg", "cju40w3hbkwpn08015rbs3wko.jpg", "cju17hw9hr9c5098800fu4u8e.jpg", "cju6wuojavt740818b5qcv3iw.jpg", "cju2qvuj1s9ok0835tp2k4ozh.jpg", "cju33jon3ygbj0993pu22a4k6.jpg", "cju7dsrtb2f8i085064kwugfk.jpg", "cju77k828z46w0871r0avuoo9.jpg", "cju1aqqv02qwz0878a5cyhr67.jpg", "cju412uwlkva50850d1ps1ww7.jpg", "cju2nguelpmlj0835rojdn097.jpg", "cju35eg0tdmjt085525sb4bua.jpg", "cju7dn24o296i09871qfxb8s2.jpg", "cju2ma647l0nj0993ot4deq2q.jpg", "cju1expq45zst0855rjqwwj4m.jpg", "cju88t4fvokxf07558ymyh281.jpg", "cju83nwu1jxte0987h1krpfmv.jpg", "cju1fj6axwfp30835ukhuzhw9.jpg", "cju8b0jr0r2oi0801jiquetd5.jpg", "cju7bmi1v1pnj0987pa52jjok.jpg", "ck2da7fwcjfis07218r1rvm95.jpg", "cju2zblxw9848087853csbrx1.jpg", "cju5udcufki0s09874ll1dbr5.jpg", "cju2rqo702wpx0855fn7d5cxh.jpg", "cju88l66no10s0850rsda7ej1.jpg", "cju83yddek68q0850d2x7zfkm.jpg", "cju6z1bzbwfq50817b2alatvr.jpg", "cju5hi52odyf90817prvcwg45.jpg", "cju324q101fhe08350wae9cif.jpg", "cju5cetivauok0987ok3e5bre.jpg", "cju5u6wf0kh1t0755bg1ssixv.jpg", 
"cju8clorgsuwn08714toqb7v6.jpg", "cju5b9oyda4yr0850g9viziyv.jpg", "cju2qtee81yd708787bsjr75d.jpg", "cju3tvffffx5f0818t5ov22al.jpg", "cju1d96gsv62d09881b3wecw2.jpg", "cju5yclrymlgj0818k426ud6z.jpg", "cju2i8br1vqtd08784u6vmcjk.jpg", "cju7dvl5m2n4t0755hlnnjjet.jpg", "cju5hl8nee8a40755fm8qjj0o.jpg", "cju7d8m3b2e210755l8fj1yph.jpg", "cju1f79yhsb5w0993txub59ol.jpg", "cju7frtqu2xa20818wq8r9fzf.jpg", "cju35atpxdjot0855q46aqrd0.jpg", "cju3y0pjrj1c30755nxekxccj.jpg", "cju2ti1du4idn0878giuozonw.jpg", "cju2yyhsp933j0855hp32e012.jpg", "cju2r6mt2om21099352pny5gw.jpg", "cju3ttznuftyf09875t11850w.jpg", "cju42xpi8lw4w0871ve317a1p.jpg", "cju5u8gz4kj5b07552e2wpkwp.jpg", "cju7ezs7g2mxm098787atbran.jpg", "cju17g6ykn1cs0993dww6qdi8.jpg", "cju8arof2qpf20850ifr1bnqj.jpg", "cju3xuj20ivgp0818mij8bjrd.jpg", "cju34sh43d8zm08019xbwhc0o.jpg", "cju2zp89k9q1g0855k1x0f1xa.jpg", "cju7agj961l2r0818z29iq8yn.jpg", "cju3ua8u0g9rg0801uayhdxhu.jpg", "cju2hdr06v2bq0799mbm3bks1.jpg", "cju2qz06823a40878ojcz9ccx.jpg", "cju8azmhcr66e0755t61atz72.jpg", "cju87ox0kncom0801b98hqnd2.jpg", "cju1fuoa4wmc50835qfd11sp9.jpg", "cju88cddensj00987788yotmg.jpg", "cju7azuu31mia0801pf9ib5ed.jpg", "cju5k3j3uf6de0817hszzfr7n.jpg", "cju5x00l6m5j608503k78ptee.jpg", "cju430pm2lz0y0755jkhcc3d1.jpg", "cju0vtox5ain6099360pu62rp.jpg", "cju7dp3dw2k4n0755zhe003ad.jpg", "cju885ikhnmkn09878s2lqtuh.jpg", "cju2zr3c3vwb00993jn06bbaz.jpg", "cju8bw697rwg308177tg8huas.jpg", "cju1efbr0rqxz09931z0lf4vf.jpg", "cju7d7aut2a2p0818z4uxc6cd.jpg", "cju2sevf53lkx08558h5bpaig.jpg", "cju15l5ubz9yh0855b3ivdpse.jpg", "cju2nnqrqzp580855z8mhzgd6.jpg", "cju2mh8t6p07008350e01tx2a.jpg", "cju30nyxe0gfb0835p256yoju.jpg", "cju85rkbnlo1c08503uxcpax1.jpg", "cju3280wv1ir009882jze27tc.jpg", "cju7b2l561oas0871decgslaf.jpg", "cju2m56cryvqd0801gtn2yp8t.jpg", "cju5boicjagt20871b1fotkh4.jpg", "ck2bxknhjvs1x0794iogrq49k.jpg", "cju7crgxa28550755wbsgqkel.jpg", "cju1brhsj3rls0855a1vgdlen.jpg", "cju8b542nr81x0871uxnkm9ih.jpg", "cju87li0zn3yb0817kbwgjiz8.jpg", "cju1f8w0t65en0799m9oacq0q.jpg", "cju8a3nhbpwnb0850d37fo2na.jpg", "cju1bm8063nmh07996rsjjemq.jpg", "cju2z1nxlzaj40835wj81s1iy.jpg", "cju83qd0yjyht0817ktkfl268.jpg", "cju414lf2l1lt0801rl3hjllj.jpg", "cju2yw4s7z7p20988lmf2gdgd.jpg", "cju5fs6j6d8350801vglraq4u.jpg", "cju7er4kc2opa0801anuxc0eb.jpg", "cju5bdwa3aatx0818b79i18zf.jpg", "cjyzurzvohqnr0794es1itzek.jpg", "ck2bxw18mmz1k0725litqq2mc.jpg", "cju41z76wlgbz0801qdetlvby.jpg", "cju84ih17kp5l09876bkooocl.jpg", "cju5vbo6jldrt0871jf6f1700.jpg", "cju7es23b2vcp0755gpbm9s7v.jpg", "cju6ywm40wdbo0987pbftsvtg.jpg", "cju32l161bi1v07990vm376in.jpg", "cju5fi0yxd3ei0801v7u0yudn.jpg", "cju884985nlmx0817vzpax3y4.jpg", "cju32qr9tbvsj08013pkpjenq.jpg", "cju7flevb2wii08188otgs9p2.jpg", "cju16fpvhzypl0799p9phnlx6.jpg", "cju1dhfok4mhe0878jlgrag0h.jpg", "cju5xq3tdm9fn0987pbedxdg5.jpg", "cju7bb3ss1uoo0755pmhyco7t.jpg", "cju6us80mv1b50871ebyq2wxa.jpg", "cju2p0eveqtdc0835gpi3p93i.jpg", "cju7apr0c1qqm0755s7msqot4.jpg", "cju34o6dbd2lo0855aqlcy1hs.jpg", "cju2xd75m82720801q4s4ik3n.jpg", "cju7ebe962hr409872ovibahw.jpg", "cju1fr4etsmrr09933u4t4aql.jpg", "cju0ue769mxii08019zqgdbxn.jpg", "cju7cl8zm1xcu0817ado0jpas.jpg", "cju5ktjwofed70817eg58ef7u.jpg", "cju843yjskhq30818qre4rwm2.jpg", "cju8cwy02t9eq08185qn12c02.jpg", "cju892fesoq2g0801n0e0jyia.jpg", "cju3ul8dogf1z09872y2ecowp.jpg", "cju85fc11ljr40818edpb0inh.jpg", "cju8c2rqzs5t80850d0zky5dy.jpg"], "test": ["cju8abobpqbir08189u01huru.jpg", "cju8axq24r4an0755yhv9d4ly.jpg", "cju5cu8qkb84x08186jwo8yin.jpg", "cju30k2z40ds308353kdew70n.jpg", "cju6vqarjv7yo0987q4b1btk1.jpg", 
"cju1819curo000988pd5xcqme.jpg", "cju7d9seq29zd0871nzl2uu5m.jpg", "cju31ugmfb3dz0855xtqshki6.jpg", "cju858eswlepn0871pzvdrhj1.jpg", "cju5fw37edaae0801vkwvocn7.jpg", "cju83kxitjv340987z09m0ezy.jpg", "cjyzkpsbjdsjq07211dfi4sru.jpg", "cju2z9vlp9j0w0801oag91sy9.jpg", "cju0tl3uz8blh0993wxvn7ly3.jpg", "cju33eqwbcch208012jikwdky.jpg", "cju8bssulrrcy0987h1vq5060.jpg", "cju2qxxko229x08786gvxxhur.jpg", "cju6wn57mvooj0850rp78hhy7.jpg", "cju7ejm2l2ncl0801wq6y84nw.jpg", "cju7b3f5h1sm40755i572jden.jpg", "cju2pmhtr17a00855cvpelzb0.jpg", "cju2u2b9o4zvp08788qb9nqxj.jpg", "cju85ia1slh220987y7c20sm2.jpg", "cju439oazm2fu0871ma0vvrft.jpg", "cju5hwonqedw10801vsd3w6kk.jpg", "cju1gi7jlwyld0835cdf6g6qz.jpg", "cjyzjzssvd8pq0838f4nolj5l.jpg", "cju17x0j4nfc10993y31pvlgs.jpg", "cju1fm3id6gl50801r3fok20c.jpg", "cju30bmab08bi0835mvlr6e0r.jpg", "cju41kd7yl4nm0850gil5qqwh.jpg", "cju2s2527pfyr0993l3h1149a.jpg", "cju2urqpwvxw70835rvndvtsi.jpg", "cju7et17a2vjk0755e743npl1.jpg", "cju7fob3x301u0755x985pmmq.jpg", "cju17z0qongpa0993de4boim4.jpg", "cju2phaksnahz0993yxogjcpv.jpg", "cju8b1v3br45u087189kku66u.jpg", "cju8aqq8uqmoq0987hphto9gg.jpg", "cju33mirdc8mj0799k33wzoes.jpg", "cju2y0z6g87p10878fpk5d3rq.jpg", "cju6vvb8svhed0801jjcquh5e.jpg", "cju2t3ibkuecr0835o7si16zv.jpg", "cju8914beokbf0850isxpocrk.jpg", "cju8chdlqsu620755azjty1tj.jpg", "cju43o6n7m9nk087191ijwqq9.jpg", "cju7bf1lp1shi081835vs84lc.jpg", "cju2ysg748ru80878sp6j0gm0.jpg", "cju88z8bson4h0871nnd7fdxo.jpg", "cju8brv16rx7f0818uf5n89pv.jpg", "cju7da88w2eod0755wejzynvt.jpg", "cju7eueum2oqn0850rodmx8zo.jpg", "cju773hsyyosz0817pk1e7sjq.jpg", "cju358pwtdby20878cg7nm0np.jpg", "cju2oq5570avm079959o20op1.jpg", "cju3y54kwj3nr0801biidlb4e.jpg", "cju5bmhdcafs909878qfzrqzi.jpg", "cju83h9ysjwe808716nt35oah.jpg", "cju5chrxxawka0871qcj171yz.jpg", "cju8czvnztbf40871b4m7t78w.jpg", "cju3xvoo2iqlc0817eku2r3wl.jpg", "cju7f9umg2olj0987fj5y285w.jpg", "cju87vqa0ndwg0850onjdz7ol.jpg", "cju2ouil2mssu0993hvxsed6d.jpg", "cju2zi4l09f5807991s8do2b2.jpg", "cju5k503sfa5f0871lx0rpu5y.jpg", "cju87z6o6nh73085045bzsx6o.jpg", "cju5xopi0md7q0871r1sjc1av.jpg", "cju40r6jrksyk0871wg98zgho.jpg", "cju5xneamme2p0801qdf7fdwv.jpg", "cju3xjqtpikx50817tppy6g84.jpg", "cju8bn7m2rmm70817hgxpb1uq.jpg", "cju42nm68lpyo0818xvvqmupq.jpg", "cju88q6h6obpd0871ckmiabbo.jpg", "cju3wstckialg0871xs0vevsj.jpg", "cju2y8s56ymqr083541ggdsml.jpg", "cju5ygh1zmmdi0755uod5e17i.jpg", "cju7bfx651qr80801cs7epotb.jpg", "cju7d7tly27h408016fyp5nr7.jpg", "cju77idwfz36d0871tzfzz51i.jpg", "cju7ddtz729960801uazp1knc.jpg", "cju30ia8da2bq0799klnehml2.jpg", "cju3y79ofj3va0871uqfb1mzo.jpg", "cju7cue9b232j0801qdzk1ykj.jpg", "cju5vutu7ll8w0871dfp92n9p.jpg", "cju2xa3i4y0160988i679zsqd.jpg", "cju6x4t13vyw60755gtcf9ndu.jpg", "cju7fd6yt2p740987wkr8exo1.jpg", "cju2xbk0080y80801eghyddi2.jpg", "cju32a52lb9rc0799xi40qs00.jpg", "cju42tauqlo5p08171l3cuo4b.jpg", "cju7ae7bq1f820987toc8si1d.jpg", "cju5y84q3mdv50817eyp82xf3.jpg", "cju893jmdompz0817xn3g1w4h.jpg", "cju2uokeg5jm20799xwgsyz89.jpg", "cju42dwedlmk60871jbgu4ehi.jpg", "cju35i2e63uxr0835h7zgkg9k.jpg", "cju6z9a9kwsl007552s49rx6i.jpg", "cju15jr8jz8sb0855ukmkswkz.jpg", "cjyzu9th0qt4r0a46pyl4zik0.jpg", "cju89y9h0puti0818i5yw29e6.jpg", "cju7bduyq1rjf08719giru9ho.jpg", "cju2xwm1s84l50799i60mq0pu.jpg", "cju7dlk532dsh0871zvr6qz0r.jpg", "cju85mpuglq8k0818d2it6hzb.jpg", "cju5xkwzxmf0z0818gk4xabdm.jpg", "cju8cattbsivm0818p446wgel.jpg", "cju2pag1f0s4r0878h52uq83s.jpg", "cju5x15djm7ae0755h8czf6nt.jpg", "cju5c5xc7algd0817pb1ej5yo.jpg", "cju32pzh9bpw10855q4vaxfhe.jpg", "cju6v3bb2v7xo085090blubyw.jpg", 
"cju83mki1jv5w0817kubxm31r.jpg", "cju5bbtwsa8cl0987wgfsqpao.jpg", "cju14pxbaoksp0835qzorx6g6.jpg", "cju785htizjzo08017tvlhtg4.jpg", "cju42m60jlpcm08186kqppzqv.jpg", "cju847pxykriq0755268ktrk2.jpg", "cju3x9lttikfb0818a0g104zn.jpg", "cju1cnnziug1l0835yh4ropyg.jpg", "cju76lsehyia10987u54vn8rb.jpg", "cju7fbndk2sl608015ravktum.jpg", "cju5cjh3xattc0817j2vbulzi.jpg", "cju3uwz6ogsp10801h2r3bj5l.jpg", "cju34ouumcznz07996gg1xq7v.jpg", "cju3x2s11ibzi0817kk284k0j.jpg", "cju34fojcctcf0799ebolbvkn.jpg", "cju3v56bwgy8v0871w14pz8fx.jpg", "cju87r56lnkyp0755hz30leew.jpg", "cju2r91dg2k090801bh0xzbxk.jpg", "cju7ev2b12owa08500bpfpwyw.jpg", "cju76o55nymqd0871h31sph9w.jpg", "ck2395w2mb4vu07480otsu6tw.jpg", "cju2pkwt3r8b90988v2ywq1px.jpg", "cju33belnbyhm0878yxl42233.jpg", "cju7ahtkb1jr90801jck4kbds.jpg", "cju8adb60qbiu080188mxpf8d.jpg", "cju5wrrs0m2af0818vmnajbtw.jpg", "cju2wzu8wxtgu09880ku9x1pg.jpg", "cju3y21quj0ir0818kgjagr15.jpg", "cju87mrypnb1e0818scv1mxxg.jpg", "cju0sxqiclckk08551ycbwhno.jpg", "cju5h57xedz5h0755mjpc8694.jpg", "cju1cfhyg48bb0799cl5pr2jh.jpg", "cju8c82iosagu0817l74s4m5g.jpg", "cju30df2j09dd08351ayx2t6w.jpg", "cju2rkjfwoxys0993x768l1j2.jpg", "cjyzu3reghjya0794w7pwoi50.jpg", "cju7druhp2gp308715i6km7be.jpg", "cju5w7xn0lrkq0801f9k0htgx.jpg", "cju40u30gkuzc0871rq7t666d.jpg", "cju6v1m1xv07w09870ah3njy1.jpg", "cju849c23kgnk0817cgv2hw1e.jpg", "cju5k7r0yf98c09878csbxb4d.jpg", "cju8a2itsq4dv0755ntlovpxe.jpg", "cju6z7e4bwgdd0987ogkzq9kt.jpg", "cju7avvi51iox0817ym55y6tt.jpg", "cju0qkwl35piu0993l0dewei2.jpg", "cju85hguellg50818kwu3s8d1.jpg", "cju302fqq9spc0878rrygyzzz.jpg", "cju8amfdtqi4x09871tygrgqe.jpg", "cju6z600qwh4z081700qimgl9.jpg", "cju7ea4om2l910801bohqjccy.jpg", "cju45t5ddnbio0987qtqzx762.jpg", "cju84dsvaklpx098750hp83x4.jpg", "cju1cqc7n4gpy0855jt246k68.jpg", "cju5thdbrjp1108715xdfx356.jpg", "cju87kbcen2av0987usezo8kn.jpg", "cju8b7aqtr4a00987coba14b7.jpg", "cju5xjn5mm78b09871spyqhhr.jpg", "cju3x7xsaijq80818f0psavav.jpg", "cju5fu081d8gc0818l3yylujk.jpg", "cju171py4qiha0835u8sl59ds.jpg", "cju2qn2fzs1vy0988l243cvzy.jpg", "cju7d1tvt25bu08019dvw3uff.jpg", "cju2ueb6j5ado0878vf5md13o.jpg", "cju13cgqmnhwn0988yrainhcp.jpg", "cju3tx1qyg0c907552fglumhc.jpg", "cju31rb7vb6110801p9rhacuw.jpg", "cju77afzlz3kp07550x5nafzs.jpg", "cju2hugv9vget0799hhk7ksvg.jpg", "cju88evxanv9r08176zkeovec.jpg", "cju6xifswvwbo0987nibtdr50.jpg", "cju43jcqim2cp08172dvjvyui.jpg", "cju5es375cnzy0801nkq35ffs.jpg", "cju7dff9529h208503w60lbil.jpg", "cju2hgsptlfam0835o3b59h1o.jpg", "cju8buos5rz9b08715lfr0f4f.jpg", "cju7ctvqn25dy08186g442m1r.jpg", "cju7dz5yy2i7z0801ausi7rna.jpg", "cju7b4mtw1n9n080186209f3d.jpg", "cju42g865lorv07552ytz6xxa.jpg", "cju5von04litr08718j8po40a.jpg", "cju7eea9b2m0z0801ynqv1fqu.jpg", "cju8bi8q7rlmn0871abc5ch8k.jpg", "cju5kre09fhka0850h7b1898j.jpg", "cju40m0rjkpw80871z6n6yg1u.jpg", "cju6wt9jvvn500871hjn3t3g0.jpg", "cju2xjz2ju8pe0993ysv9wg17.jpg", "cju2t2ivz43i10878zeg8r1br.jpg"], "train_labels": ["cju5wphwwlu3m0987hh3ltg88.jpg", "cju8aj01yqeqm0850lhdz3xdw.jpg", "cju2lz8vqktne0993fuym6drw.jpg", "cju2rxm8rpbaf0993o3qr2oph.jpg", "cju3xga12iixg0817dijbvjxw.jpg", "cju5fb86jd1jp0755b1ukbhq5.jpg", "cju2nsmwjlzyl0993jl80chvz.jpg", "cju7fq7mm2pw508176uk5ugtx.jpg", "cju18kevfrojc0835bn90f1in.jpg", "cju2tvrvm53ws0801a0jfjdxg.jpg", "cju2qqn5ys4uo0988ewrt2ip2.jpg", "cju306x7w05nb0835cunv799x.jpg", "cju5c7oijaqmq09878qwgqv8n.jpg", "cju35oyvd3y850988km12hdz1.jpg", "cju2yljr0yzhw0988ecf271ly.jpg", "cju8c9akjsdjj0850s67uzlxq.jpg", "cju5ccpvqash50850kb4bs22k.jpg", "cju7db7lp2f400755tntd1ohf.jpg", "cju6vgdmivcvb08018fra5lnv.jpg", 
"cju2syxa93yw40799x2iuwabz.jpg", "cju1cvkfwqrec0993wbp1jlzm.jpg", "cju320gyvbch60801v2amdi2g.jpg", "cju7emdni2py40871ivhxjtut.jpg", "cju5f8hxdcxxn08188obby0ea.jpg", "cju2lcyfgkf5809932fn9gucn.jpg", "cju5i5oh2efg60987ez6cpf72.jpg", "cjyzkmjy8evns070165gf9dmq.jpg", "cju31y80qbawn0801twwm2l5s.jpg", "cju5bwhapakm90987c1v4z46a.jpg", "cju1hyolc7aqu0878rrkfn1lr.jpg", "cju0qx73cjw570799j4n5cjze.jpg", "cju7dbppn28nx085097654msi.jpg", "cju2i6acqvo6l0799u20fift8.jpg", "cju2raxlosl630988jdbfy9b0.jpg", "cju30xqmh0ni00835ix3batv1.jpg", "cju1alwgo30z60855fm3y23sm.jpg", "cju7fpfzq2wyf0818xxd1oziv.jpg", "cju8bk8oirjhw0817hgkua2w8.jpg", "cju34i3qvcyog0855qiejxx5w.jpg", "cju2qozsk20cq0855ugrg3cri.jpg", "cju84kplnl1y30755ropua1b0.jpg", "cju40taxlkrho0987smigg0x0.jpg", "cju18849rrsgr0988p90hkygb.jpg", "cju1cyjb5qtie0993njqne9m3.jpg", "cju33o12x2jm50988944mxq0v.jpg", "cju7cq6su27qv075574dir0r3.jpg", "cju0qoxqj9q6s0835b43399p4.jpg", "cju2igw4gvxds0878808qj398.jpg", "cju2zxja9w1eh09933609ho9z.jpg", "cju2iatlki5u309930zmgkv6h.jpg", "cju83rcnzkbsj0755x5anfrcg.jpg", "cju2xs6na81t20878pt6nkfip.jpg", "cju1fmsyf6gxb0801cimx2gle.jpg", "cju2u73dj53oz0878486k8k4b.jpg", "cju7efffp2ivf0817etg3jehl.jpg", "cju8a1jtvpt9m081712iwkca7.jpg", "cju85je7vlht70817c9jcjwi4.jpg", "cju5wcc90lu020850mjrxppv6.jpg", "cju1f320ewfyu0988ndz6blh5.jpg", "cju2zwg05a0oy0801yr73ig7g.jpg", "cju32phw2bv130801yj7bkouq.jpg", "cju32zhbnc1oy0801iyv1ix6p.jpg", "cju887ftknop008177nnjt46y.jpg", "cju32gzs6xo8x0993r8tedbpb.jpg", "cju0rx1idathl0835detmsp84.jpg", "cju2r7h21sj9608354gzks3ae.jpg", "cju5jz5fff8c50871hbe6108f.jpg", "cju35ldepdtlm0801yv79y8vu.jpg", "cju34uhepd3dd0799hs8782ad.jpg", "cju2otvvv0l7z0855x7we8cb0.jpg", "cju7ajnbo1gvm098749rdouk0.jpg", "cju1dnz61vfp40988e78bkjga.jpg", "cju846ec0kj7z08012o10klrb.jpg", "cju5eyfe9cpk90987laa7tsl3.jpg", "cju2zgbj9zmrw0835nnlzxj4c.jpg", "cju7dizi82h2i0755doucgnt3.jpg", "cju7dhpsc2dnn0818025m6857.jpg", "cju1dia8hvc6v098827mgffnm.jpg", "cju6x35ervu2808015c7eoqe4.jpg", "cju2upu4evw7g08358guwozxv.jpg", "cju77196iyshb0850ycbto50a.jpg", "cju87xn2snfmv0987sc3d9xnq.jpg", "cjyzl833ndne80838pzuq6ila.jpg", "cju8bj2ssrmlm0871gc2ug2rs.jpg", "cju7d5m0p23kn09871rk7pu3v.jpg", "cju8432cmkgq90871cxe4iptl.jpg", "cju34xspwzenf0993cyzajv9n.jpg", "cju5eftctcdbj08712gdp989f.jpg", "cju772304yw5t0818vbw8kkjf.jpg", "cju83u9ftk3ni0987qnhlcinv.jpg", "cju2tqfgw4oat0799rn0g5b2z.jpg", "cju2qfie4rvz508357kad9z5o.jpg", "cju2sszfq3uye0878sucelzk2.jpg", "cju1drnhbrb9409935wi7vkhg.jpg", "cju2i03ptvkiu0799xbbd4det.jpg", "cju2oo0wh0bqy0878biujeyhe.jpg", "cju1haab178i70799tk9z8y8x.jpg", "cju5nyu31gv8e0871zpk74a2n.jpg", "cju3v0fl3gwce0755qkjhzmd4.jpg", "cju2r2obh2bjm08553kng0rh7.jpg", "cju2srvy5440s0801y1ba9akr.jpg", "cju8bm24yrrdp081829mbo8ic.jpg", "cju8bh8surexp0987o5pzklk1.jpg", "cju2okvco06xc0799kxe5n1qh.jpg", "cju8ceacrsqkr0755hdz145es.jpg", "ck2bxpfgxu2mk0748gsh7xelu.jpg", "cju8cdeazsm8h0801jxifmzur.jpg", "cju2hw5gjlr5h0988so2qqres.jpg", "cju2suk42469908015ngmq6f2.jpg", "cju2p4ddkmzxj0993p94o62av.jpg", "cju88vx2uoocy075531lc63n3.jpg", "cju77re6fz5bb0817vp9redjg.jpg", "cju5wmvsdlx1j0871npgj8j4b.jpg", "cju787jnjzjuj0871p94nck9g.jpg", "cju5wkonqlrl409877y8zvnub.jpg", "cju1ffnjn6ctm08015perkg37.jpg", "cju2rpa30t07b0835im0erql0.jpg", "cju5ekty5ckzf07550c9u3ckk.jpg", "cju7awzmu1ncs0871hziy65zx.jpg", "cju7el6xv2k520817qxx9wdr5.jpg", "cju1gv7106qd008784gk603mg.jpg", "cju1dfeupuzlw0835gnxip369.jpg", "cju6xmqd9w0250817l5kxfnsk.jpg", "cju42py9mlqyd0818u3d1d7ga.jpg", "cju7eotqi2qea0871y8yc7tqh.jpg", "cju2ricdv2iys0878sv1adh0u.jpg", 
"cju7f4sc62xqj075597xpmuoy.jpg", "cju2zdhsczmn50988z64qwg2q.jpg", "cju2s9g11pnra0993gn4eh793.jpg", "cju88k75inzyb0850ccv5x3vk.jpg", "cju2saez63gxl08559ucjq3kt.jpg", "cju3yht87j83m08507yk1u1fg.jpg", "cju336l68y7if0993wf092166.jpg", "cjyztzaqtrv430848l8xgcerw.jpg", "cju8b8yair65w09878pyqtr96.jpg", "cju2z6ez69g4u0801qwt088lw.jpg", "cju1c0qb4tzi308355wtsnp0y.jpg", "cju5ht88gedbu0755xrcuddcx.jpg", "cju7d2q1k27nf08715zshsckt.jpg", "cju7epwj82koz098713apjnzo.jpg", "cju2rzpsmtb0f0835jabkbao1.jpg", "cju1d31sp4d4k0878r3fr02ul.jpg", "cju8bop5jrsid08716i24fqda.jpg", "cju2wve9v7esz0878mxsdcy04.jpg", "cju1b0y2e396p08558ois175d.jpg", "cju2top2ruxxy0988p1svx36g.jpg", "cju1erep75us208553i4ofwwe.jpg", "cju6v4szov55u0871qmqz3v8n.jpg", "cju76erapykj30871x5eaxh4q.jpg", "cju7ey10f2rvf0871bwbi9x82.jpg", "cju6v6g6kvdw007552x6mb0po.jpg", "cju85dx63lic408017f0l0400.jpg", "cju2hjrqcvi2j0801bx1i6gxg.jpg", "cju5huurrecm70801y680y13m.jpg", "cju2trbpkv0c00988hxla5dzz.jpg", "cju16ach3m1da0993r1dq3sn2.jpg", "cju6vifjlv55z0987un6y4zdo.jpg", "cju2qs32r1vys07999conmbvx.jpg", "cju32csyfblyh080170aa3x5p.jpg", "cju32fhnhbds40799broyoptc.jpg", "cju8bljw9rqk20801kr54akrl.jpg", "cju7fmvpk2q170987v6i3ola8.jpg", "cju7d6ux323ze0987xos3srkx.jpg", "cju85l4yjlops0801fvmnwptf.jpg", "cju1egh885m1l0855ci1lt37c.jpg", "cju6v5ilsv8hk0850rb5sgh6o.jpg", "cju8bzzy2s66m08016z6mouqt.jpg", "cju1djtprvd7b0988thwwrg09.jpg", "cju7aifyo1p3n07552nxjx51f.jpg", "cju2ulk385h170799rlklxob0.jpg", "cju424hy5lckr085073fva1ok.jpg", "cju5knbbqfipk080128cggukq.jpg", "cju5bf6hxa6m50817rbwettgu.jpg", "cju2tjrog4jy30878pawyazqc.jpg", "cju88rl5eo94l0850kf5wtrm1.jpg", "cju300m3s04fg0988uzupuf7z.jpg", "cju3u1c8tfyqx08503iedc3mx.jpg", "cju43mkj9m8wb0871qiadahub.jpg", "cju18ibp219ub08783i6o98g7.jpg", "cju5fydrud94708507vo6oy21.jpg", "cju8ashhnquqr0801rwduzt7d.jpg", "cju45ty6zn9oz0850qy4qnck1.jpg", "cju2y40d8ulqo0993q0adtgtb.jpg", "cju16whaj0e7n0855q7b6cjkm.jpg", "cju2wxv0hxs2f09884w48v8fi.jpg", "cju1hmff8tkp809931jps6fbr.jpg", "cju857ad0l88m0817qx4cwxnf.jpg", "cju7ekbo32pft0871fv7kzwb9.jpg", "cju8bff9nrfi10850fmfzbf8v.jpg", "cju85plp7lmkw0850rx42jdpf.jpg", "cju1ewnoh5z030855vpex9uzt.jpg", "cju5tenjojp1j0755ms4949h2.jpg", "cju2ntxtdzlvu0799xl3j9pan.jpg", "cju2zm0axztpe0988r8s9twjr.jpg", "cju3ykamdj9u208503pygyuc8.jpg", "cju15czxqp3lv0835jvhgzurz.jpg", "cju88v2f9oi8w0871hx9auh01.jpg", "cju8828oxnool0801qno9luhr.jpg", "cju2sxf3iqbpv09937iksn8ep.jpg", "cju0sr5ghl0nd08789uzf1raf.jpg", "cju43gfosm63n08714rpih8pe.jpg", "cju3ya7goj6at0818v2l5ay7f.jpg", "cju5hjxaae3i40850h5z2laf5.jpg", "cju30u1hbakn808019g15nb8b.jpg", "cju8bbznkrf5g0871jncffynk.jpg", "cju85citjlnfm0755i4rk5tqj.jpg", "cju7dqcwi2dz00850gcmr2ert.jpg", "cju87q6yoneim0871dl4phvkd.jpg", "cju35c4wzdhow0799h6eq4sgs.jpg", "cju7b1ygu1msd0801hywhy0mc.jpg", "cju8c5223s8j80850b4kealt4.jpg", "cju1bhnfitmge0835ynls0l6b.jpg", "cju5x7iskmad90818frchyfwd.jpg", "cju83vvmik9wa08710yeh7cuk.jpg", "cju87nkyrnb970801q84m47yt.jpg", "cju2oi8sq0i2y0801mektzvw8.jpg", "cju40wto5kxwi0755it190f2k.jpg", "cju5jx7jzf7c90871c2i9aiov.jpg", "cju6vucxvvlda0755j7msqnya.jpg", "cju3xtufwiv9c0818djsc4cqd.jpg", "cju84ffdzkrjn08183jh1fxmb.jpg", "cju8auylgqx0z0871u4o4db7o.jpg", "cju2nbdpmlmcj0993s1cht0dz.jpg", "cju6yywx1whbb0871ksgfgf9f.jpg", "cju5ft6mcd5q40987rhjgbrr6.jpg", "cju7dymur2od30755eg8yv2ht.jpg", "cju14g8o4xui30878gkgbrvqj.jpg", "cju83syhdk6gs0801rf1rekdl.jpg", "cju7do8c72dbo0801vxfzxdc4.jpg", "cju7alcgr1lsr0871riqk84z7.jpg", "cju330ofbc2l30801th5g3hw6.jpg", "cju3xzvnzj0hd0755xprz39nj.jpg", "cju2rga4psq9n09881z519xx0.jpg", 
"cju18gzrq18zw0878wbf4ftw6.jpg", "cju8apjewqrk00801k5d71gky.jpg", "cju43b8daly4408170e5ev06g.jpg", "cju3xiic0ilzp0850lusrb42j.jpg", "cju3v11mrgwwb0755u242ygye.jpg", "cju77b3wyz4160755qis4ljsb.jpg", "cju3uz4o6gr9z0850lhxyxvsj.jpg", "cju5o1vu9gz8a0818eyy92bns.jpg", "cju8ayeq7r1fb0818z1junacy.jpg", "cjyzk8qieoboa0848ogj51wwm.jpg", "cju3128yi0rpu0988o4oo5n8n.jpg", "cju1d50a94qf50855wsowacrc.jpg", "cju6x0yqbvxqt0755dhxislgb.jpg", "cju5buy2bal250818ipl6fqwv.jpg", "cju7ff97z2ow40817u2r83my5.jpg", "cju1g4nsb6ngy0799l4ezm8ab.jpg", "cju34ds2531520988qjpqt6e3.jpg", "cju6wjm81vgsc0987enk9n3pr.jpg", "cju8dic9mtppa0987swn23wbc.jpg", "cju6vta3kvazg0817qbeppjtm.jpg", "cju15wdt3zla10801odjiw7sy.jpg", "cju8aeei7q8k308173n9y4klv.jpg", "cju7aez2x1jtj0871ztezs3oi.jpg", "cju1f5x1164xv08555654c24r.jpg", "cju8chxndsre008015uisl4si.jpg", "cju5wtdu4m0im0871mix0yvc0.jpg", "cju85c2d4ln1b0755zz1z3onx.jpg", "cju7g7ba42z310987bqzbi2bq.jpg", "cju7f5ghb2r5s0801chwkxxh9.jpg", "cju16jgnyzp970878melv7r25.jpg", "cju5v8pgplg6k0755rvi2t63h.jpg", "cju1euuc65wm00799m4sjdnnn.jpg", "cju1871y11d6r0799k6cw4yze.jpg", "cju8bqxxurs6i0850mu7mtef9.jpg", "cju7ecl9i2i060987xawjp4l0.jpg", "cju7d3oc82cho0755dajlwldz.jpg", "cju326h4v1gxw08352px40p7r.jpg", "cju8bafgqrf4x0818twisk3ea.jpg", "cju84aoa3ktwn0755pfl4gfwd.jpg", "cju6xa0qmvzun0818xjukgncj.jpg", "cju85qefyln6v0850szeb9byi.jpg", "cju8c5zcbsdfz0801o5t6jag1.jpg", "cjyzul1qggwwj07216mhiv5sy.jpg", "cju40jl7skiuo0817p0smlgg8.jpg", "cju2hos57llxm08359g92p6jj.jpg", "cju7dubap2g0w0801fgl42mg9.jpg", "cju85nr8elly209872w9n5m0s.jpg", "cju42wamblrqn098798r2yyok.jpg", "cju7f0ec32txj08184asb8w5f.jpg", "cju1cdxvz48hw0801i0fjwcnk.jpg", "cju3xl264ingx0850rcf0rshj.jpg", "cju17bz250pgd0799u1hqkj5u.jpg", "cju783tmkzkqu081803g7q5vk.jpg", "cju41p90plcsx08018cnzpndc.jpg", "cju32jcdabepz0878d0cznmfe.jpg", "cju1gghyjwxt80835vx0wgxw0.jpg", "cju7arvfe1ldu0850erdmphgj.jpg", "cju1hirfi7ekp0855q0vgm9qq.jpg", "cju30525w04r10835ygp257sb.jpg", "cju5vwbr4lhqn0987a1pji0ux.jpg", "cju42qet0lsq90871e50xbnuv.jpg", "cju8aw9n1qyg10801jkjlmors.jpg", "cju7787c5yy3l080159mwqsnj.jpg", "cju15ptjtppz40988odsm9azx.jpg", "cju34ymm8d6700799uop0cw33.jpg", "cju6uy20suzbl0987rzuhz7z9.jpg", "cju310f6val1v0855xo8tc3gu.jpg", "cju7atnm31if40817pqclnjer.jpg", "cju88aq6vo1ij0755c2ey7z7n.jpg", "cju8dn0c3u2v50801k8rvq02f.jpg", "cju8doa16u5gh0818w1ywda3q.jpg", "cju77t0razbvm080106o56289.jpg", "cju84hibuktj80871u519o71q.jpg", "cju7f900s2o0k08175gl1giid.jpg", "cju422cm8lfxn0818ojicxejb.jpg", "cju8567gdlcbq0801dwwyo2jt.jpg", "cju2lyynuymli0855g7fxgbhe.jpg", "cju3x5u2tiihx0818914gzxy1.jpg", "cju5vcmrqla7i0817x4sp4pqw.jpg", "cju0s2a9ekvms080138tjjpxr.jpg", "cju43kj2pm34f0850l28ahpni.jpg", "cju41lojblbs307555jdci937.jpg", "cju88trl3ogi208716qvti51b.jpg", "cju2ro5jqsy680988pi6qsujw.jpg", "cju8c5mxls96t0850wvkvsity.jpg", "cju890guyoiti098753yg6cdu.jpg", "cju5yimthmlv80850zhoc90c2.jpg", "cju1egx9pvz2n0988eoy8jp23.jpg", "cju7d4jk723eu0817bqz2n39m.jpg", "cju2qh5le1ock0878oahaql7d.jpg", "cju8b3ka8r64u0801fh18hk7l.jpg", "cju5yhgznmkzb0801cji2vi8j.jpg", "cju2nqapmzvk20801f9us40dx.jpg", "cju8djdqztu6408506pzhlo18.jpg", "cju418rckl3ur08012psrx1r1.jpg", "cju2hqt33lmra0988fr5ijv8j.jpg", "cju6vvxsev9y30987kespucdg.jpg", "cju84jdl9kv0i0871eog9b3i9.jpg", "cjz14qsk2wci60794un9ozwmw.jpg", "cju7afqon1ip40850ue2308b6.jpg", "cju8a84g0q76m0818hwiggkod.jpg", "cju2rz4k434s70855wwx3ddtx.jpg", "cju1c8ffau5770835g0g343o8.jpg", "cju41s6nbleqy0755e2mslg0b.jpg", "cju31t8xd17bk0835rnb893jk.jpg", "cju8cbsyssiqj0871gr4jedjp.jpg", "cju5klveuff6w0871wbibgh3m.jpg", 
"cju2sggy13na70855tbeoqgha.jpg", "cju8dm2cau2km0818jsv9eeq2.jpg", "cju7dtb1e2j0t0818deq51ib3.jpg", "cju2xyd9vyi7m098831qcucse.jpg", "cju2r11x7sdgx0988o8ule0wl.jpg", "cju85omszllp30850b6rm9mi3.jpg", "cju2nfnvxzdkd0878399axlco.jpg", "cju5bhv81abur0850ean02atv.jpg", "cju77j66ez52p08019xygi0co.jpg", "cju33yemn2qb20988wfjxximx.jpg", "cju3518w2d838079939fqztbc.jpg", "cju2t62nq45jl0799odpufwx6.jpg", "cju7fen322ou10817ziqkob4k.jpg", "cju5wqonpm0e60801z88ewmy1.jpg", "cju42romflni20817etb9a0fl.jpg", "cju3381d8bz3h07991xtl7ra0.jpg", "cju33qpdvc9g0087825jhf3s9.jpg", "cju8alhigqn2h0801zksudldd.jpg", "cju8418jhkf7d0818ga2v0xq0.jpg", "cju7fcgbe2z3p07550vaflqdb.jpg", "cju428k5fldt108177s6g6f45.jpg", "cju7fnfv02tt90801djnix9m8.jpg", "cju1cj3f0qi5n0993ut8f49rj.jpg", "cju1dq3x1vgx109889c7wyirg.jpg", "cju784jpdzeae0987q5ypq883.jpg", "cju1ats0y372e08011yazcsxm.jpg", "cju7deifq2fzn0755lc8idyh8.jpg", "cju1c6yfz42md08550zgoz3pw.jpg", "cju2zdvjn9h7r08553cp4eed5.jpg", "cju32upim1z7u0988l883nqp6.jpg", "cju0s690hkp960855tjuaqvv0.jpg", "cju7dglf226g50987ohbthl19.jpg", "cju1ejj7dvqfa0835ra184v5m.jpg", "cju83k8fyjsxr0817d6nxs6r4.jpg", "cju35a77vdj4n08556jj2lgmc.jpg", "cju45v0pungu40871acnwtmu5.jpg", "cju1hs0za7jha0855vj0mdrjt.jpg", "cju5woy82m07m08505dmjg7g1.jpg", "cju34repocy5208780gswillm.jpg", "cju5tgbzhjllu08174ca41eus.jpg", "cju5hqz50e7o90850e0prlpa0.jpg", "cju0t4oil7vzk099370nun5h9.jpg", "cju30qbm1ad3x0855znuhpz9u.jpg", "cju1b3zgj3d8e0801kpolea6c.jpg", "cju17r8il13910799dr2wme2e.jpg", "cju2rnkt22xep0801as160g9t.jpg", "cju88gx09o2vk0818610zody3.jpg", "cju0u2g7pmnux0801vkk47ivj.jpg", "cju5nxkujgscq0817l9gss626.jpg", "cju2hfqnmhisa0993gpleeldd.jpg", "cju5uhrdwkmsu0817ervv91l8.jpg", "cju6vrs1ov8cr098788h8gs6j.jpg", "cju353d1eda8c07992afde611.jpg", "cju6w733bveoz0817e600tw72.jpg", "cju2nyc5f02m40801ojqbtiea.jpg", "cju45rj7ln8980850a7821fov.jpg", "cju2xf8e5y2wm08359vcgk09b.jpg", "cju5yeqiwmkgl0801fzv2douc.jpg", "cju35740hzm0g0993zl5ic246.jpg", "cju13fwthn9mq0835gacxgy01.jpg", "cju76l27oyrw907551ri2a7fl.jpg", "cju7cp6dw244p0818gncdol4m.jpg", "cju8c6hnxsdvr0801wn0vrsa6.jpg", "cju43c92lm5cj0755lorsorfg.jpg", "cju45n0oxn5vu08500yfrt9jn.jpg", "cju30ywtc0oar0835bp2en7ec.jpg", "cju5waeduln160817w0agirve.jpg", "cju5clr68b48r0755cmuvponm.jpg", "cju2zpw4q9vzr0801p0lysjdl.jpg", "cju1b75x63ddl0799sdp0i2j3.jpg", "cju87tyddnnad0755bj0wxahe.jpg", "cju5ymyd8mmdc0801ry3by1xr.jpg", "cju8ando2qqdo0818ck7i1be1.jpg", "cju3tsh4lfsok0987w6x3a0v1.jpg", "cju2z2x3nvd3c099350zgty7w.jpg", "cju3uhb79gcgr0871orbrbi3x.jpg", "cju2omjpeqj5a0988pjdlb8l1.jpg", "cju15mhjczc8z0801kit5c6di.jpg", "cju2ycp1u8g2r0799jslnp7cz.jpg", "cju888fr7nveu0818r9uwtiit.jpg", "cju426tomlhll0818fc0i7nvh.jpg", "cju5ukkg6kv7u08011x2b6zl5.jpg", "ck2bxiswtxuw80838qkisqjwz.jpg", "cju5bj926aiec07559rshy4wa.jpg", "cju2zo0fwzv580988qlijd2xa.jpg", "cju1ddr6p4k5z08780uuuzit2.jpg", "cju85a8h8llwm07559wxg4t5w.jpg", "cju2htabevq9108015qjei0x7.jpg", "cju14hjh2ob2o0835ouz3r5aa.jpg", "cju88y1mwoln50871emyfny1g.jpg", "cju3xeexgii1j0817zs68tb4g.jpg", "cju6xlygpw7bs0818n691jsq4.jpg", "cju7aklv31h4309871m29l4e7.jpg", "cju6z2616wqbk07555bvnuyr1.jpg", "cju1c4fcu40hl07992b8gj0c8.jpg", "cju34eqjpcpm508788b3lhp97.jpg", "cju8b5p40r2c60987ofa0mu03.jpg", "cju77q10sz9ug0801449wu1nu.jpg", "cju6uzxk0v83p0801rcwnexdu.jpg", "cju7csvlb22fr0850lvm45n3x.jpg", "cju1cbokpuiw70988j4lq1fpi.jpg", "cju8b2rmgr52s0801p54eyflx.jpg", "cju357rxxdaz30878y2esjpjt.jpg", "cju34m7h536wq0988xz7gx79v.jpg", "cju3yb47cj1xq0817zfotbni4.jpg", "cju2u4pymvc720988wsxrmi84.jpg", "cju41nz76lcxu0755cya2qefx.jpg", 
"cju3xhpvvimda0987ygrpzni2.jpg", "cju8b4ja9r2s808509d45ma86.jpg", "cju88nroho44508500129f1nh.jpg", "cju1g20bdwq6u0835e16xugcd.jpg", "cju43in5fm22c08175rxziqrk.jpg", "cju8cgi2kspp308011nxdtjp6.jpg", "cju16b6ynq8e40988m8vx0xnj.jpg", "cju2qu37qobl50993aw7ghcfq.jpg", "cju2spdagu1l50835da1f46fr.jpg", "cju3v664kh0px0818y4y7wolf.jpg", "cju30lncba3ny0878jwnous8n.jpg", "cju1amqw6p8pw0993d9gc5crl.jpg", "cju2lejzcy4pc0878c9rlonot.jpg", "cju2qdj95ru8g09886gfi9rsz.jpg", "cju5eq8c8ck690850vix98hv3.jpg", "cju1fyb1d69et0878muzdak9u.jpg", "cju2wtwj87kys0855kx6mddzw.jpg", "cju88itqbny720987hxizbj5y.jpg", "cju3v3ac9gyz30755hfqwyp1i.jpg", "cju5vzjoslpj708186z2fusmz.jpg", "cju2ij9uiic2l09933ljiv6gm.jpg", "cju2uwz9f5yf1085506cfamfx.jpg", "cju2lberzkdzm09938cl40pog.jpg", "cju7cufm7298k0755j09uf3of.jpg", "cju7b9vcs1luz0987ta60j1dy.jpg", "cju40poe4kt9s0755f9cnm3h5.jpg", "cju7ap09p1kz10850ldccjebj.jpg", "cju5ew4h9cqaf0818rrczkmqh.jpg", "cju88fpm4o0tl0871w1i6a4ds.jpg", "cju7bgnvb1sf808717qa799ir.jpg", "cju2m71z2ywwv080131bcrsd3.jpg", "cju41r6v2lcww0871ps8k8pf5.jpg", "cju30ajhw09sx0988qyahx9s8.jpg", "cju87zv8lni0o0850hbbecbq6.jpg", "cju5yjq1pmlgc0801z0t24bly.jpg", "cjyzuio1qgh040763k56deohv.jpg", "cjyzufihqquiw0a46jatrbwln.jpg", "cju2zrojo9kcd0878ld2epejq.jpg", "cju5vxuc5loxw0818u8xgf45p.jpg", "cju2rlqdnoz9k0993cpjae3x0.jpg", "cju8402x1kcy70801t6kz6bdi.jpg", "cju1h89h6xbnx08352k2790o9.jpg", "cju8at3s1qqqx0850hcq8nmnq.jpg", "cju886ryxnsl50801r93jai7q.jpg", "ck2bxqz3evvg20794iiyv5v2m.jpg", "cju35k2fr3vc50988c85qkrwg.jpg", "cju8c1a0ws7o208181c6lbsom.jpg", "cju7dda8w2br20818zhsuz8s7.jpg", "cju1ftaji6isw0855108yqcse.jpg", "cju8c3xs7sauj0801ieyzezr5.jpg", "cju33za6l2qy70988jhrlp2ev.jpg", "cju30ov1oah920801mi8thuyg.jpg", "cju7ez7r22qbc08015xfoz2wb.jpg", "cju5o4pk9h0720755lgp9jq8m.jpg", "cju410dnfl0960755y8lu8d79.jpg", "cju334jzo261t0835yqudnfs1.jpg", "cju2yi9tz8vky0801yqip0xyl.jpg", "cju45qbf3n9sa0987oonbkly9.jpg", "cju6ut4l8va6y0755tyw3vfqq.jpg", "cju6ur9l9v9jq0755paud9uka.jpg", "cju89z6pqpqfx0817mfv8ixjc.jpg", "cju7b10ce1mnm08011c5bwyr4.jpg", "cju175facms5f0993a5tjikvt.jpg", "cju2txjfzv60w098839dcimys.jpg", "cju1csmlc4ht10799b8ymmghg.jpg", "cju3tp94kfstl08181awh6z49.jpg", "cju2p91qir00k08350ddfif0w.jpg", "cju5hyi9yegob0755ho3do8en.jpg", "cju43h43am1dy08176gwfhmnt.jpg", "cju88msmoo3470817m441j4sg.jpg", "cju13hp5rnbjx0835bf0jowgx.jpg", "cju83ipu3jwpx0801z5pvguf8.jpg", "cju33w4sdcivk0855x879zht7.jpg", "cju2tzypl4wss0799ow05oxb9.jpg", "cju6wll7wvo3y08502pagos8m.jpg", "cju7bc95p1mdm0817yqj5jc6j.jpg", "cju30j1rgadut0801vuyrsnt8.jpg", "cju35mdz73x890835eynq1h9v.jpg", "cju43lcnum9y10755bjs7z87f.jpg", "cju7etr3y2p4t0801cdzjj8ab.jpg", "cju774fmayxif0818u2g79usw.jpg", "cju1cu1u2474n0878tt7v4tdr.jpg", "cju323ypb1fbb0988gx5rzudb.jpg", "cju33x0f22peh0988g0ln7w5v.jpg", "cju2y5zas8m7f0801d34g5owq.jpg", "cju5ddda9bkkt0850enzwatb1.jpg", "cju5ufn3skquf0818dhapnhba.jpg", "cju84gpefknwm098714oq8q61.jpg", "cju2trtjf4qjd0878a2zle9v9.jpg", "cju7fazv92ywx0755xov2erga.jpg", "cju2rn0hasxri0835nfy3buay.jpg", "cju3x4blzieu30850x10uuvbm.jpg", "cju3y9difj6th0801kd1rqm3w.jpg", "cju2osuru0ki00855txo0n3uu.jpg", "cjyzlw7f9faqr070129au64sq.jpg", "cju3u39fog1bo0871lxjrabks.jpg", "cju2yo1j1v0qz09934o0e683p.jpg", "cju33231uy4gi0993qc7b1jch.jpg", "cju2xlcqxy9c60988vjacdznb.jpg", "cju77vvcwzcm50850lzoykuva.jpg", "cju8dpa89u6l80818dj6lldh9.jpg", "cju3xwpgviwlx0871rwm15q7v.jpg", "cju1h5w4wxajx0835mc954kxy.jpg", "cju303j5r062k098835zxfds5.jpg", "cju35fxqyzt5p0993vusm54qz.jpg", "cju31w6goazci0799n014ly1q.jpg", "cju3u815rg4ek0850vvhtcvcm.jpg", 
"cju8bpctzrqkr0850zeldv9kt.jpg", "cju5f26ebcuai0818xlwh6116.jpg", "cju5g163vd6mt0817uccuga6u.jpg", "cju2zjcvj9qma0801dk71hhi0.jpg", "cju2mfjndoz700988b9lc3zeq.jpg", "cju5uxjnol2r509871qv2yeia.jpg", "cju7dwe282dc309876rco45ts.jpg", "cju5wuhm1lwm40987vugqn3vv.jpg", "cju77u1sjz77b0817ft44r3fk.jpg", "cju2yv4imv6cz099314jveiib.jpg", "cju16d65tzw9d0799ouslsw25.jpg", "cju8b6rp0r5st0850184f79xt.jpg", "cju3umoh1geet0817cmpef5am.jpg", "cju5y7buemcw80987p0r30g9f.jpg", "cju32srle1xfq083575i3fl75.jpg", "cju0roawvklrq0799vmjorwfv.jpg", "cju7b5afm1nfw0801xqm8bf8q.jpg", "cju1fb9236a110801yvg0fwju.jpg", "cju2z45kuzf6d0988nz2c819m.jpg", "cju45lbgznahl08180xz1h7u6.jpg", "cju2tpfa5uyx408359datxqqj.jpg", "cju3v72v5h1qz0818fggilwtq.jpg", "cju2yb31a8e8u0878wdashg7o.jpg", "cju6x97w4vwua0850x0997r0a.jpg", "cju42u5bjlvi10801dc13sskp.jpg", "cju77bvg0yv4r0987yh60xmjo.jpg", "cju5wrapcm2290818jsh26ppb.jpg", "cju17v6ih0u7808783zcbg1jy.jpg", "cju8d2q30tfhs0801n7lx77xl.jpg", "cju2nd7l7z98o0799gfjvyfmw.jpg", "cju5uzmaol56l0817flxh4w9p.jpg", "cju17otoe119u0799nqcbl8n1.jpg", "cju2s16zp317h0799gr67jqc2.jpg", "cju0u82z3cuma0835wlxrnrjv.jpg", "cju2zy1e49pqk0878t6ncqn12.jpg", "cju1f15k3w4ct0835cmde6ypo.jpg", "cju8bgdmqrksy0801tozdmraa.jpg", "cju3521y5d5mq0878t3ezsu4p.jpg", "cju40sdwukv3k0755y99ug1k8.jpg", "cju2pjb9v0ywn0878j5g5n69j.jpg", "cju2i3hzclw3o0988rrgh911i.jpg", "cju6yxyt0wh080871sqpepu47.jpg", "cju5x28nzm7t907558ocq4bt7.jpg", "cju1hp9i2xu8e0988u2dazk7m.jpg", "cju160wshltz10993i1gmqxbe.jpg", "cju34aozyyy830993bn16u32n.jpg", "cju7dxffn2eam0817qxosfwch.jpg", "cju8dqkrqu83i0818ev74qpxq.jpg", "cju8bysfgrzkl081786jwac09.jpg", "cju5gucasds9d0801019axylx.jpg", "cju5vi4nxlc530817uoqm2m7a.jpg", "cju5wj0faly5008187n6530af.jpg", "cju30mm25a53s0799qa5wiqe8.jpg", "cju5ca9hcatkc0801jzwe7tfx.jpg", "cju7bd1qu1mx409877xjxibox.jpg", "cju2yg5ht8i4p087800js8hp4.jpg", "cju5y4hgqmk0i08180rjhbwvp.jpg", "cju1euant5l960878iqj5vvto.jpg", "cju1dg44i4z3w0801nyz4p6zf.jpg", "cju2uy8ox62jo0801g88hh42z.jpg", "cju2t16vuucaq0835xcpsivn2.jpg", "cju5u4pywk81x0817vn9pe14z.jpg", "cju8ca4geseia0850i2ru11hw.jpg", "cju83wwn1k55e0850kw6i2d81.jpg", "cju45ofd9ne1j0801ri8dup7t.jpg", "cju2uzabhs6er0993x3aaf87p.jpg", "cju8a56vxpy780850r45yu4wk.jpg", "cju45pm27n80u08174kyow1gj.jpg", "cju3u4lxmg59o0755rz42b9en.jpg", "cju1gkndf6yi10801o1qnje19.jpg", "cju30gxjq0djk0988jytm49rs.jpg", "cju45jpvfn6c809873pv1i34s.jpg", "cju7adqyj1jcx08712r1ro5gx.jpg", "cju7dmlgf2ebw0871ieqas5fh.jpg", "cju8d4jgatgpj0871q2ophhkm.jpg", "cju2wx0gh7fpz0878wwyd9ep8.jpg", "cju2x7vw87mu30878hye2ca0m.jpg", "cju1hhj6mxfp90835n3wofrap.jpg", "cju30ftgja7170855xl9bkdm0.jpg", "cju43eigtm6ev0801mv0m96t1.jpg", "cju1fjsb4sipq09931lvd8e41.jpg", "cju77g99iyxc00817zqi2ppor.jpg", "cju34zivp3fq80988opxbaqyn.jpg", "cju2t9tdwuk700835kv0ljmtl.jpg", "cju5wi6bqlxy90755bu227nvb.jpg", "cju2rmd2rsw9g09888hh1efu0.jpg"], "valid_labels": ["cju5vgawslbe30987ndeepc1b.jpg", "ck2bxlujamu330725szlc2jdu.jpg", "cju2hx006vidl0799igm81vmh.jpg", "cju7aqkue1i2k09879uzcpt8r.jpg", "cju5uget8krjy0818kvywd0zu.jpg", "cju8dk7eztzup08182yxko5zh.jpg", "cju1c3218411b08014g9f6gig.jpg", "cju7f6cqy2ur20818t1saazbm.jpg", "cju6wi3akvn8r0801px8eligc.jpg", "cju5enq1tcn1i0755hnkon787.jpg", "cju2hewssldzx0835ep795xu0.jpg", "ck2bxskgxxzfv08386xkqtqdy.jpg", "cju2np2k9zi3v079992ypxqkn.jpg", "cju5bycdkalkb09875f7bfrvx.jpg", "cju2hlm19vjjf0801o69qnber.jpg", "cju7amjna1ly40871ugiokehb.jpg", "cju34c1xfyz920993itxkkfad.jpg", "cju88oh0po9gq0801nge4tgr1.jpg", "cju5i39mreass0817au8p22zy.jpg", "cju5f0dezct4q08183ydw11dx.jpg", "cju5cky5xb0ay0801oxet697t.jpg", 
"cju7ehljc2or70871261br8ai.jpg", "cju183od81ff608017ekzif89.jpg", "cju7evxt12m730987rxivne3x.jpg", "cju8cj10qsrau0871o2dr6ai1.jpg", "cju2y26c588bo07993ksd8eoz.jpg", "cju2zkpdl9h7t0799ix60teqg.jpg", "cju85bf1algsq0871y9gtlq97.jpg", "cju40w3hbkwpn08015rbs3wko.jpg", "cju17hw9hr9c5098800fu4u8e.jpg", "cju6wuojavt740818b5qcv3iw.jpg", "cju2qvuj1s9ok0835tp2k4ozh.jpg", "cju33jon3ygbj0993pu22a4k6.jpg", "cju7dsrtb2f8i085064kwugfk.jpg", "cju77k828z46w0871r0avuoo9.jpg", "cju1aqqv02qwz0878a5cyhr67.jpg", "cju412uwlkva50850d1ps1ww7.jpg", "cju2nguelpmlj0835rojdn097.jpg", "cju35eg0tdmjt085525sb4bua.jpg", "cju7dn24o296i09871qfxb8s2.jpg", "cju2ma647l0nj0993ot4deq2q.jpg", "cju1expq45zst0855rjqwwj4m.jpg", "cju88t4fvokxf07558ymyh281.jpg", "cju83nwu1jxte0987h1krpfmv.jpg", "cju1fj6axwfp30835ukhuzhw9.jpg", "cju8b0jr0r2oi0801jiquetd5.jpg", "cju7bmi1v1pnj0987pa52jjok.jpg", "ck2da7fwcjfis07218r1rvm95.jpg", "cju2zblxw9848087853csbrx1.jpg", "cju5udcufki0s09874ll1dbr5.jpg", "cju2rqo702wpx0855fn7d5cxh.jpg", "cju88l66no10s0850rsda7ej1.jpg", "cju83yddek68q0850d2x7zfkm.jpg", "cju6z1bzbwfq50817b2alatvr.jpg", "cju5hi52odyf90817prvcwg45.jpg", "cju324q101fhe08350wae9cif.jpg", "cju5cetivauok0987ok3e5bre.jpg", "cju5u6wf0kh1t0755bg1ssixv.jpg", "cju8clorgsuwn08714toqb7v6.jpg", "cju5b9oyda4yr0850g9viziyv.jpg", "cju2qtee81yd708787bsjr75d.jpg", "cju3tvffffx5f0818t5ov22al.jpg", "cju1d96gsv62d09881b3wecw2.jpg", "cju5yclrymlgj0818k426ud6z.jpg", "cju2i8br1vqtd08784u6vmcjk.jpg", "cju7dvl5m2n4t0755hlnnjjet.jpg", "cju5hl8nee8a40755fm8qjj0o.jpg", "cju7d8m3b2e210755l8fj1yph.jpg", "cju1f79yhsb5w0993txub59ol.jpg", "cju7frtqu2xa20818wq8r9fzf.jpg", "cju35atpxdjot0855q46aqrd0.jpg", "cju3y0pjrj1c30755nxekxccj.jpg", "cju2ti1du4idn0878giuozonw.jpg", "cju2yyhsp933j0855hp32e012.jpg", "cju2r6mt2om21099352pny5gw.jpg", "cju3ttznuftyf09875t11850w.jpg", "cju42xpi8lw4w0871ve317a1p.jpg", "cju5u8gz4kj5b07552e2wpkwp.jpg", "cju7ezs7g2mxm098787atbran.jpg", "cju17g6ykn1cs0993dww6qdi8.jpg", "cju8arof2qpf20850ifr1bnqj.jpg", "cju3xuj20ivgp0818mij8bjrd.jpg", "cju34sh43d8zm08019xbwhc0o.jpg", "cju2zp89k9q1g0855k1x0f1xa.jpg", "cju7agj961l2r0818z29iq8yn.jpg", "cju3ua8u0g9rg0801uayhdxhu.jpg", "cju2hdr06v2bq0799mbm3bks1.jpg", "cju2qz06823a40878ojcz9ccx.jpg", "cju8azmhcr66e0755t61atz72.jpg", "cju87ox0kncom0801b98hqnd2.jpg", "cju1fuoa4wmc50835qfd11sp9.jpg", "cju88cddensj00987788yotmg.jpg", "cju7azuu31mia0801pf9ib5ed.jpg", "cju5k3j3uf6de0817hszzfr7n.jpg", "cju5x00l6m5j608503k78ptee.jpg", "cju430pm2lz0y0755jkhcc3d1.jpg", "cju0vtox5ain6099360pu62rp.jpg", "cju7dp3dw2k4n0755zhe003ad.jpg", "cju885ikhnmkn09878s2lqtuh.jpg", "cju2zr3c3vwb00993jn06bbaz.jpg", "cju8bw697rwg308177tg8huas.jpg", "cju1efbr0rqxz09931z0lf4vf.jpg", "cju7d7aut2a2p0818z4uxc6cd.jpg", "cju2sevf53lkx08558h5bpaig.jpg", "cju15l5ubz9yh0855b3ivdpse.jpg", "cju2nnqrqzp580855z8mhzgd6.jpg", "cju2mh8t6p07008350e01tx2a.jpg", "cju30nyxe0gfb0835p256yoju.jpg", "cju85rkbnlo1c08503uxcpax1.jpg", "cju3280wv1ir009882jze27tc.jpg", "cju7b2l561oas0871decgslaf.jpg", "cju2m56cryvqd0801gtn2yp8t.jpg", "cju5boicjagt20871b1fotkh4.jpg", "ck2bxknhjvs1x0794iogrq49k.jpg", "cju7crgxa28550755wbsgqkel.jpg", "cju1brhsj3rls0855a1vgdlen.jpg", "cju8b542nr81x0871uxnkm9ih.jpg", "cju87li0zn3yb0817kbwgjiz8.jpg", "cju1f8w0t65en0799m9oacq0q.jpg", "cju8a3nhbpwnb0850d37fo2na.jpg", "cju1bm8063nmh07996rsjjemq.jpg", "cju2z1nxlzaj40835wj81s1iy.jpg", "cju83qd0yjyht0817ktkfl268.jpg", "cju414lf2l1lt0801rl3hjllj.jpg", "cju2yw4s7z7p20988lmf2gdgd.jpg", "cju5fs6j6d8350801vglraq4u.jpg", "cju7er4kc2opa0801anuxc0eb.jpg", "cju5bdwa3aatx0818b79i18zf.jpg", 
"cjyzurzvohqnr0794es1itzek.jpg", "ck2bxw18mmz1k0725litqq2mc.jpg", "cju41z76wlgbz0801qdetlvby.jpg", "cju84ih17kp5l09876bkooocl.jpg", "cju5vbo6jldrt0871jf6f1700.jpg", "cju7es23b2vcp0755gpbm9s7v.jpg", "cju6ywm40wdbo0987pbftsvtg.jpg", "cju32l161bi1v07990vm376in.jpg", "cju5fi0yxd3ei0801v7u0yudn.jpg", "cju884985nlmx0817vzpax3y4.jpg", "cju32qr9tbvsj08013pkpjenq.jpg", "cju7flevb2wii08188otgs9p2.jpg", "cju16fpvhzypl0799p9phnlx6.jpg", "cju1dhfok4mhe0878jlgrag0h.jpg", "cju5xq3tdm9fn0987pbedxdg5.jpg", "cju7bb3ss1uoo0755pmhyco7t.jpg", "cju6us80mv1b50871ebyq2wxa.jpg", "cju2p0eveqtdc0835gpi3p93i.jpg", "cju7apr0c1qqm0755s7msqot4.jpg", "cju34o6dbd2lo0855aqlcy1hs.jpg", "cju2xd75m82720801q4s4ik3n.jpg", "cju7ebe962hr409872ovibahw.jpg", "cju1fr4etsmrr09933u4t4aql.jpg", "cju0ue769mxii08019zqgdbxn.jpg", "cju7cl8zm1xcu0817ado0jpas.jpg", "cju5ktjwofed70817eg58ef7u.jpg", "cju843yjskhq30818qre4rwm2.jpg", "cju8cwy02t9eq08185qn12c02.jpg", "cju892fesoq2g0801n0e0jyia.jpg", "cju3ul8dogf1z09872y2ecowp.jpg", "cju85fc11ljr40818edpb0inh.jpg", "cju8c2rqzs5t80850d0zky5dy.jpg"], "test_labels": ["cju8abobpqbir08189u01huru.jpg", "cju8axq24r4an0755yhv9d4ly.jpg", "cju5cu8qkb84x08186jwo8yin.jpg", "cju30k2z40ds308353kdew70n.jpg", "cju6vqarjv7yo0987q4b1btk1.jpg", "cju1819curo000988pd5xcqme.jpg", "cju7d9seq29zd0871nzl2uu5m.jpg", "cju31ugmfb3dz0855xtqshki6.jpg", "cju858eswlepn0871pzvdrhj1.jpg", "cju5fw37edaae0801vkwvocn7.jpg", "cju83kxitjv340987z09m0ezy.jpg", "cjyzkpsbjdsjq07211dfi4sru.jpg", "cju2z9vlp9j0w0801oag91sy9.jpg", "cju0tl3uz8blh0993wxvn7ly3.jpg", "cju33eqwbcch208012jikwdky.jpg", "cju8bssulrrcy0987h1vq5060.jpg", "cju2qxxko229x08786gvxxhur.jpg", "cju6wn57mvooj0850rp78hhy7.jpg", "cju7ejm2l2ncl0801wq6y84nw.jpg", "cju7b3f5h1sm40755i572jden.jpg", "cju2pmhtr17a00855cvpelzb0.jpg", "cju2u2b9o4zvp08788qb9nqxj.jpg", "cju85ia1slh220987y7c20sm2.jpg", "cju439oazm2fu0871ma0vvrft.jpg", "cju5hwonqedw10801vsd3w6kk.jpg", "cju1gi7jlwyld0835cdf6g6qz.jpg", "cjyzjzssvd8pq0838f4nolj5l.jpg", "cju17x0j4nfc10993y31pvlgs.jpg", "cju1fm3id6gl50801r3fok20c.jpg", "cju30bmab08bi0835mvlr6e0r.jpg", "cju41kd7yl4nm0850gil5qqwh.jpg", "cju2s2527pfyr0993l3h1149a.jpg", "cju2urqpwvxw70835rvndvtsi.jpg", "cju7et17a2vjk0755e743npl1.jpg", "cju7fob3x301u0755x985pmmq.jpg", "cju17z0qongpa0993de4boim4.jpg", "cju2phaksnahz0993yxogjcpv.jpg", "cju8b1v3br45u087189kku66u.jpg", "cju8aqq8uqmoq0987hphto9gg.jpg", "cju33mirdc8mj0799k33wzoes.jpg", "cju2y0z6g87p10878fpk5d3rq.jpg", "cju6vvb8svhed0801jjcquh5e.jpg", "cju2t3ibkuecr0835o7si16zv.jpg", "cju8914beokbf0850isxpocrk.jpg", "cju8chdlqsu620755azjty1tj.jpg", "cju43o6n7m9nk087191ijwqq9.jpg", "cju7bf1lp1shi081835vs84lc.jpg", "cju2ysg748ru80878sp6j0gm0.jpg", "cju88z8bson4h0871nnd7fdxo.jpg", "cju8brv16rx7f0818uf5n89pv.jpg", "cju7da88w2eod0755wejzynvt.jpg", "cju7eueum2oqn0850rodmx8zo.jpg", "cju773hsyyosz0817pk1e7sjq.jpg", "cju358pwtdby20878cg7nm0np.jpg", "cju2oq5570avm079959o20op1.jpg", "cju3y54kwj3nr0801biidlb4e.jpg", "cju5bmhdcafs909878qfzrqzi.jpg", "cju83h9ysjwe808716nt35oah.jpg", "cju5chrxxawka0871qcj171yz.jpg", "cju8czvnztbf40871b4m7t78w.jpg", "cju3xvoo2iqlc0817eku2r3wl.jpg", "cju7f9umg2olj0987fj5y285w.jpg", "cju87vqa0ndwg0850onjdz7ol.jpg", "cju2ouil2mssu0993hvxsed6d.jpg", "cju2zi4l09f5807991s8do2b2.jpg", "cju5k503sfa5f0871lx0rpu5y.jpg", "cju87z6o6nh73085045bzsx6o.jpg", "cju5xopi0md7q0871r1sjc1av.jpg", "cju40r6jrksyk0871wg98zgho.jpg", "cju5xneamme2p0801qdf7fdwv.jpg", "cju3xjqtpikx50817tppy6g84.jpg", "cju8bn7m2rmm70817hgxpb1uq.jpg", "cju42nm68lpyo0818xvvqmupq.jpg", "cju88q6h6obpd0871ckmiabbo.jpg", "cju3wstckialg0871xs0vevsj.jpg", 
"cju2y8s56ymqr083541ggdsml.jpg", "cju5ygh1zmmdi0755uod5e17i.jpg", "cju7bfx651qr80801cs7epotb.jpg", "cju7d7tly27h408016fyp5nr7.jpg", "cju77idwfz36d0871tzfzz51i.jpg", "cju7ddtz729960801uazp1knc.jpg", "cju30ia8da2bq0799klnehml2.jpg", "cju3y79ofj3va0871uqfb1mzo.jpg", "cju7cue9b232j0801qdzk1ykj.jpg", "cju5vutu7ll8w0871dfp92n9p.jpg", "cju2xa3i4y0160988i679zsqd.jpg", "cju6x4t13vyw60755gtcf9ndu.jpg", "cju7fd6yt2p740987wkr8exo1.jpg", "cju2xbk0080y80801eghyddi2.jpg", "cju32a52lb9rc0799xi40qs00.jpg", "cju42tauqlo5p08171l3cuo4b.jpg", "cju7ae7bq1f820987toc8si1d.jpg", "cju5y84q3mdv50817eyp82xf3.jpg", "cju893jmdompz0817xn3g1w4h.jpg", "cju2uokeg5jm20799xwgsyz89.jpg", "cju42dwedlmk60871jbgu4ehi.jpg", "cju35i2e63uxr0835h7zgkg9k.jpg", "cju6z9a9kwsl007552s49rx6i.jpg", "cju15jr8jz8sb0855ukmkswkz.jpg", "cjyzu9th0qt4r0a46pyl4zik0.jpg", "cju89y9h0puti0818i5yw29e6.jpg", "cju7bduyq1rjf08719giru9ho.jpg", "cju2xwm1s84l50799i60mq0pu.jpg", "cju7dlk532dsh0871zvr6qz0r.jpg", "cju85mpuglq8k0818d2it6hzb.jpg", "cju5xkwzxmf0z0818gk4xabdm.jpg", "cju8cattbsivm0818p446wgel.jpg", "cju2pag1f0s4r0878h52uq83s.jpg", "cju5x15djm7ae0755h8czf6nt.jpg", "cju5c5xc7algd0817pb1ej5yo.jpg", "cju32pzh9bpw10855q4vaxfhe.jpg", "cju6v3bb2v7xo085090blubyw.jpg", "cju83mki1jv5w0817kubxm31r.jpg", "cju5bbtwsa8cl0987wgfsqpao.jpg", "cju14pxbaoksp0835qzorx6g6.jpg", "cju785htizjzo08017tvlhtg4.jpg", "cju42m60jlpcm08186kqppzqv.jpg", "cju847pxykriq0755268ktrk2.jpg", "cju3x9lttikfb0818a0g104zn.jpg", "cju1cnnziug1l0835yh4ropyg.jpg", "cju76lsehyia10987u54vn8rb.jpg", "cju7fbndk2sl608015ravktum.jpg", "cju5cjh3xattc0817j2vbulzi.jpg", "cju3uwz6ogsp10801h2r3bj5l.jpg", "cju34ouumcznz07996gg1xq7v.jpg", "cju3x2s11ibzi0817kk284k0j.jpg", "cju34fojcctcf0799ebolbvkn.jpg", "cju3v56bwgy8v0871w14pz8fx.jpg", "cju87r56lnkyp0755hz30leew.jpg", "cju2r91dg2k090801bh0xzbxk.jpg", "cju7ev2b12owa08500bpfpwyw.jpg", "cju76o55nymqd0871h31sph9w.jpg", "ck2395w2mb4vu07480otsu6tw.jpg", "cju2pkwt3r8b90988v2ywq1px.jpg", "cju33belnbyhm0878yxl42233.jpg", "cju7ahtkb1jr90801jck4kbds.jpg", "cju8adb60qbiu080188mxpf8d.jpg", "cju5wrrs0m2af0818vmnajbtw.jpg", "cju2wzu8wxtgu09880ku9x1pg.jpg", "cju3y21quj0ir0818kgjagr15.jpg", "cju87mrypnb1e0818scv1mxxg.jpg", "cju0sxqiclckk08551ycbwhno.jpg", "cju5h57xedz5h0755mjpc8694.jpg", "cju1cfhyg48bb0799cl5pr2jh.jpg", "cju8c82iosagu0817l74s4m5g.jpg", "cju30df2j09dd08351ayx2t6w.jpg", "cju2rkjfwoxys0993x768l1j2.jpg", "cjyzu3reghjya0794w7pwoi50.jpg", "cju7druhp2gp308715i6km7be.jpg", "cju5w7xn0lrkq0801f9k0htgx.jpg", "cju40u30gkuzc0871rq7t666d.jpg", "cju6v1m1xv07w09870ah3njy1.jpg", "cju849c23kgnk0817cgv2hw1e.jpg", "cju5k7r0yf98c09878csbxb4d.jpg", "cju8a2itsq4dv0755ntlovpxe.jpg", "cju6z7e4bwgdd0987ogkzq9kt.jpg", "cju7avvi51iox0817ym55y6tt.jpg", "cju0qkwl35piu0993l0dewei2.jpg", "cju85hguellg50818kwu3s8d1.jpg", "cju302fqq9spc0878rrygyzzz.jpg", "cju8amfdtqi4x09871tygrgqe.jpg", "cju6z600qwh4z081700qimgl9.jpg", "cju7ea4om2l910801bohqjccy.jpg", "cju45t5ddnbio0987qtqzx762.jpg", "cju84dsvaklpx098750hp83x4.jpg", "cju1cqc7n4gpy0855jt246k68.jpg", "cju5thdbrjp1108715xdfx356.jpg", "cju87kbcen2av0987usezo8kn.jpg", "cju8b7aqtr4a00987coba14b7.jpg", "cju5xjn5mm78b09871spyqhhr.jpg", "cju3x7xsaijq80818f0psavav.jpg", "cju5fu081d8gc0818l3yylujk.jpg", "cju171py4qiha0835u8sl59ds.jpg", "cju2qn2fzs1vy0988l243cvzy.jpg", "cju7d1tvt25bu08019dvw3uff.jpg", "cju2ueb6j5ado0878vf5md13o.jpg", "cju13cgqmnhwn0988yrainhcp.jpg", "cju3tx1qyg0c907552fglumhc.jpg", "cju31rb7vb6110801p9rhacuw.jpg", "cju77afzlz3kp07550x5nafzs.jpg", "cju2hugv9vget0799hhk7ksvg.jpg", "cju88evxanv9r08176zkeovec.jpg", 
"cju6xifswvwbo0987nibtdr50.jpg", "cju43jcqim2cp08172dvjvyui.jpg", "cju5es375cnzy0801nkq35ffs.jpg", "cju7dff9529h208503w60lbil.jpg", "cju2hgsptlfam0835o3b59h1o.jpg", "cju8buos5rz9b08715lfr0f4f.jpg", "cju7ctvqn25dy08186g442m1r.jpg", "cju7dz5yy2i7z0801ausi7rna.jpg", "cju7b4mtw1n9n080186209f3d.jpg", "cju42g865lorv07552ytz6xxa.jpg", "cju5von04litr08718j8po40a.jpg", "cju7eea9b2m0z0801ynqv1fqu.jpg", "cju8bi8q7rlmn0871abc5ch8k.jpg", "cju5kre09fhka0850h7b1898j.jpg", "cju40m0rjkpw80871z6n6yg1u.jpg", "cju6wt9jvvn500871hjn3t3g0.jpg", "cju2xjz2ju8pe0993ysv9wg17.jpg", "cju2t2ivz43i10878zeg8r1br.jpg"]} \ No newline at end of file diff --git a/files_split/MMWHS_CT_Heart.json b/files_split/MMWHS_CT_Heart.json new file mode 100644 index 0000000000000000000000000000000000000000..cf50b4417dd651be31ae4cca0129a321042f53c5 --- /dev/null +++ b/files_split/MMWHS_CT_Heart.json @@ -0,0 +1 @@ +{"train": ["ct_train_1020_image_frame_32.png", "ct_train_1006_image_frame_21.png", "ct_train_1018_image_frame_171.png", "ct_train_1018_image_frame_142.png", "ct_train_1017_image_frame_141.png", "ct_train_1003_image_frame_164.png", "ct_train_1018_image_frame_120.png", "ct_train_1016_image_frame_277.png", "ct_train_1017_image_frame_143.png", "ct_train_1001_image_frame_339.png", "ct_train_1006_image_frame_164.png", "ct_train_1001_image_frame_45.png", "ct_train_1016_image_frame_158.png", "ct_train_1014_image_frame_298.png", "ct_train_1017_image_frame_115.png", "ct_train_1017_image_frame_11.png", "ct_train_1003_image_frame_161.png", "ct_train_1015_image_frame_257.png", "ct_train_1009_image_frame_104.png", "ct_train_1017_image_frame_127.png", "ct_train_1001_image_frame_264.png", "ct_train_1014_image_frame_93.png", "ct_train_1017_image_frame_51.png", "ct_train_1001_image_frame_249.png", "ct_train_1014_image_frame_337.png", "ct_train_1014_image_frame_88.png", "ct_train_1017_image_frame_98.png", "ct_train_1005_image_frame_167.png", "ct_train_1015_image_frame_98.png", "ct_train_1014_image_frame_188.png", "ct_train_1020_image_frame_132.png", "ct_train_1011_image_frame_30.png", "ct_train_1020_image_frame_90.png", "ct_train_1020_image_frame_5.png", "ct_train_1017_image_frame_155.png", "ct_train_1006_image_frame_82.png", "ct_train_1011_image_frame_13.png", "ct_train_1016_image_frame_62.png", "ct_train_1009_image_frame_161.png", "ct_train_1003_image_frame_202.png", "ct_train_1003_image_frame_278.png", "ct_train_1003_image_frame_293.png", "ct_train_1020_image_frame_244.png", "ct_train_1011_image_frame_118.png", "ct_train_1016_image_frame_5.png", "ct_train_1005_image_frame_104.png", "ct_train_1017_image_frame_256.png", "ct_train_1001_image_frame_255.png", "ct_train_1014_image_frame_248.png", "ct_train_1009_image_frame_118.png", "ct_train_1001_image_frame_286.png", "ct_train_1020_image_frame_198.png", "ct_train_1003_image_frame_86.png", "ct_train_1015_image_frame_48.png", "ct_train_1009_image_frame_174.png", "ct_train_1003_image_frame_256.png", "ct_train_1017_image_frame_233.png", "ct_train_1008_image_frame_169.png", "ct_train_1009_image_frame_123.png", "ct_train_1009_image_frame_51.png", "ct_train_1003_image_frame_192.png", "ct_train_1009_image_frame_40.png", "ct_train_1006_image_frame_66.png", "ct_train_1009_image_frame_285.png", "ct_train_1014_image_frame_6.png", "ct_train_1014_image_frame_340.png", "ct_train_1011_image_frame_221.png", "ct_train_1006_image_frame_118.png", "ct_train_1006_image_frame_102.png", "ct_train_1003_image_frame_100.png", "ct_train_1015_image_frame_47.png", "ct_train_1014_image_frame_149.png", 
"ct_train_1016_image_frame_179.png", "ct_train_1011_image_frame_53.png", "ct_train_1008_image_frame_220.png", "ct_train_1001_image_frame_350.png", "ct_train_1020_image_frame_292.png", "ct_train_1006_image_frame_131.png", "ct_train_1001_image_frame_31.png", "ct_train_1006_image_frame_162.png", "ct_train_1014_image_frame_194.png", "ct_train_1001_image_frame_291.png", "ct_train_1020_image_frame_285.png", "ct_train_1020_image_frame_288.png", "ct_train_1016_image_frame_114.png", "ct_train_1005_image_frame_169.png", "ct_train_1009_image_frame_215.png", "ct_train_1008_image_frame_108.png", "ct_train_1006_image_frame_80.png", "ct_train_1001_image_frame_24.png", "ct_train_1008_image_frame_221.png", "ct_train_1014_image_frame_160.png", "ct_train_1011_image_frame_164.png", "ct_train_1006_image_frame_238.png", "ct_train_1006_image_frame_19.png", "ct_train_1020_image_frame_274.png", "ct_train_1018_image_frame_150.png", "ct_train_1003_image_frame_167.png", "ct_train_1008_image_frame_173.png", "ct_train_1008_image_frame_75.png", "ct_train_1014_image_frame_37.png", "ct_train_1016_image_frame_323.png", "ct_train_1014_image_frame_2.png", "ct_train_1009_image_frame_206.png", "ct_train_1001_image_frame_69.png", "ct_train_1014_image_frame_217.png", "ct_train_1008_image_frame_103.png", "ct_train_1016_image_frame_185.png", "ct_train_1011_image_frame_86.png", "ct_train_1014_image_frame_33.png", "ct_train_1009_image_frame_43.png", "ct_train_1020_image_frame_278.png", "ct_train_1020_image_frame_3.png", "ct_train_1016_image_frame_91.png", "ct_train_1015_image_frame_95.png", "ct_train_1006_image_frame_111.png", "ct_train_1015_image_frame_293.png", "ct_train_1018_image_frame_82.png", "ct_train_1015_image_frame_141.png", "ct_train_1014_image_frame_222.png", "ct_train_1008_image_frame_69.png", "ct_train_1015_image_frame_198.png", "ct_train_1014_image_frame_241.png", "ct_train_1017_image_frame_58.png", "ct_train_1016_image_frame_325.png", "ct_train_1020_image_frame_179.png", "ct_train_1016_image_frame_242.png", "ct_train_1014_image_frame_116.png", "ct_train_1001_image_frame_127.png", "ct_train_1001_image_frame_244.png", "ct_train_1014_image_frame_275.png", "ct_train_1005_image_frame_125.png", "ct_train_1008_image_frame_20.png", "ct_train_1001_image_frame_93.png", "ct_train_1016_image_frame_113.png", "ct_train_1014_image_frame_331.png", "ct_train_1011_image_frame_89.png", "ct_train_1006_image_frame_227.png", "ct_train_1016_image_frame_152.png", "ct_train_1003_image_frame_219.png", "ct_train_1014_image_frame_157.png", "ct_train_1017_image_frame_97.png", "ct_train_1005_image_frame_144.png", "ct_train_1017_image_frame_220.png", "ct_train_1015_image_frame_65.png", "ct_train_1001_image_frame_16.png", "ct_train_1011_image_frame_94.png", "ct_train_1017_image_frame_76.png", "ct_train_1020_image_frame_174.png", "ct_train_1001_image_frame_234.png", "ct_train_1017_image_frame_198.png", "ct_train_1001_image_frame_273.png", "ct_train_1006_image_frame_197.png", "ct_train_1001_image_frame_157.png", "ct_train_1011_image_frame_77.png", "ct_train_1011_image_frame_192.png", "ct_train_1003_image_frame_294.png", "ct_train_1001_image_frame_1.png", "ct_train_1006_image_frame_60.png", "ct_train_1001_image_frame_136.png", "ct_train_1001_image_frame_189.png", "ct_train_1003_image_frame_128.png", "ct_train_1020_image_frame_301.png", "ct_train_1001_image_frame_130.png", "ct_train_1015_image_frame_186.png", "ct_train_1008_image_frame_32.png", "ct_train_1014_image_frame_121.png", "ct_train_1003_image_frame_257.png", 
"ct_train_1014_image_frame_353.png", "ct_train_1016_image_frame_71.png", "ct_train_1006_image_frame_239.png", "ct_train_1003_image_frame_105.png", "ct_train_1005_image_frame_129.png", "ct_train_1005_image_frame_82.png", "ct_train_1020_image_frame_254.png", "ct_train_1018_image_frame_76.png", "ct_train_1017_image_frame_64.png", "ct_train_1020_image_frame_300.png", "ct_train_1001_image_frame_103.png", "ct_train_1001_image_frame_318.png", "ct_train_1015_image_frame_152.png", "ct_train_1014_image_frame_18.png", "ct_train_1015_image_frame_22.png", "ct_train_1006_image_frame_246.png", "ct_train_1020_image_frame_91.png", "ct_train_1018_image_frame_9.png", "ct_train_1008_image_frame_100.png", "ct_train_1017_image_frame_31.png", "ct_train_1001_image_frame_92.png", "ct_train_1017_image_frame_225.png", "ct_train_1008_image_frame_21.png", "ct_train_1001_image_frame_123.png", "ct_train_1014_image_frame_342.png", "ct_train_1006_image_frame_217.png", "ct_train_1016_image_frame_222.png", "ct_train_1001_image_frame_101.png", "ct_train_1001_image_frame_290.png", "ct_train_1017_image_frame_179.png", "ct_train_1001_image_frame_35.png", "ct_train_1005_image_frame_61.png", "ct_train_1020_image_frame_147.png", "ct_train_1009_image_frame_89.png", "ct_train_1006_image_frame_90.png", "ct_train_1009_image_frame_139.png", "ct_train_1016_image_frame_93.png", "ct_train_1015_image_frame_296.png", "ct_train_1020_image_frame_133.png", "ct_train_1015_image_frame_15.png", "ct_train_1020_image_frame_213.png", "ct_train_1015_image_frame_170.png", "ct_train_1017_image_frame_261.png", "ct_train_1005_image_frame_2.png", "ct_train_1009_image_frame_53.png", "ct_train_1006_image_frame_70.png", "ct_train_1011_image_frame_149.png", "ct_train_1014_image_frame_318.png", "ct_train_1020_image_frame_38.png", "ct_train_1001_image_frame_90.png", "ct_train_1001_image_frame_78.png", "ct_train_1015_image_frame_178.png", "ct_train_1008_image_frame_187.png", "ct_train_1014_image_frame_192.png", "ct_train_1020_image_frame_286.png", "ct_train_1003_image_frame_296.png", "ct_train_1018_image_frame_31.png", "ct_train_1006_image_frame_56.png", "ct_train_1015_image_frame_122.png", "ct_train_1014_image_frame_17.png", "ct_train_1001_image_frame_176.png", "ct_train_1003_image_frame_199.png", "ct_train_1005_image_frame_103.png", "ct_train_1014_image_frame_120.png", "ct_train_1001_image_frame_233.png", "ct_train_1006_image_frame_110.png", "ct_train_1008_image_frame_51.png", "ct_train_1006_image_frame_133.png", "ct_train_1018_image_frame_1.png", "ct_train_1003_image_frame_144.png", "ct_train_1018_image_frame_178.png", "ct_train_1015_image_frame_254.png", "ct_train_1009_image_frame_166.png", "ct_train_1014_image_frame_227.png", "ct_train_1003_image_frame_267.png", "ct_train_1006_image_frame_79.png", "ct_train_1003_image_frame_94.png", "ct_train_1005_image_frame_162.png", "ct_train_1009_image_frame_100.png", "ct_train_1001_image_frame_224.png", "ct_train_1020_image_frame_22.png", "ct_train_1011_image_frame_75.png", "ct_train_1011_image_frame_206.png", "ct_train_1011_image_frame_144.png", "ct_train_1020_image_frame_262.png", "ct_train_1020_image_frame_303.png", "ct_train_1015_image_frame_189.png", "ct_train_1006_image_frame_67.png", "ct_train_1003_image_frame_217.png", "ct_train_1009_image_frame_154.png", "ct_train_1015_image_frame_21.png", "ct_train_1014_image_frame_255.png", "ct_train_1014_image_frame_321.png", "ct_train_1003_image_frame_194.png", "ct_train_1005_image_frame_60.png", "ct_train_1005_image_frame_112.png", "ct_train_1003_image_frame_96.png", 
"ct_train_1016_image_frame_29.png", "ct_train_1003_image_frame_15.png", "ct_train_1005_image_frame_93.png", "ct_train_1006_image_frame_6.png", "ct_train_1003_image_frame_272.png", "ct_train_1014_image_frame_26.png", "ct_train_1015_image_frame_49.png", "ct_train_1014_image_frame_190.png", "ct_train_1001_image_frame_357.png", "ct_train_1016_image_frame_102.png", "ct_train_1016_image_frame_230.png", "ct_train_1020_image_frame_45.png", "ct_train_1020_image_frame_190.png", "ct_train_1009_image_frame_186.png", "ct_train_1016_image_frame_181.png", "ct_train_1017_image_frame_55.png", "ct_train_1015_image_frame_28.png", "ct_train_1001_image_frame_329.png", "ct_train_1014_image_frame_300.png", "ct_train_1014_image_frame_323.png", "ct_train_1003_image_frame_82.png", "ct_train_1006_image_frame_35.png", "ct_train_1015_image_frame_72.png", "ct_train_1020_image_frame_138.png", "ct_train_1017_image_frame_204.png", "ct_train_1006_image_frame_142.png", "ct_train_1006_image_frame_0.png", "ct_train_1015_image_frame_54.png", "ct_train_1003_image_frame_214.png", "ct_train_1016_image_frame_120.png", "ct_train_1009_image_frame_80.png", "ct_train_1003_image_frame_230.png", "ct_train_1005_image_frame_165.png", "ct_train_1020_image_frame_348.png", "ct_train_1009_image_frame_20.png", "ct_train_1017_image_frame_134.png", "ct_train_1009_image_frame_168.png", "ct_train_1005_image_frame_36.png", "ct_train_1014_image_frame_286.png", "ct_train_1014_image_frame_201.png", "ct_train_1008_image_frame_45.png", "ct_train_1016_image_frame_96.png", "ct_train_1008_image_frame_4.png", "ct_train_1006_image_frame_20.png", "ct_train_1009_image_frame_283.png", "ct_train_1016_image_frame_109.png", "ct_train_1003_image_frame_271.png", "ct_train_1020_image_frame_255.png", "ct_train_1015_image_frame_248.png", "ct_train_1015_image_frame_29.png", "ct_train_1014_image_frame_314.png", "ct_train_1018_image_frame_83.png", "ct_train_1016_image_frame_73.png", "ct_train_1011_image_frame_83.png", "ct_train_1009_image_frame_164.png", "ct_train_1005_image_frame_123.png", "ct_train_1020_image_frame_359.png", "ct_train_1008_image_frame_175.png", "ct_train_1018_image_frame_152.png", "ct_train_1005_image_frame_42.png", "ct_train_1003_image_frame_87.png", "ct_train_1020_image_frame_319.png", "ct_train_1001_image_frame_327.png", "ct_train_1015_image_frame_37.png", "ct_train_1015_image_frame_277.png", "ct_train_1017_image_frame_91.png", "ct_train_1001_image_frame_315.png", "ct_train_1006_image_frame_12.png", "ct_train_1017_image_frame_49.png", "ct_train_1014_image_frame_264.png", "ct_train_1017_image_frame_206.png", "ct_train_1014_image_frame_261.png", "ct_train_1001_image_frame_49.png", "ct_train_1014_image_frame_129.png", "ct_train_1009_image_frame_165.png", "ct_train_1015_image_frame_106.png", "ct_train_1020_image_frame_269.png", "ct_train_1009_image_frame_128.png", "ct_train_1016_image_frame_266.png", "ct_train_1020_image_frame_142.png", "ct_train_1020_image_frame_297.png", "ct_train_1003_image_frame_98.png", "ct_train_1003_image_frame_231.png", "ct_train_1001_image_frame_188.png", "ct_train_1018_image_frame_95.png", "ct_train_1006_image_frame_130.png", "ct_train_1017_image_frame_242.png", "ct_train_1015_image_frame_207.png", "ct_train_1008_image_frame_113.png", "ct_train_1006_image_frame_14.png", "ct_train_1001_image_frame_105.png", "ct_train_1011_image_frame_198.png", "ct_train_1020_image_frame_47.png", "ct_train_1006_image_frame_93.png", "ct_train_1006_image_frame_39.png", "ct_train_1008_image_frame_168.png", "ct_train_1008_image_frame_42.png", 
"ct_train_1009_image_frame_34.png", "ct_train_1009_image_frame_264.png", "ct_train_1016_image_frame_309.png", "ct_train_1001_image_frame_247.png", "ct_train_1014_image_frame_303.png", "ct_train_1009_image_frame_112.png", "ct_train_1014_image_frame_178.png", "ct_train_1005_image_frame_18.png", "ct_train_1018_image_frame_58.png", "ct_train_1009_image_frame_288.png", "ct_train_1020_image_frame_101.png", "ct_train_1006_image_frame_245.png", "ct_train_1003_image_frame_120.png", "ct_train_1003_image_frame_18.png", "ct_train_1014_image_frame_293.png", "ct_train_1001_image_frame_331.png", "ct_train_1009_image_frame_278.png", "ct_train_1020_image_frame_159.png", "ct_train_1011_image_frame_51.png", "ct_train_1001_image_frame_358.png", "ct_train_1016_image_frame_293.png", "ct_train_1020_image_frame_193.png", "ct_train_1020_image_frame_113.png", "ct_train_1011_image_frame_100.png", "ct_train_1020_image_frame_50.png", "ct_train_1005_image_frame_135.png", "ct_train_1001_image_frame_208.png", "ct_train_1006_image_frame_200.png", "ct_train_1011_image_frame_238.png", "ct_train_1003_image_frame_215.png", "ct_train_1009_image_frame_29.png", "ct_train_1001_image_frame_84.png", "ct_train_1014_image_frame_350.png", "ct_train_1006_image_frame_209.png", "ct_train_1014_image_frame_83.png", "ct_train_1018_image_frame_8.png", "ct_train_1001_image_frame_137.png", "ct_train_1018_image_frame_65.png", "ct_train_1014_image_frame_203.png", "ct_train_1015_image_frame_87.png", "ct_train_1014_image_frame_43.png", "ct_train_1018_image_frame_164.png", "ct_train_1008_image_frame_188.png", "ct_train_1006_image_frame_146.png", "ct_train_1016_image_frame_41.png", "ct_train_1008_image_frame_25.png", "ct_train_1017_image_frame_277.png", "ct_train_1008_image_frame_154.png", "ct_train_1020_image_frame_299.png", "ct_train_1005_image_frame_163.png", "ct_train_1017_image_frame_108.png", "ct_train_1009_image_frame_138.png", "ct_train_1005_image_frame_3.png", "ct_train_1008_image_frame_72.png", "ct_train_1016_image_frame_320.png", "ct_train_1015_image_frame_43.png", "ct_train_1008_image_frame_138.png", "ct_train_1001_image_frame_68.png", "ct_train_1015_image_frame_6.png", "ct_train_1016_image_frame_200.png", "ct_train_1020_image_frame_361.png", "ct_train_1009_image_frame_276.png", "ct_train_1001_image_frame_259.png", "ct_train_1016_image_frame_2.png", "ct_train_1016_image_frame_83.png", "ct_train_1020_image_frame_131.png", "ct_train_1008_image_frame_207.png", "ct_train_1003_image_frame_291.png", "ct_train_1016_image_frame_26.png", "ct_train_1014_image_frame_181.png", "ct_train_1017_image_frame_176.png", "ct_train_1003_image_frame_48.png", "ct_train_1017_image_frame_121.png", "ct_train_1014_image_frame_251.png", "ct_train_1009_image_frame_172.png", "ct_train_1003_image_frame_12.png", "ct_train_1020_image_frame_165.png", "ct_train_1017_image_frame_236.png", "ct_train_1014_image_frame_76.png", "ct_train_1016_image_frame_246.png", "ct_train_1015_image_frame_149.png", "ct_train_1014_image_frame_299.png", "ct_train_1014_image_frame_260.png", "ct_train_1008_image_frame_115.png", "ct_train_1014_image_frame_254.png", "ct_train_1009_image_frame_13.png", "ct_train_1014_image_frame_151.png", "ct_train_1011_image_frame_16.png", "ct_train_1003_image_frame_95.png", "ct_train_1018_image_frame_86.png", "ct_train_1006_image_frame_55.png", "ct_train_1001_image_frame_323.png", "ct_train_1001_image_frame_56.png", "ct_train_1017_image_frame_43.png", "ct_train_1015_image_frame_244.png", "ct_train_1006_image_frame_240.png", "ct_train_1018_image_frame_73.png", 
"ct_train_1015_image_frame_165.png", "ct_train_1015_image_frame_176.png", "ct_train_1006_image_frame_158.png", "ct_train_1003_image_frame_93.png", "ct_train_1016_image_frame_21.png", "ct_train_1008_image_frame_28.png", "ct_train_1014_image_frame_113.png", "ct_train_1011_image_frame_188.png", "ct_train_1014_image_frame_223.png", "ct_train_1014_image_frame_107.png", "ct_train_1006_image_frame_27.png", "ct_train_1011_image_frame_19.png", "ct_train_1005_image_frame_6.png", "ct_train_1020_image_frame_239.png", "ct_train_1001_image_frame_94.png", "ct_train_1001_image_frame_11.png", "ct_train_1015_image_frame_79.png", "ct_train_1003_image_frame_204.png", "ct_train_1020_image_frame_48.png", "ct_train_1014_image_frame_356.png", "ct_train_1005_image_frame_5.png", "ct_train_1001_image_frame_300.png", "ct_train_1016_image_frame_212.png", "ct_train_1008_image_frame_208.png", "ct_train_1014_image_frame_102.png", "ct_train_1015_image_frame_117.png", "ct_train_1014_image_frame_9.png", "ct_train_1020_image_frame_247.png", "ct_train_1016_image_frame_296.png", "ct_train_1001_image_frame_336.png", "ct_train_1020_image_frame_143.png", "ct_train_1016_image_frame_307.png", "ct_train_1017_image_frame_82.png", "ct_train_1001_image_frame_126.png", "ct_train_1005_image_frame_170.png", "ct_train_1008_image_frame_123.png", "ct_train_1011_image_frame_7.png", "ct_train_1003_image_frame_67.png", "ct_train_1015_image_frame_107.png", "ct_train_1016_image_frame_48.png", "ct_train_1001_image_frame_326.png", "ct_train_1001_image_frame_298.png", "ct_train_1005_image_frame_45.png", "ct_train_1014_image_frame_69.png", "ct_train_1011_image_frame_123.png", "ct_train_1014_image_frame_205.png", "ct_train_1001_image_frame_39.png", "ct_train_1014_image_frame_158.png", "ct_train_1017_image_frame_258.png", "ct_train_1017_image_frame_167.png", "ct_train_1001_image_frame_198.png", "ct_train_1014_image_frame_5.png", "ct_train_1006_image_frame_168.png", "ct_train_1020_image_frame_200.png", "ct_train_1020_image_frame_162.png", "ct_train_1011_image_frame_178.png", "ct_train_1017_image_frame_52.png", "ct_train_1009_image_frame_281.png", "ct_train_1003_image_frame_45.png", "ct_train_1006_image_frame_78.png", "ct_train_1003_image_frame_121.png", "ct_train_1001_image_frame_173.png", "ct_train_1018_image_frame_3.png", "ct_train_1006_image_frame_122.png", "ct_train_1014_image_frame_132.png", "ct_train_1009_image_frame_46.png", "ct_train_1015_image_frame_56.png", "ct_train_1016_image_frame_99.png", "ct_train_1015_image_frame_172.png", "ct_train_1014_image_frame_176.png", "ct_train_1020_image_frame_158.png", "ct_train_1020_image_frame_186.png", "ct_train_1015_image_frame_92.png", "ct_train_1005_image_frame_168.png", "ct_train_1003_image_frame_200.png", "ct_train_1016_image_frame_202.png", "ct_train_1020_image_frame_126.png", "ct_train_1011_image_frame_42.png", "ct_train_1016_image_frame_303.png", "ct_train_1001_image_frame_269.png", "ct_train_1001_image_frame_33.png", "ct_train_1020_image_frame_54.png", "ct_train_1017_image_frame_27.png", "ct_train_1018_image_frame_81.png", "ct_train_1015_image_frame_211.png", "ct_train_1009_image_frame_5.png", "ct_train_1015_image_frame_191.png", "ct_train_1017_image_frame_103.png", "ct_train_1006_image_frame_199.png", "ct_train_1014_image_frame_182.png", "ct_train_1008_image_frame_11.png", "ct_train_1011_image_frame_33.png", "ct_train_1018_image_frame_60.png", "ct_train_1015_image_frame_58.png", "ct_train_1008_image_frame_95.png", "ct_train_1018_image_frame_35.png", "ct_train_1020_image_frame_238.png", 
"ct_train_1009_image_frame_145.png", "ct_train_1017_image_frame_185.png", "ct_train_1001_image_frame_62.png", "ct_train_1006_image_frame_75.png", "ct_train_1014_image_frame_252.png", "ct_train_1009_image_frame_290.png", "ct_train_1008_image_frame_80.png", "ct_train_1016_image_frame_68.png", "ct_train_1018_image_frame_104.png", "ct_train_1014_image_frame_329.png", "ct_train_1001_image_frame_277.png", "ct_train_1014_image_frame_273.png", "ct_train_1011_image_frame_23.png", "ct_train_1011_image_frame_195.png", "ct_train_1009_image_frame_48.png", "ct_train_1005_image_frame_118.png", "ct_train_1016_image_frame_193.png", "ct_train_1020_image_frame_11.png", "ct_train_1018_image_frame_121.png", "ct_train_1014_image_frame_87.png", "ct_train_1001_image_frame_26.png", "ct_train_1020_image_frame_155.png", "ct_train_1001_image_frame_332.png", "ct_train_1017_image_frame_46.png", "ct_train_1009_image_frame_185.png", "ct_train_1006_image_frame_120.png", "ct_train_1011_image_frame_115.png", "ct_train_1011_image_frame_133.png", "ct_train_1009_image_frame_179.png", "ct_train_1001_image_frame_303.png", "ct_train_1020_image_frame_349.png", "ct_train_1014_image_frame_225.png", "ct_train_1011_image_frame_84.png", "ct_train_1020_image_frame_357.png", "ct_train_1018_image_frame_179.png", "ct_train_1020_image_frame_277.png", "ct_train_1008_image_frame_67.png", "ct_train_1009_image_frame_241.png", "ct_train_1006_image_frame_1.png", "ct_train_1014_image_frame_265.png", "ct_train_1016_image_frame_306.png", "ct_train_1005_image_frame_133.png", "ct_train_1020_image_frame_125.png", "ct_train_1015_image_frame_168.png", "ct_train_1016_image_frame_192.png", "ct_train_1015_image_frame_59.png", "ct_train_1009_image_frame_60.png", "ct_train_1017_image_frame_234.png", "ct_train_1015_image_frame_40.png", "ct_train_1011_image_frame_2.png", "ct_train_1001_image_frame_86.png", "ct_train_1016_image_frame_236.png", "ct_train_1015_image_frame_226.png", "ct_train_1009_image_frame_203.png", "ct_train_1015_image_frame_89.png", "ct_train_1015_image_frame_82.png", "ct_train_1020_image_frame_257.png", "ct_train_1011_image_frame_168.png", "ct_train_1020_image_frame_72.png", "ct_train_1017_image_frame_29.png", "ct_train_1001_image_frame_153.png", "ct_train_1016_image_frame_207.png", "ct_train_1018_image_frame_168.png", "ct_train_1009_image_frame_56.png", "ct_train_1005_image_frame_53.png", "ct_train_1018_image_frame_28.png", "ct_train_1008_image_frame_145.png", "ct_train_1017_image_frame_135.png", "ct_train_1014_image_frame_73.png", "ct_train_1018_image_frame_135.png", "ct_train_1006_image_frame_28.png", "ct_train_1005_image_frame_65.png", "ct_train_1020_image_frame_151.png", "ct_train_1018_image_frame_6.png", "ct_train_1020_image_frame_346.png", "ct_train_1018_image_frame_184.png", "ct_train_1014_image_frame_231.png", "ct_train_1014_image_frame_280.png", "ct_train_1018_image_frame_63.png", "ct_train_1016_image_frame_130.png", "ct_train_1003_image_frame_168.png", "ct_train_1015_image_frame_235.png", "ct_train_1016_image_frame_53.png", "ct_train_1014_image_frame_15.png", "ct_train_1018_image_frame_61.png", "ct_train_1016_image_frame_10.png", "ct_train_1016_image_frame_287.png", "ct_train_1017_image_frame_170.png", "ct_train_1006_image_frame_182.png", "ct_train_1003_image_frame_89.png", "ct_train_1003_image_frame_136.png", "ct_train_1009_image_frame_252.png", "ct_train_1020_image_frame_219.png", "ct_train_1015_image_frame_217.png", "ct_train_1015_image_frame_222.png", "ct_train_1015_image_frame_76.png", "ct_train_1014_image_frame_154.png", 
"ct_train_1020_image_frame_222.png", "ct_train_1006_image_frame_170.png", "ct_train_1014_image_frame_71.png", "ct_train_1011_image_frame_122.png", "ct_train_1020_image_frame_124.png", "ct_train_1014_image_frame_332.png", "ct_train_1005_image_frame_98.png", "ct_train_1016_image_frame_316.png", "ct_train_1015_image_frame_263.png", "ct_train_1011_image_frame_138.png", "ct_train_1009_image_frame_84.png", "ct_train_1014_image_frame_167.png", "ct_train_1018_image_frame_98.png", "ct_train_1008_image_frame_16.png", "ct_train_1020_image_frame_313.png", "ct_train_1001_image_frame_281.png", "ct_train_1020_image_frame_139.png", "ct_train_1014_image_frame_163.png", "ct_train_1015_image_frame_196.png", "ct_train_1011_image_frame_9.png", "ct_train_1006_image_frame_137.png", "ct_train_1014_image_frame_79.png", "ct_train_1020_image_frame_17.png", "ct_train_1017_image_frame_186.png", "ct_train_1011_image_frame_125.png", "ct_train_1016_image_frame_250.png", "ct_train_1006_image_frame_179.png", "ct_train_1001_image_frame_81.png", "ct_train_1016_image_frame_24.png", "ct_train_1014_image_frame_357.png", "ct_train_1003_image_frame_114.png", "ct_train_1018_image_frame_112.png", "ct_train_1011_image_frame_0.png", "ct_train_1006_image_frame_73.png", "ct_train_1009_image_frame_232.png", "ct_train_1001_image_frame_25.png", "ct_train_1001_image_frame_82.png", "ct_train_1009_image_frame_175.png", "ct_train_1020_image_frame_156.png", "ct_train_1009_image_frame_107.png", "ct_train_1003_image_frame_122.png", "ct_train_1015_image_frame_182.png", "ct_train_1015_image_frame_97.png", "ct_train_1014_image_frame_305.png", "ct_train_1005_image_frame_149.png", "ct_train_1003_image_frame_5.png", "ct_train_1003_image_frame_36.png", "ct_train_1003_image_frame_145.png", "ct_train_1005_image_frame_137.png", "ct_train_1017_image_frame_59.png", "ct_train_1005_image_frame_86.png", "ct_train_1017_image_frame_151.png", "ct_train_1014_image_frame_72.png", "ct_train_1011_image_frame_95.png", "ct_train_1001_image_frame_316.png", "ct_train_1011_image_frame_229.png", "ct_train_1015_image_frame_267.png", "ct_train_1015_image_frame_166.png", "ct_train_1001_image_frame_305.png", "ct_train_1014_image_frame_349.png", "ct_train_1015_image_frame_69.png", "ct_train_1003_image_frame_247.png", "ct_train_1020_image_frame_291.png", "ct_train_1006_image_frame_184.png", "ct_train_1008_image_frame_140.png", "ct_train_1001_image_frame_5.png", "ct_train_1016_image_frame_80.png", "ct_train_1014_image_frame_204.png", "ct_train_1017_image_frame_78.png", "ct_train_1014_image_frame_214.png", "ct_train_1011_image_frame_211.png", "ct_train_1015_image_frame_55.png", "ct_train_1015_image_frame_140.png", "ct_train_1016_image_frame_36.png", "ct_train_1020_image_frame_35.png", "ct_train_1006_image_frame_85.png", "ct_train_1020_image_frame_0.png", "ct_train_1016_image_frame_172.png", "ct_train_1020_image_frame_240.png", "ct_train_1005_image_frame_131.png", "ct_train_1011_image_frame_236.png", "ct_train_1016_image_frame_111.png", "ct_train_1017_image_frame_164.png", "ct_train_1017_image_frame_6.png", "ct_train_1009_image_frame_72.png", "ct_train_1016_image_frame_310.png", "ct_train_1006_image_frame_135.png", "ct_train_1015_image_frame_30.png", "ct_train_1015_image_frame_249.png", "ct_train_1003_image_frame_189.png", "ct_train_1006_image_frame_225.png", "ct_train_1008_image_frame_189.png", "ct_train_1017_image_frame_54.png", "ct_train_1001_image_frame_109.png", "ct_train_1008_image_frame_40.png", "ct_train_1020_image_frame_226.png", "ct_train_1005_image_frame_19.png", 
"ct_train_1017_image_frame_177.png", "ct_train_1006_image_frame_81.png", "ct_train_1020_image_frame_217.png", "ct_train_1015_image_frame_231.png", "ct_train_1009_image_frame_37.png", "ct_train_1001_image_frame_206.png", "ct_train_1014_image_frame_230.png", "ct_train_1014_image_frame_144.png", "ct_train_1015_image_frame_70.png", "ct_train_1014_image_frame_209.png", "ct_train_1006_image_frame_57.png", "ct_train_1003_image_frame_140.png", "ct_train_1011_image_frame_61.png", "ct_train_1018_image_frame_133.png", "ct_train_1017_image_frame_105.png", "ct_train_1016_image_frame_75.png", "ct_train_1003_image_frame_221.png", "ct_train_1008_image_frame_105.png", "ct_train_1009_image_frame_87.png", "ct_train_1020_image_frame_53.png", "ct_train_1011_image_frame_143.png", "ct_train_1008_image_frame_178.png", "ct_train_1011_image_frame_99.png", "ct_train_1020_image_frame_310.png", "ct_train_1001_image_frame_310.png", "ct_train_1011_image_frame_72.png", "ct_train_1014_image_frame_320.png", "ct_train_1001_image_frame_355.png", "ct_train_1016_image_frame_82.png", "ct_train_1001_image_frame_61.png", "ct_train_1018_image_frame_176.png", "ct_train_1003_image_frame_8.png", "ct_train_1011_image_frame_12.png", "ct_train_1017_image_frame_129.png", "ct_train_1018_image_frame_2.png", "ct_train_1001_image_frame_232.png", "ct_train_1016_image_frame_175.png", "ct_train_1014_image_frame_155.png", "ct_train_1015_image_frame_42.png", "ct_train_1015_image_frame_276.png", "ct_train_1014_image_frame_123.png", "ct_train_1008_image_frame_91.png", "ct_train_1011_image_frame_194.png", "ct_train_1014_image_frame_86.png", "ct_train_1001_image_frame_272.png", "ct_train_1020_image_frame_121.png", "ct_train_1008_image_frame_15.png", "ct_train_1018_image_frame_36.png", "ct_train_1015_image_frame_134.png", "ct_train_1011_image_frame_82.png", "ct_train_1001_image_frame_165.png", "ct_train_1018_image_frame_186.png", "ct_train_1016_image_frame_146.png", "ct_train_1018_image_frame_145.png", "ct_train_1003_image_frame_17.png", "ct_train_1016_image_frame_297.png", "ct_train_1014_image_frame_40.png", "ct_train_1003_image_frame_235.png", "ct_train_1009_image_frame_216.png", "ct_train_1014_image_frame_52.png", "ct_train_1001_image_frame_34.png", "ct_train_1001_image_frame_337.png", "ct_train_1003_image_frame_236.png", "ct_train_1016_image_frame_322.png", "ct_train_1016_image_frame_283.png", "ct_train_1011_image_frame_205.png", "ct_train_1015_image_frame_68.png", "ct_train_1016_image_frame_282.png", "ct_train_1015_image_frame_174.png", "ct_train_1003_image_frame_113.png", "ct_train_1015_image_frame_116.png", "ct_train_1003_image_frame_276.png", "ct_train_1018_image_frame_5.png", "ct_train_1020_image_frame_137.png", "ct_train_1009_image_frame_62.png", "ct_train_1006_image_frame_94.png", "ct_train_1017_image_frame_254.png", "ct_train_1001_image_frame_248.png", "ct_train_1020_image_frame_30.png", "ct_train_1015_image_frame_39.png", "ct_train_1016_image_frame_76.png", "ct_train_1009_image_frame_91.png", "ct_train_1020_image_frame_268.png", "ct_train_1003_image_frame_183.png", "ct_train_1020_image_frame_345.png", "ct_train_1006_image_frame_234.png", "ct_train_1008_image_frame_146.png", "ct_train_1016_image_frame_243.png", "ct_train_1008_image_frame_111.png", "ct_train_1020_image_frame_4.png", "ct_train_1005_image_frame_121.png", "ct_train_1020_image_frame_360.png", "ct_train_1015_image_frame_53.png", "ct_train_1017_image_frame_251.png", "ct_train_1009_image_frame_11.png", "ct_train_1005_image_frame_39.png", "ct_train_1017_image_frame_37.png", 
"ct_train_1020_image_frame_343.png", "ct_train_1017_image_frame_154.png", "ct_train_1016_image_frame_223.png", "ct_train_1014_image_frame_48.png", "ct_train_1001_image_frame_301.png", "ct_train_1008_image_frame_148.png", "ct_train_1018_image_frame_29.png", "ct_train_1016_image_frame_201.png", "ct_train_1020_image_frame_152.png", "ct_train_1001_image_frame_46.png", "ct_train_1017_image_frame_69.png", "ct_train_1017_image_frame_246.png", "ct_train_1020_image_frame_93.png", "ct_train_1005_image_frame_77.png", "ct_train_1020_image_frame_23.png", "ct_train_1011_image_frame_112.png", "ct_train_1016_image_frame_156.png", "ct_train_1006_image_frame_195.png", "ct_train_1014_image_frame_29.png", "ct_train_1005_image_frame_27.png", "ct_train_1001_image_frame_199.png", "ct_train_1006_image_frame_244.png", "ct_train_1001_image_frame_0.png", "ct_train_1001_image_frame_66.png", "ct_train_1006_image_frame_223.png", "ct_train_1020_image_frame_25.png", "ct_train_1017_image_frame_205.png", "ct_train_1011_image_frame_180.png", "ct_train_1009_image_frame_173.png", "ct_train_1009_image_frame_163.png", "ct_train_1006_image_frame_112.png", "ct_train_1014_image_frame_126.png", "ct_train_1014_image_frame_66.png", "ct_train_1018_image_frame_43.png", "ct_train_1015_image_frame_283.png", "ct_train_1009_image_frame_0.png", "ct_train_1001_image_frame_150.png", "ct_train_1018_image_frame_161.png", "ct_train_1005_image_frame_114.png", "ct_train_1006_image_frame_156.png", "ct_train_1009_image_frame_152.png", "ct_train_1015_image_frame_285.png", "ct_train_1016_image_frame_216.png", "ct_train_1016_image_frame_59.png", "ct_train_1008_image_frame_206.png", "ct_train_1020_image_frame_157.png", "ct_train_1003_image_frame_32.png", "ct_train_1001_image_frame_252.png", "ct_train_1018_image_frame_100.png", "ct_train_1003_image_frame_103.png", "ct_train_1020_image_frame_105.png", "ct_train_1014_image_frame_229.png", "ct_train_1020_image_frame_63.png", "ct_train_1006_image_frame_233.png", "ct_train_1009_image_frame_223.png", "ct_train_1014_image_frame_180.png", "ct_train_1020_image_frame_320.png", "ct_train_1015_image_frame_173.png", "ct_train_1015_image_frame_112.png", "ct_train_1003_image_frame_210.png", "ct_train_1016_image_frame_126.png", "ct_train_1006_image_frame_180.png", "ct_train_1015_image_frame_99.png", "ct_train_1017_image_frame_192.png", "ct_train_1008_image_frame_38.png", "ct_train_1003_image_frame_124.png", "ct_train_1005_image_frame_26.png", "ct_train_1003_image_frame_112.png", "ct_train_1011_image_frame_185.png", "ct_train_1001_image_frame_131.png", "ct_train_1018_image_frame_147.png", "ct_train_1016_image_frame_180.png", "ct_train_1006_image_frame_88.png", "ct_train_1003_image_frame_232.png", "ct_train_1011_image_frame_183.png", "ct_train_1016_image_frame_160.png", "ct_train_1014_image_frame_173.png", "ct_train_1015_image_frame_175.png", "ct_train_1001_image_frame_77.png", "ct_train_1014_image_frame_319.png", "ct_train_1008_image_frame_94.png", "ct_train_1003_image_frame_264.png", "ct_train_1011_image_frame_161.png", "ct_train_1014_image_frame_109.png", "ct_train_1017_image_frame_269.png", "ct_train_1015_image_frame_78.png", "ct_train_1003_image_frame_1.png", "ct_train_1009_image_frame_197.png", "ct_train_1017_image_frame_153.png", "ct_train_1020_image_frame_178.png", "ct_train_1017_image_frame_172.png", "ct_train_1009_image_frame_190.png", "ct_train_1008_image_frame_24.png", "ct_train_1016_image_frame_170.png", "ct_train_1008_image_frame_54.png", "ct_train_1017_image_frame_72.png", 
"ct_train_1014_image_frame_125.png", "ct_train_1014_image_frame_148.png", "ct_train_1020_image_frame_341.png", "ct_train_1014_image_frame_267.png", "ct_train_1005_image_frame_132.png", "ct_train_1003_image_frame_211.png", "ct_train_1016_image_frame_31.png", "ct_train_1011_image_frame_21.png", "ct_train_1009_image_frame_50.png", "ct_train_1009_image_frame_129.png", "ct_train_1015_image_frame_63.png", "ct_train_1016_image_frame_94.png", "ct_train_1014_image_frame_206.png", "ct_train_1017_image_frame_178.png", "ct_train_1014_image_frame_31.png", "ct_train_1014_image_frame_59.png", "ct_train_1017_image_frame_104.png", "ct_train_1018_image_frame_68.png", "ct_train_1020_image_frame_250.png", "ct_train_1016_image_frame_257.png", "ct_train_1005_image_frame_124.png", "ct_train_1003_image_frame_226.png", "ct_train_1006_image_frame_51.png", "ct_train_1011_image_frame_41.png", "ct_train_1003_image_frame_30.png", "ct_train_1006_image_frame_136.png", "ct_train_1015_image_frame_121.png", "ct_train_1018_image_frame_118.png", "ct_train_1016_image_frame_52.png", "ct_train_1014_image_frame_283.png", "ct_train_1008_image_frame_22.png", "ct_train_1006_image_frame_157.png", "ct_train_1008_image_frame_106.png", "ct_train_1011_image_frame_106.png", "ct_train_1008_image_frame_143.png", "ct_train_1011_image_frame_88.png", "ct_train_1008_image_frame_12.png", "ct_train_1016_image_frame_34.png", "ct_train_1018_image_frame_88.png", "ct_train_1009_image_frame_262.png", "ct_train_1003_image_frame_273.png", "ct_train_1015_image_frame_205.png", "ct_train_1008_image_frame_66.png", "ct_train_1011_image_frame_31.png", "ct_train_1015_image_frame_225.png", "ct_train_1003_image_frame_176.png", "ct_train_1017_image_frame_173.png", "ct_train_1014_image_frame_90.png", "ct_train_1015_image_frame_179.png", "ct_train_1006_image_frame_40.png", "ct_train_1020_image_frame_62.png", "ct_train_1014_image_frame_208.png", "ct_train_1011_image_frame_146.png", "ct_train_1011_image_frame_173.png", "ct_train_1003_image_frame_172.png", "ct_train_1015_image_frame_66.png", "ct_train_1015_image_frame_215.png", "ct_train_1020_image_frame_79.png", "ct_train_1020_image_frame_354.png", "ct_train_1020_image_frame_127.png", "ct_train_1001_image_frame_284.png", "ct_train_1003_image_frame_248.png", "ct_train_1001_image_frame_117.png", "ct_train_1017_image_frame_175.png", "ct_train_1003_image_frame_73.png", "ct_train_1003_image_frame_175.png", "ct_train_1016_image_frame_301.png", "ct_train_1009_image_frame_21.png", "ct_train_1015_image_frame_4.png", "ct_train_1003_image_frame_152.png", "ct_train_1003_image_frame_173.png", "ct_train_1001_image_frame_148.png", "ct_train_1006_image_frame_4.png", "ct_train_1016_image_frame_49.png", "ct_train_1001_image_frame_311.png", "ct_train_1001_image_frame_210.png", "ct_train_1014_image_frame_44.png", "ct_train_1001_image_frame_21.png", "ct_train_1003_image_frame_35.png", "ct_train_1011_image_frame_189.png", "ct_train_1003_image_frame_56.png", "ct_train_1008_image_frame_41.png", "ct_train_1020_image_frame_75.png", "ct_train_1001_image_frame_344.png", "ct_train_1008_image_frame_19.png", "ct_train_1003_image_frame_108.png", "ct_train_1009_image_frame_122.png", "ct_train_1020_image_frame_69.png", "ct_train_1016_image_frame_249.png", "ct_train_1020_image_frame_16.png", "ct_train_1009_image_frame_211.png", "ct_train_1014_image_frame_228.png", "ct_train_1014_image_frame_327.png", "ct_train_1015_image_frame_171.png", "ct_train_1015_image_frame_32.png", "ct_train_1006_image_frame_208.png", "ct_train_1017_image_frame_90.png", 
"ct_train_1014_image_frame_199.png", "ct_train_1008_image_frame_89.png", "ct_train_1005_image_frame_159.png", "ct_train_1016_image_frame_45.png", "ct_train_1006_image_frame_186.png", "ct_train_1008_image_frame_130.png", "ct_train_1016_image_frame_64.png", "ct_train_1014_image_frame_288.png", "ct_train_1020_image_frame_355.png", "ct_train_1011_image_frame_157.png", "ct_train_1017_image_frame_275.png", "ct_train_1017_image_frame_93.png", "ct_train_1009_image_frame_224.png", "ct_train_1017_image_frame_174.png", "ct_train_1015_image_frame_20.png", "ct_train_1005_image_frame_46.png", "ct_train_1016_image_frame_28.png", "ct_train_1016_image_frame_144.png", "ct_train_1020_image_frame_336.png", "ct_train_1017_image_frame_157.png", "ct_train_1008_image_frame_29.png", "ct_train_1005_image_frame_110.png", "ct_train_1018_image_frame_12.png", "ct_train_1003_image_frame_185.png", "ct_train_1020_image_frame_326.png", "ct_train_1017_image_frame_231.png", "ct_train_1006_image_frame_30.png", "ct_train_1001_image_frame_312.png", "ct_train_1014_image_frame_295.png", "ct_train_1001_image_frame_319.png", "ct_train_1001_image_frame_163.png", "ct_train_1008_image_frame_126.png", "ct_train_1014_image_frame_310.png", "ct_train_1020_image_frame_86.png", "ct_train_1001_image_frame_180.png", "ct_train_1001_image_frame_149.png", "ct_train_1014_image_frame_296.png", "ct_train_1009_image_frame_289.png", "ct_train_1008_image_frame_150.png", "ct_train_1009_image_frame_143.png", "ct_train_1016_image_frame_121.png", "ct_train_1005_image_frame_148.png", "ct_train_1009_image_frame_268.png", "ct_train_1017_image_frame_257.png", "ct_train_1015_image_frame_101.png", "ct_train_1003_image_frame_21.png", "ct_train_1018_image_frame_15.png", "ct_train_1018_image_frame_50.png", "ct_train_1006_image_frame_107.png", "ct_train_1005_image_frame_4.png", "ct_train_1014_image_frame_200.png", "ct_train_1009_image_frame_254.png", "ct_train_1001_image_frame_343.png", "ct_train_1006_image_frame_242.png", "ct_train_1011_image_frame_52.png", "ct_train_1016_image_frame_262.png", "ct_train_1005_image_frame_20.png", "ct_train_1014_image_frame_307.png", "ct_train_1001_image_frame_15.png", "ct_train_1003_image_frame_229.png", "ct_train_1016_image_frame_269.png", "ct_train_1008_image_frame_182.png", "ct_train_1005_image_frame_47.png", "ct_train_1016_image_frame_101.png", "ct_train_1011_image_frame_139.png", "ct_train_1020_image_frame_42.png", "ct_train_1020_image_frame_362.png", "ct_train_1009_image_frame_98.png", "ct_train_1015_image_frame_80.png", "ct_train_1005_image_frame_57.png", "ct_train_1018_image_frame_107.png", "ct_train_1001_image_frame_328.png", "ct_train_1003_image_frame_258.png", "ct_train_1014_image_frame_213.png", "ct_train_1006_image_frame_33.png", "ct_train_1008_image_frame_118.png", "ct_train_1011_image_frame_35.png", "ct_train_1015_image_frame_289.png", "ct_train_1005_image_frame_62.png", "ct_train_1017_image_frame_56.png", "ct_train_1005_image_frame_21.png", "ct_train_1014_image_frame_345.png", "ct_train_1016_image_frame_258.png", "ct_train_1016_image_frame_51.png", "ct_train_1015_image_frame_123.png", "ct_train_1003_image_frame_65.png", "ct_train_1003_image_frame_238.png", "ct_train_1014_image_frame_290.png", "ct_train_1006_image_frame_174.png", "ct_train_1003_image_frame_160.png", "ct_train_1011_image_frame_202.png", "ct_train_1014_image_frame_114.png", "ct_train_1016_image_frame_292.png", "ct_train_1001_image_frame_155.png", "ct_train_1009_image_frame_231.png", "ct_train_1020_image_frame_128.png", 
"ct_train_1016_image_frame_70.png", "ct_train_1001_image_frame_283.png", "ct_train_1006_image_frame_143.png", "ct_train_1016_image_frame_123.png", "ct_train_1017_image_frame_263.png", "ct_train_1017_image_frame_146.png", "ct_train_1009_image_frame_258.png", "ct_train_1008_image_frame_152.png", "ct_train_1009_image_frame_137.png", "ct_train_1016_image_frame_290.png", "ct_train_1005_image_frame_127.png", "ct_train_1001_image_frame_302.png", "ct_train_1011_image_frame_186.png", "ct_train_1008_image_frame_99.png", "ct_train_1006_image_frame_36.png", "ct_train_1014_image_frame_184.png", "ct_train_1006_image_frame_172.png", "ct_train_1006_image_frame_165.png", "ct_train_1017_image_frame_195.png", "ct_train_1001_image_frame_285.png", "ct_train_1005_image_frame_130.png", "ct_train_1017_image_frame_156.png", "ct_train_1017_image_frame_17.png", "ct_train_1009_image_frame_279.png", "ct_train_1003_image_frame_216.png", "ct_train_1001_image_frame_97.png", "ct_train_1018_image_frame_90.png", "ct_train_1014_image_frame_172.png", "ct_train_1016_image_frame_168.png", "ct_train_1006_image_frame_231.png", "ct_train_1009_image_frame_82.png", "ct_train_1017_image_frame_248.png", "ct_train_1017_image_frame_214.png", "ct_train_1001_image_frame_299.png", "ct_train_1015_image_frame_200.png", "ct_train_1017_image_frame_239.png", "ct_train_1016_image_frame_119.png", "ct_train_1001_image_frame_70.png", "ct_train_1020_image_frame_14.png", "ct_train_1003_image_frame_177.png", "ct_train_1003_image_frame_155.png", "ct_train_1018_image_frame_113.png", "ct_train_1001_image_frame_73.png", "ct_train_1020_image_frame_84.png", "ct_train_1001_image_frame_293.png", "ct_train_1016_image_frame_295.png", "ct_train_1006_image_frame_150.png", "ct_train_1001_image_frame_140.png", "ct_train_1001_image_frame_51.png", "ct_train_1001_image_frame_13.png", "ct_train_1016_image_frame_273.png", "ct_train_1015_image_frame_100.png", "ct_train_1001_image_frame_19.png", "ct_train_1018_image_frame_34.png", "ct_train_1014_image_frame_134.png", "ct_train_1005_image_frame_68.png", "ct_train_1009_image_frame_18.png", "ct_train_1006_image_frame_224.png", "ct_train_1006_image_frame_13.png", "ct_train_1001_image_frame_169.png", "ct_train_1011_image_frame_124.png", "ct_train_1008_image_frame_97.png", "ct_train_1003_image_frame_255.png", "ct_train_1008_image_frame_112.png", "ct_train_1009_image_frame_261.png", "ct_train_1020_image_frame_95.png", "ct_train_1020_image_frame_204.png", "ct_train_1020_image_frame_196.png", "ct_train_1009_image_frame_114.png", "ct_train_1003_image_frame_245.png", "ct_train_1003_image_frame_42.png", "ct_train_1014_image_frame_47.png", "ct_train_1003_image_frame_71.png", "ct_train_1005_image_frame_30.png", "ct_train_1008_image_frame_18.png", "ct_train_1020_image_frame_46.png", "ct_train_1018_image_frame_138.png", "ct_train_1006_image_frame_65.png", "ct_train_1015_image_frame_241.png", "ct_train_1001_image_frame_161.png", "ct_train_1001_image_frame_23.png", "ct_train_1014_image_frame_140.png", "ct_train_1005_image_frame_116.png", "ct_train_1016_image_frame_238.png", "ct_train_1003_image_frame_187.png", "ct_train_1015_image_frame_144.png", "ct_train_1009_image_frame_141.png", "ct_train_1001_image_frame_320.png", "ct_train_1011_image_frame_219.png", "ct_train_1001_image_frame_200.png", "ct_train_1014_image_frame_348.png", "ct_train_1003_image_frame_252.png", "ct_train_1016_image_frame_276.png", "ct_train_1001_image_frame_41.png", "ct_train_1018_image_frame_10.png", "ct_train_1001_image_frame_88.png", 
"ct_train_1015_image_frame_85.png", "ct_train_1016_image_frame_154.png", "ct_train_1009_image_frame_202.png", "ct_train_1003_image_frame_59.png", "ct_train_1020_image_frame_149.png", "ct_train_1009_image_frame_28.png", "ct_train_1016_image_frame_131.png", "ct_train_1017_image_frame_262.png", "ct_train_1015_image_frame_156.png", "ct_train_1008_image_frame_170.png", "ct_train_1001_image_frame_236.png", "ct_train_1001_image_frame_162.png", "ct_train_1014_image_frame_315.png", "ct_train_1003_image_frame_84.png", "ct_train_1011_image_frame_34.png", "ct_train_1018_image_frame_44.png", "ct_train_1011_image_frame_131.png", "ct_train_1008_image_frame_10.png", "ct_train_1011_image_frame_114.png", "ct_train_1003_image_frame_125.png", "ct_train_1009_image_frame_68.png", "ct_train_1020_image_frame_44.png", "ct_train_1003_image_frame_178.png", "ct_train_1008_image_frame_7.png", "ct_train_1009_image_frame_270.png", "ct_train_1011_image_frame_231.png", "ct_train_1020_image_frame_287.png", "ct_train_1016_image_frame_313.png", "ct_train_1001_image_frame_124.png", "ct_train_1016_image_frame_280.png", "ct_train_1015_image_frame_110.png", "ct_train_1014_image_frame_84.png", "ct_train_1006_image_frame_145.png", "ct_train_1017_image_frame_36.png", "ct_train_1015_image_frame_157.png", "ct_train_1008_image_frame_2.png", "ct_train_1017_image_frame_41.png", "ct_train_1011_image_frame_184.png", "ct_train_1001_image_frame_42.png", "ct_train_1017_image_frame_264.png", "ct_train_1017_image_frame_183.png", "ct_train_1001_image_frame_287.png", "ct_train_1011_image_frame_151.png", "ct_train_1017_image_frame_223.png", "ct_train_1003_image_frame_242.png", "ct_train_1020_image_frame_224.png", "ct_train_1018_image_frame_96.png", "ct_train_1006_image_frame_72.png", "ct_train_1001_image_frame_108.png", "ct_train_1016_image_frame_164.png", "ct_train_1020_image_frame_246.png", "ct_train_1001_image_frame_347.png", "ct_train_1008_image_frame_70.png", "ct_train_1014_image_frame_335.png", "ct_train_1014_image_frame_309.png", "ct_train_1018_image_frame_119.png", "ct_train_1001_image_frame_22.png", "ct_train_1008_image_frame_134.png", "ct_train_1016_image_frame_304.png", "ct_train_1003_image_frame_286.png", "ct_train_1011_image_frame_193.png", "ct_train_1014_image_frame_232.png", "ct_train_1020_image_frame_176.png", "ct_train_1008_image_frame_124.png", "ct_train_1015_image_frame_210.png", "ct_train_1003_image_frame_6.png", "ct_train_1016_image_frame_15.png", "ct_train_1020_image_frame_202.png", "ct_train_1018_image_frame_22.png", "ct_train_1011_image_frame_67.png", "ct_train_1008_image_frame_199.png", "ct_train_1003_image_frame_44.png", "ct_train_1020_image_frame_8.png", "ct_train_1001_image_frame_44.png", "ct_train_1016_image_frame_281.png", "ct_train_1006_image_frame_220.png", "ct_train_1001_image_frame_145.png", "ct_train_1017_image_frame_219.png", "ct_train_1011_image_frame_154.png", "ct_train_1006_image_frame_77.png", "ct_train_1008_image_frame_79.png", "ct_train_1020_image_frame_67.png", "ct_train_1016_image_frame_252.png", "ct_train_1005_image_frame_49.png", "ct_train_1017_image_frame_119.png", "ct_train_1018_image_frame_116.png", "ct_train_1017_image_frame_50.png", "ct_train_1001_image_frame_288.png", "ct_train_1017_image_frame_67.png", "ct_train_1015_image_frame_147.png", "ct_train_1001_image_frame_85.png", "ct_train_1001_image_frame_202.png", "ct_train_1001_image_frame_28.png", "ct_train_1017_image_frame_203.png", "ct_train_1009_image_frame_75.png", "ct_train_1003_image_frame_146.png", "ct_train_1020_image_frame_184.png", 
"ct_train_1017_image_frame_66.png", "ct_train_1018_image_frame_92.png", "ct_train_1003_image_frame_159.png", "ct_train_1015_image_frame_271.png", "ct_train_1015_image_frame_233.png", "ct_train_1008_image_frame_14.png", "ct_train_1008_image_frame_195.png", "ct_train_1014_image_frame_136.png", "ct_train_1005_image_frame_64.png", "ct_train_1011_image_frame_145.png", "ct_train_1014_image_frame_240.png", "ct_train_1006_image_frame_192.png", "ct_train_1018_image_frame_137.png", "ct_train_1018_image_frame_11.png", "ct_train_1009_image_frame_113.png", "ct_train_1008_image_frame_52.png", "ct_train_1018_image_frame_37.png", "ct_train_1018_image_frame_26.png", "ct_train_1016_image_frame_210.png", "ct_train_1017_image_frame_182.png", "ct_train_1015_image_frame_51.png", "ct_train_1008_image_frame_102.png", "ct_train_1016_image_frame_140.png", "ct_train_1009_image_frame_291.png", "ct_train_1011_image_frame_80.png", "ct_train_1003_image_frame_227.png", "ct_train_1011_image_frame_159.png", "ct_train_1014_image_frame_291.png", "ct_train_1001_image_frame_120.png", "ct_train_1001_image_frame_246.png", "ct_train_1014_image_frame_150.png", "ct_train_1005_image_frame_158.png", "ct_train_1005_image_frame_13.png", "ct_train_1011_image_frame_171.png", "ct_train_1020_image_frame_358.png", "ct_train_1016_image_frame_32.png", "ct_train_1006_image_frame_5.png", "ct_train_1020_image_frame_342.png", "ct_train_1014_image_frame_11.png", "ct_train_1014_image_frame_233.png", "ct_train_1017_image_frame_122.png", "ct_train_1009_image_frame_219.png", "ct_train_1016_image_frame_171.png", "ct_train_1003_image_frame_150.png", "ct_train_1015_image_frame_130.png", "ct_train_1017_image_frame_267.png", "ct_train_1020_image_frame_187.png", "ct_train_1020_image_frame_221.png", "ct_train_1015_image_frame_218.png", "ct_train_1009_image_frame_287.png", "ct_train_1020_image_frame_51.png", "ct_train_1005_image_frame_8.png", "ct_train_1003_image_frame_277.png", "ct_train_1016_image_frame_204.png", "ct_train_1009_image_frame_16.png", "ct_train_1014_image_frame_24.png", "ct_train_1017_image_frame_247.png", "ct_train_1009_image_frame_76.png", "ct_train_1005_image_frame_76.png", "ct_train_1003_image_frame_37.png", "ct_train_1011_image_frame_140.png", "ct_train_1011_image_frame_29.png", "ct_train_1020_image_frame_273.png", "ct_train_1009_image_frame_184.png", "ct_train_1001_image_frame_154.png", "ct_train_1009_image_frame_259.png", "ct_train_1008_image_frame_13.png", "ct_train_1008_image_frame_64.png", "ct_train_1011_image_frame_182.png", "ct_train_1014_image_frame_257.png", "ct_train_1017_image_frame_40.png", "ct_train_1018_image_frame_101.png", "ct_train_1017_image_frame_255.png", "ct_train_1017_image_frame_109.png", "ct_train_1001_image_frame_112.png", "ct_train_1020_image_frame_324.png", "ct_train_1020_image_frame_201.png", "ct_train_1016_image_frame_240.png", "ct_train_1008_image_frame_104.png", "ct_train_1017_image_frame_136.png", "ct_train_1018_image_frame_54.png", "ct_train_1020_image_frame_88.png", "ct_train_1017_image_frame_70.png", "ct_train_1009_image_frame_23.png", "ct_train_1014_image_frame_189.png", "ct_train_1006_image_frame_43.png", "ct_train_1011_image_frame_17.png", "ct_train_1018_image_frame_0.png", "ct_train_1020_image_frame_109.png", "ct_train_1001_image_frame_348.png", "ct_train_1003_image_frame_207.png", "ct_train_1020_image_frame_7.png", "ct_train_1006_image_frame_116.png", "ct_train_1001_image_frame_196.png", "ct_train_1008_image_frame_158.png", "ct_train_1005_image_frame_51.png", "ct_train_1015_image_frame_250.png", 
"ct_train_1017_image_frame_216.png", "ct_train_1009_image_frame_101.png", "ct_train_1001_image_frame_325.png", "ct_train_1016_image_frame_35.png", "ct_train_1020_image_frame_1.png", "ct_train_1020_image_frame_281.png", "ct_train_1015_image_frame_221.png", "ct_train_1003_image_frame_243.png", "ct_train_1020_image_frame_211.png", "ct_train_1014_image_frame_341.png", "ct_train_1014_image_frame_103.png", "ct_train_1015_image_frame_24.png", "ct_train_1014_image_frame_35.png", "ct_train_1001_image_frame_317.png", "ct_train_1008_image_frame_160.png", "ct_train_1006_image_frame_97.png", "ct_train_1016_image_frame_251.png", "ct_train_1015_image_frame_159.png", "ct_train_1014_image_frame_57.png", "ct_train_1005_image_frame_63.png", "ct_train_1011_image_frame_93.png", "ct_train_1006_image_frame_106.png", "ct_train_1001_image_frame_251.png", "ct_train_1001_image_frame_113.png", "ct_train_1020_image_frame_314.png", "ct_train_1018_image_frame_69.png", "ct_train_1003_image_frame_104.png", "ct_train_1016_image_frame_57.png", "ct_train_1014_image_frame_10.png", "ct_train_1009_image_frame_242.png", "ct_train_1020_image_frame_210.png", "ct_train_1014_image_frame_82.png", "ct_train_1003_image_frame_24.png", "ct_train_1020_image_frame_329.png", "ct_train_1003_image_frame_116.png", "ct_train_1006_image_frame_105.png", "ct_train_1017_image_frame_0.png", "ct_train_1003_image_frame_259.png", "ct_train_1005_image_frame_41.png", "ct_train_1020_image_frame_33.png", "ct_train_1006_image_frame_38.png", "ct_train_1014_image_frame_141.png", "ct_train_1005_image_frame_85.png", "ct_train_1020_image_frame_203.png", "ct_train_1005_image_frame_56.png", "ct_train_1008_image_frame_109.png", "ct_train_1009_image_frame_198.png", "ct_train_1009_image_frame_88.png", "ct_train_1006_image_frame_42.png", "ct_train_1014_image_frame_7.png", "ct_train_1014_image_frame_85.png", "ct_train_1014_image_frame_175.png", "ct_train_1009_image_frame_2.png", "ct_train_1017_image_frame_181.png", "ct_train_1009_image_frame_71.png", "ct_train_1008_image_frame_3.png", "ct_train_1014_image_frame_131.png", "ct_train_1014_image_frame_294.png", "ct_train_1008_image_frame_157.png", "ct_train_1018_image_frame_115.png", "ct_train_1008_image_frame_107.png", "ct_train_1009_image_frame_126.png", "ct_train_1001_image_frame_237.png", "ct_train_1005_image_frame_52.png", "ct_train_1020_image_frame_129.png", "ct_train_1020_image_frame_275.png", "ct_train_1001_image_frame_211.png", "ct_train_1017_image_frame_265.png", "ct_train_1014_image_frame_239.png", "ct_train_1009_image_frame_49.png", "ct_train_1015_image_frame_133.png", "ct_train_1018_image_frame_177.png", "ct_train_1011_image_frame_210.png", "ct_train_1014_image_frame_16.png", "ct_train_1011_image_frame_220.png", "ct_train_1015_image_frame_192.png", "ct_train_1018_image_frame_79.png", "ct_train_1011_image_frame_73.png", "ct_train_1001_image_frame_55.png", "ct_train_1011_image_frame_167.png", "ct_train_1011_image_frame_230.png", "ct_train_1014_image_frame_145.png", "ct_train_1001_image_frame_340.png", "ct_train_1020_image_frame_59.png", "ct_train_1014_image_frame_95.png", "ct_train_1006_image_frame_48.png", "ct_train_1009_image_frame_106.png", "ct_train_1006_image_frame_32.png", "ct_train_1016_image_frame_256.png", "ct_train_1008_image_frame_185.png", "ct_train_1005_image_frame_74.png", "ct_train_1017_image_frame_200.png", "ct_train_1005_image_frame_11.png", "ct_train_1006_image_frame_247.png", "ct_train_1016_image_frame_206.png", "ct_train_1020_image_frame_197.png", "ct_train_1001_image_frame_240.png", 
"ct_train_1003_image_frame_34.png", "ct_train_1003_image_frame_33.png", "ct_train_1009_image_frame_131.png", "ct_train_1001_image_frame_274.png", "ct_train_1016_image_frame_43.png", "ct_train_1005_image_frame_91.png", "ct_train_1015_image_frame_269.png", "ct_train_1001_image_frame_40.png", "ct_train_1003_image_frame_85.png", "ct_train_1014_image_frame_14.png", "ct_train_1016_image_frame_265.png", "ct_train_1008_image_frame_177.png", "ct_train_1015_image_frame_12.png", "ct_train_1018_image_frame_94.png", "ct_train_1015_image_frame_33.png", "ct_train_1016_image_frame_55.png", "ct_train_1015_image_frame_281.png", "ct_train_1017_image_frame_209.png", "ct_train_1016_image_frame_254.png", "ct_train_1003_image_frame_181.png", "ct_train_1020_image_frame_73.png", "ct_train_1006_image_frame_193.png", "ct_train_1003_image_frame_263.png", "ct_train_1001_image_frame_110.png", "ct_train_1008_image_frame_156.png", "ct_train_1001_image_frame_135.png", "ct_train_1009_image_frame_134.png", "ct_train_1001_image_frame_63.png", "ct_train_1008_image_frame_155.png", "ct_train_1020_image_frame_135.png", "ct_train_1020_image_frame_191.png", "ct_train_1001_image_frame_58.png", "ct_train_1020_image_frame_260.png", "ct_train_1016_image_frame_237.png", "ct_train_1006_image_frame_212.png", "ct_train_1014_image_frame_115.png", "ct_train_1003_image_frame_79.png", "ct_train_1016_image_frame_187.png", "ct_train_1009_image_frame_228.png", "ct_train_1015_image_frame_268.png", "ct_train_1009_image_frame_124.png", "ct_train_1016_image_frame_112.png", "ct_train_1001_image_frame_60.png", "ct_train_1016_image_frame_299.png", "ct_train_1020_image_frame_225.png", "ct_train_1005_image_frame_126.png", "ct_train_1001_image_frame_338.png", "ct_train_1003_image_frame_268.png", "ct_train_1017_image_frame_100.png", "ct_train_1001_image_frame_314.png", "ct_train_1001_image_frame_142.png", "ct_train_1001_image_frame_280.png", "ct_train_1017_image_frame_193.png", "ct_train_1001_image_frame_295.png", "ct_train_1016_image_frame_3.png", "ct_train_1008_image_frame_36.png", "ct_train_1018_image_frame_85.png", "ct_train_1003_image_frame_241.png", "ct_train_1006_image_frame_226.png", "ct_train_1020_image_frame_166.png", "ct_train_1011_image_frame_201.png", "ct_train_1016_image_frame_271.png", "ct_train_1015_image_frame_253.png", "ct_train_1009_image_frame_30.png", "ct_train_1020_image_frame_144.png", "ct_train_1005_image_frame_95.png", "ct_train_1014_image_frame_161.png", "ct_train_1001_image_frame_171.png", "ct_train_1003_image_frame_70.png", "ct_train_1009_image_frame_57.png", "ct_train_1015_image_frame_208.png", "ct_train_1006_image_frame_144.png", "ct_train_1020_image_frame_167.png", "ct_train_1014_image_frame_118.png", "ct_train_1011_image_frame_70.png", "ct_train_1008_image_frame_114.png", "ct_train_1015_image_frame_234.png", "ct_train_1008_image_frame_82.png", "ct_train_1016_image_frame_108.png", "ct_train_1008_image_frame_5.png", "ct_train_1014_image_frame_185.png", "ct_train_1006_image_frame_15.png", "ct_train_1009_image_frame_238.png", "ct_train_1016_image_frame_173.png", "ct_train_1009_image_frame_52.png", "ct_train_1005_image_frame_31.png", "ct_train_1017_image_frame_158.png", "ct_train_1011_image_frame_152.png", "ct_train_1016_image_frame_33.png", "ct_train_1001_image_frame_187.png", "ct_train_1014_image_frame_334.png", "ct_train_1008_image_frame_77.png", "ct_train_1001_image_frame_96.png", "ct_train_1017_image_frame_30.png", "ct_train_1001_image_frame_76.png", "ct_train_1016_image_frame_231.png", "ct_train_1006_image_frame_11.png", 
"ct_train_1017_image_frame_4.png", "ct_train_1011_image_frame_71.png", "ct_train_1008_image_frame_1.png", "ct_train_1014_image_frame_258.png", "ct_train_1014_image_frame_174.png", "ct_train_1006_image_frame_46.png", "ct_train_1009_image_frame_58.png", "ct_train_1008_image_frame_164.png", "ct_train_1016_image_frame_161.png", "ct_train_1001_image_frame_304.png", "ct_train_1006_image_frame_108.png", "ct_train_1001_image_frame_9.png", "ct_train_1003_image_frame_225.png", "ct_train_1018_image_frame_47.png", "ct_train_1018_image_frame_70.png", "ct_train_1009_image_frame_47.png", "ct_train_1014_image_frame_55.png", "ct_train_1017_image_frame_112.png", "ct_train_1006_image_frame_50.png", "ct_train_1017_image_frame_77.png", "ct_train_1001_image_frame_57.png", "ct_train_1015_image_frame_229.png", "ct_train_1003_image_frame_102.png", "ct_train_1006_image_frame_44.png", "ct_train_1006_image_frame_29.png", "ct_train_1003_image_frame_184.png", "ct_train_1008_image_frame_62.png", "ct_train_1018_image_frame_40.png", "ct_train_1005_image_frame_154.png", "ct_train_1006_image_frame_232.png", "ct_train_1009_image_frame_178.png", "ct_train_1016_image_frame_203.png", "ct_train_1015_image_frame_146.png", "ct_train_1014_image_frame_130.png", "ct_train_1017_image_frame_106.png", "ct_train_1020_image_frame_123.png", "ct_train_1020_image_frame_89.png", "ct_train_1016_image_frame_20.png", "ct_train_1015_image_frame_81.png", "ct_train_1016_image_frame_143.png", "ct_train_1014_image_frame_117.png", "ct_train_1015_image_frame_109.png", "ct_train_1020_image_frame_339.png", "ct_train_1003_image_frame_287.png", "ct_train_1005_image_frame_105.png", "ct_train_1009_image_frame_94.png", "ct_train_1015_image_frame_219.png", "ct_train_1015_image_frame_290.png", "ct_train_1006_image_frame_10.png", "ct_train_1005_image_frame_0.png", "ct_train_1003_image_frame_20.png", "ct_train_1017_image_frame_2.png", "ct_train_1006_image_frame_187.png", "ct_train_1017_image_frame_159.png", "ct_train_1020_image_frame_188.png", "ct_train_1011_image_frame_142.png", "ct_train_1014_image_frame_97.png", "ct_train_1011_image_frame_110.png", "ct_train_1020_image_frame_154.png", "ct_train_1020_image_frame_334.png", "ct_train_1016_image_frame_321.png", "ct_train_1016_image_frame_110.png", "ct_train_1008_image_frame_30.png", "ct_train_1020_image_frame_205.png", "ct_train_1011_image_frame_18.png", "ct_train_1017_image_frame_87.png", "ct_train_1011_image_frame_172.png", "ct_train_1014_image_frame_347.png", "ct_train_1017_image_frame_191.png", "ct_train_1003_image_frame_253.png", "ct_train_1001_image_frame_229.png", "ct_train_1001_image_frame_114.png", "ct_train_1001_image_frame_250.png", "ct_train_1001_image_frame_178.png", "ct_train_1001_image_frame_107.png", "ct_train_1016_image_frame_157.png", "ct_train_1016_image_frame_189.png", "ct_train_1018_image_frame_140.png", "ct_train_1017_image_frame_131.png", "ct_train_1014_image_frame_187.png", "ct_train_1020_image_frame_189.png", "ct_train_1001_image_frame_297.png", "ct_train_1015_image_frame_298.png", "ct_train_1017_image_frame_110.png", "ct_train_1020_image_frame_351.png", "ct_train_1005_image_frame_58.png", "ct_train_1017_image_frame_75.png", "ct_train_1015_image_frame_102.png", "ct_train_1011_image_frame_90.png", "ct_train_1018_image_frame_143.png", "ct_train_1001_image_frame_50.png", "ct_train_1003_image_frame_81.png", "ct_train_1016_image_frame_224.png", "ct_train_1015_image_frame_203.png", "ct_train_1016_image_frame_104.png", "ct_train_1003_image_frame_250.png", "ct_train_1005_image_frame_141.png", 
"ct_train_1011_image_frame_176.png", "ct_train_1017_image_frame_162.png", "ct_train_1020_image_frame_323.png", "ct_train_1001_image_frame_74.png", "ct_train_1016_image_frame_263.png", "ct_train_1020_image_frame_49.png", "ct_train_1008_image_frame_56.png", "ct_train_1006_image_frame_52.png", "ct_train_1020_image_frame_243.png", "ct_train_1018_image_frame_56.png", "ct_train_1003_image_frame_239.png", "ct_train_1020_image_frame_99.png", "ct_train_1001_image_frame_267.png", "ct_train_1011_image_frame_197.png", "ct_train_1008_image_frame_63.png", "ct_train_1017_image_frame_18.png", "ct_train_1020_image_frame_256.png", "ct_train_1008_image_frame_137.png", "ct_train_1018_image_frame_111.png", "ct_train_1014_image_frame_308.png", "ct_train_1003_image_frame_57.png", "ct_train_1008_image_frame_58.png", "ct_train_1008_image_frame_65.png", "ct_train_1003_image_frame_16.png", "ct_train_1018_image_frame_109.png", "ct_train_1017_image_frame_130.png", "ct_train_1001_image_frame_260.png", "ct_train_1015_image_frame_169.png", "ct_train_1003_image_frame_203.png", "ct_train_1003_image_frame_166.png", "ct_train_1003_image_frame_66.png", "ct_train_1015_image_frame_73.png", "ct_train_1001_image_frame_226.png", "ct_train_1016_image_frame_167.png", "ct_train_1006_image_frame_126.png", "ct_train_1020_image_frame_150.png", "ct_train_1016_image_frame_232.png", "ct_train_1008_image_frame_212.png", "ct_train_1003_image_frame_3.png", "ct_train_1003_image_frame_223.png", "ct_train_1008_image_frame_37.png", "ct_train_1011_image_frame_166.png", "ct_train_1020_image_frame_160.png", "ct_train_1016_image_frame_77.png", "ct_train_1011_image_frame_224.png", "ct_train_1014_image_frame_271.png", "ct_train_1008_image_frame_110.png", "ct_train_1020_image_frame_177.png", "ct_train_1017_image_frame_85.png", "ct_train_1009_image_frame_55.png", "ct_train_1006_image_frame_216.png", "ct_train_1017_image_frame_202.png", "ct_train_1017_image_frame_188.png", "ct_train_1006_image_frame_47.png", "ct_train_1017_image_frame_137.png", "ct_train_1014_image_frame_183.png", "ct_train_1001_image_frame_79.png", "ct_train_1020_image_frame_347.png", "ct_train_1016_image_frame_61.png", "ct_train_1009_image_frame_274.png", "ct_train_1005_image_frame_81.png", "ct_train_1015_image_frame_77.png", "ct_train_1018_image_frame_132.png", "ct_train_1020_image_frame_335.png", "ct_train_1009_image_frame_146.png", "ct_train_1017_image_frame_113.png", "ct_train_1011_image_frame_78.png", "ct_train_1006_image_frame_159.png", "ct_train_1020_image_frame_235.png", "ct_train_1011_image_frame_119.png", "ct_train_1011_image_frame_14.png", "ct_train_1005_image_frame_69.png", "ct_train_1018_image_frame_89.png", "ct_train_1015_image_frame_91.png", "ct_train_1001_image_frame_71.png", "ct_train_1018_image_frame_148.png", "ct_train_1009_image_frame_17.png", "ct_train_1020_image_frame_353.png", "ct_train_1009_image_frame_249.png", "ct_train_1017_image_frame_68.png", "ct_train_1015_image_frame_199.png", "ct_train_1017_image_frame_126.png", "ct_train_1003_image_frame_171.png", "ct_train_1008_image_frame_209.png", "ct_train_1011_image_frame_20.png", "ct_train_1003_image_frame_69.png", "ct_train_1001_image_frame_194.png", "ct_train_1016_image_frame_205.png", "ct_train_1018_image_frame_30.png", "ct_train_1009_image_frame_103.png", "ct_train_1001_image_frame_217.png", "ct_train_1001_image_frame_174.png", "ct_train_1014_image_frame_166.png", "ct_train_1020_image_frame_134.png", "ct_train_1018_image_frame_153.png", "ct_train_1011_image_frame_105.png", "ct_train_1003_image_frame_43.png", 
"ct_train_1015_image_frame_52.png", "ct_train_1011_image_frame_162.png", "ct_train_1020_image_frame_284.png", "ct_train_1017_image_frame_61.png", "ct_train_1006_image_frame_132.png", "ct_train_1006_image_frame_160.png", "ct_train_1015_image_frame_31.png", "ct_train_1001_image_frame_83.png", "ct_train_1014_image_frame_193.png", "ct_train_1017_image_frame_201.png", "ct_train_1016_image_frame_229.png", "ct_train_1014_image_frame_139.png", "ct_train_1020_image_frame_103.png", "ct_train_1003_image_frame_111.png", "ct_train_1008_image_frame_129.png", "ct_train_1005_image_frame_14.png", "ct_train_1014_image_frame_278.png", "ct_train_1015_image_frame_143.png", "ct_train_1003_image_frame_180.png", "ct_train_1016_image_frame_122.png", "ct_train_1017_image_frame_12.png", "ct_train_1006_image_frame_37.png", "ct_train_1009_image_frame_77.png", "ct_train_1014_image_frame_42.png", "ct_train_1014_image_frame_322.png", "ct_train_1008_image_frame_50.png", "ct_train_1011_image_frame_39.png", "ct_train_1009_image_frame_97.png", "ct_train_1008_image_frame_186.png", "ct_train_1014_image_frame_316.png", "ct_train_1017_image_frame_62.png", "ct_train_1001_image_frame_128.png", "ct_train_1016_image_frame_145.png", "ct_train_1006_image_frame_115.png", "ct_train_1017_image_frame_271.png", "ct_train_1014_image_frame_54.png", "ct_train_1003_image_frame_47.png", "ct_train_1017_image_frame_194.png", "ct_train_1014_image_frame_297.png", "ct_train_1020_image_frame_214.png", "ct_train_1011_image_frame_74.png", "ct_train_1014_image_frame_243.png", "ct_train_1018_image_frame_46.png", "ct_train_1015_image_frame_240.png", "ct_train_1001_image_frame_152.png", "ct_train_1016_image_frame_289.png", "ct_train_1020_image_frame_340.png", "ct_train_1006_image_frame_147.png", "ct_train_1014_image_frame_235.png", "ct_train_1003_image_frame_53.png", "ct_train_1016_image_frame_163.png", "ct_train_1009_image_frame_244.png", "ct_train_1017_image_frame_215.png", "ct_train_1017_image_frame_63.png", "ct_train_1001_image_frame_222.png", "ct_train_1016_image_frame_105.png", "ct_train_1014_image_frame_197.png", "ct_train_1018_image_frame_105.png", "ct_train_1006_image_frame_236.png", "ct_train_1011_image_frame_208.png", "ct_train_1006_image_frame_59.png", "ct_train_1003_image_frame_157.png", "ct_train_1006_image_frame_2.png", "ct_train_1020_image_frame_13.png", "ct_train_1003_image_frame_297.png", "ct_train_1020_image_frame_312.png", "ct_train_1003_image_frame_280.png", "ct_train_1015_image_frame_61.png", "ct_train_1016_image_frame_7.png", "ct_train_1008_image_frame_27.png", "ct_train_1015_image_frame_38.png", "ct_train_1020_image_frame_55.png", "ct_train_1009_image_frame_135.png", "ct_train_1017_image_frame_22.png", "ct_train_1015_image_frame_265.png", "ct_train_1014_image_frame_36.png", "ct_train_1008_image_frame_49.png", "ct_train_1016_image_frame_141.png", "ct_train_1017_image_frame_5.png", "ct_train_1011_image_frame_87.png", "ct_train_1017_image_frame_232.png", "ct_train_1003_image_frame_156.png", "ct_train_1015_image_frame_139.png", "ct_train_1011_image_frame_222.png", "ct_train_1001_image_frame_138.png", "ct_train_1011_image_frame_228.png", "ct_train_1009_image_frame_130.png", "ct_train_1015_image_frame_153.png", "ct_train_1020_image_frame_302.png", "ct_train_1016_image_frame_50.png", "ct_train_1008_image_frame_159.png", "ct_train_1017_image_frame_152.png", "ct_train_1005_image_frame_59.png", "ct_train_1005_image_frame_32.png", "ct_train_1015_image_frame_212.png", "ct_train_1011_image_frame_15.png", "ct_train_1005_image_frame_78.png", 
"ct_train_1015_image_frame_194.png", "ct_train_1020_image_frame_311.png", "ct_train_1018_image_frame_97.png", "ct_train_1018_image_frame_48.png", "ct_train_1006_image_frame_230.png", "ct_train_1015_image_frame_60.png", "ct_train_1020_image_frame_100.png", "ct_train_1009_image_frame_153.png", "ct_train_1011_image_frame_96.png", "ct_train_1016_image_frame_159.png", "ct_train_1001_image_frame_313.png", "ct_train_1020_image_frame_85.png", "ct_train_1016_image_frame_239.png", "ct_train_1014_image_frame_4.png", "ct_train_1014_image_frame_311.png", "ct_train_1017_image_frame_276.png", "ct_train_1018_image_frame_78.png", "ct_train_1015_image_frame_286.png", "ct_train_1016_image_frame_9.png", "ct_train_1014_image_frame_336.png", "ct_train_1008_image_frame_144.png", "ct_train_1016_image_frame_199.png", "ct_train_1011_image_frame_179.png", "ct_train_1003_image_frame_52.png", "ct_train_1015_image_frame_299.png", "ct_train_1020_image_frame_26.png", "ct_train_1016_image_frame_18.png", "ct_train_1016_image_frame_136.png", "ct_train_1016_image_frame_117.png", "ct_train_1014_image_frame_196.png", "ct_train_1009_image_frame_243.png", "ct_train_1011_image_frame_59.png", "ct_train_1011_image_frame_10.png", "ct_train_1018_image_frame_62.png", "ct_train_1009_image_frame_263.png", "ct_train_1003_image_frame_147.png", "ct_train_1008_image_frame_203.png", "ct_train_1009_image_frame_204.png", "ct_train_1015_image_frame_190.png", "ct_train_1015_image_frame_84.png", "ct_train_1001_image_frame_37.png", "ct_train_1016_image_frame_58.png", "ct_train_1001_image_frame_330.png", "ct_train_1015_image_frame_23.png", "ct_train_1016_image_frame_129.png", "ct_train_1016_image_frame_63.png", "ct_train_1001_image_frame_118.png", "ct_train_1009_image_frame_74.png", "ct_train_1005_image_frame_71.png", "ct_train_1015_image_frame_239.png", "ct_train_1014_image_frame_352.png", "ct_train_1014_image_frame_226.png", "ct_train_1011_image_frame_37.png", "ct_train_1003_image_frame_72.png", "ct_train_1014_image_frame_75.png", "ct_train_1001_image_frame_36.png", "ct_train_1020_image_frame_352.png", "ct_train_1001_image_frame_270.png", "ct_train_1011_image_frame_174.png", "ct_train_1005_image_frame_37.png", "ct_train_1018_image_frame_52.png", "ct_train_1008_image_frame_180.png", "ct_train_1015_image_frame_220.png", "ct_train_1014_image_frame_266.png", "ct_train_1020_image_frame_304.png", "ct_train_1018_image_frame_134.png", "ct_train_1001_image_frame_262.png", "ct_train_1017_image_frame_140.png", "ct_train_1015_image_frame_83.png", "ct_train_1017_image_frame_217.png", "ct_train_1001_image_frame_160.png", "ct_train_1020_image_frame_231.png", "ct_train_1006_image_frame_68.png", "ct_train_1001_image_frame_354.png", "ct_train_1017_image_frame_123.png", "ct_train_1018_image_frame_159.png", "ct_train_1017_image_frame_210.png", "ct_train_1014_image_frame_224.png", "ct_train_1006_image_frame_189.png", "ct_train_1006_image_frame_210.png", "ct_train_1001_image_frame_245.png", "ct_train_1015_image_frame_129.png", "ct_train_1009_image_frame_199.png", "ct_train_1003_image_frame_80.png", "ct_train_1011_image_frame_6.png", "ct_train_1014_image_frame_354.png", "ct_train_1009_image_frame_8.png", "ct_train_1009_image_frame_19.png", "ct_train_1009_image_frame_157.png", "ct_train_1011_image_frame_65.png", "ct_train_1014_image_frame_346.png", "ct_train_1016_image_frame_42.png", "ct_train_1018_image_frame_59.png", "ct_train_1011_image_frame_126.png", "ct_train_1014_image_frame_46.png", "ct_train_1016_image_frame_218.png", "ct_train_1003_image_frame_54.png", 
"ct_train_1016_image_frame_88.png", "ct_train_1016_image_frame_328.png", "ct_train_1018_image_frame_87.png", "ct_train_1014_image_frame_284.png", "ct_train_1014_image_frame_67.png", "ct_train_1003_image_frame_29.png", "ct_train_1005_image_frame_157.png", "ct_train_1015_image_frame_103.png", "ct_train_1020_image_frame_242.png", "ct_train_1009_image_frame_149.png", "ct_train_1008_image_frame_166.png", "ct_train_1001_image_frame_359.png", "ct_train_1018_image_frame_4.png", "ct_train_1015_image_frame_161.png", "ct_train_1005_image_frame_174.png", "ct_train_1001_image_frame_218.png", "ct_train_1014_image_frame_242.png", "ct_train_1014_image_frame_19.png", "ct_train_1008_image_frame_17.png", "ct_train_1009_image_frame_151.png", "ct_train_1017_image_frame_249.png", "ct_train_1001_image_frame_362.png", "ct_train_1008_image_frame_76.png", "ct_train_1015_image_frame_214.png", "ct_train_1014_image_frame_313.png", "ct_train_1008_image_frame_92.png", "ct_train_1001_image_frame_59.png", "ct_train_1017_image_frame_79.png", "ct_train_1015_image_frame_119.png", "ct_train_1018_image_frame_162.png", "ct_train_1005_image_frame_161.png", "ct_train_1020_image_frame_140.png", "ct_train_1016_image_frame_44.png", "ct_train_1020_image_frame_31.png", "ct_train_1011_image_frame_26.png", "ct_train_1011_image_frame_191.png", "ct_train_1003_image_frame_198.png", "ct_train_1017_image_frame_180.png", "ct_train_1009_image_frame_195.png", "ct_train_1005_image_frame_146.png", "ct_train_1020_image_frame_34.png", "ct_train_1011_image_frame_147.png", "ct_train_1008_image_frame_71.png", "ct_train_1003_image_frame_141.png", "ct_train_1017_image_frame_120.png", "ct_train_1006_image_frame_228.png", "ct_train_1015_image_frame_25.png", "ct_train_1011_image_frame_55.png", "ct_train_1017_image_frame_1.png", "ct_train_1001_image_frame_91.png", "ct_train_1016_image_frame_302.png", "ct_train_1018_image_frame_180.png", "ct_train_1009_image_frame_210.png", "ct_train_1016_image_frame_255.png", "ct_train_1006_image_frame_114.png", "ct_train_1009_image_frame_109.png", "ct_train_1020_image_frame_249.png", "ct_train_1009_image_frame_218.png", "ct_train_1006_image_frame_201.png", "ct_train_1016_image_frame_284.png", "ct_train_1015_image_frame_224.png", "ct_train_1001_image_frame_111.png", "ct_train_1006_image_frame_149.png", "ct_train_1003_image_frame_63.png", "ct_train_1003_image_frame_9.png", "ct_train_1009_image_frame_148.png", "ct_train_1017_image_frame_161.png", "ct_train_1005_image_frame_136.png", "ct_train_1009_image_frame_45.png", "ct_train_1014_image_frame_60.png", "ct_train_1003_image_frame_143.png", "ct_train_1015_image_frame_125.png", "ct_train_1011_image_frame_121.png", "ct_train_1003_image_frame_220.png", "ct_train_1015_image_frame_201.png", "ct_train_1005_image_frame_70.png", "ct_train_1016_image_frame_72.png", "ct_train_1015_image_frame_282.png", "ct_train_1001_image_frame_64.png", "ct_train_1014_image_frame_245.png", "ct_train_1020_image_frame_153.png", "ct_train_1014_image_frame_268.png", "ct_train_1005_image_frame_100.png", "ct_train_1018_image_frame_49.png", "ct_train_1017_image_frame_73.png", "ct_train_1001_image_frame_30.png", "ct_train_1014_image_frame_246.png", "ct_train_1006_image_frame_31.png", "ct_train_1017_image_frame_65.png", "ct_train_1001_image_frame_4.png", "ct_train_1020_image_frame_272.png", "ct_train_1008_image_frame_74.png", "ct_train_1017_image_frame_39.png", "ct_train_1006_image_frame_215.png", "ct_train_1011_image_frame_8.png", "ct_train_1009_image_frame_250.png", "ct_train_1001_image_frame_20.png", 
"ct_train_1001_image_frame_134.png", "ct_train_1014_image_frame_65.png", "ct_train_1015_image_frame_185.png", "ct_train_1003_image_frame_123.png", "ct_train_1001_image_frame_307.png", "ct_train_1017_image_frame_282.png", "ct_train_1017_image_frame_117.png", "ct_train_1003_image_frame_27.png", "ct_train_1003_image_frame_55.png", "ct_train_1009_image_frame_292.png", "ct_train_1003_image_frame_208.png", "ct_train_1017_image_frame_160.png", "ct_train_1003_image_frame_205.png", "ct_train_1008_image_frame_87.png", "ct_train_1017_image_frame_270.png", "ct_train_1009_image_frame_35.png", "ct_train_1009_image_frame_22.png", "ct_train_1020_image_frame_141.png", "ct_train_1003_image_frame_135.png", "ct_train_1015_image_frame_158.png", "ct_train_1017_image_frame_38.png", "ct_train_1016_image_frame_103.png", "ct_train_1005_image_frame_75.png", "ct_train_1016_image_frame_98.png", "ct_train_1016_image_frame_312.png", "ct_train_1014_image_frame_210.png", "ct_train_1003_image_frame_133.png", "ct_train_1005_image_frame_25.png", "ct_train_1008_image_frame_81.png", "ct_train_1015_image_frame_114.png", "ct_train_1006_image_frame_62.png", "ct_train_1009_image_frame_181.png", "ct_train_1006_image_frame_64.png", "ct_train_1006_image_frame_53.png", "ct_train_1003_image_frame_88.png", "ct_train_1006_image_frame_123.png", "ct_train_1006_image_frame_103.png", "ct_train_1005_image_frame_109.png", "ct_train_1003_image_frame_78.png", "ct_train_1009_image_frame_237.png", "ct_train_1003_image_frame_279.png", "ct_train_1015_image_frame_45.png", "ct_train_1005_image_frame_92.png", "ct_train_1011_image_frame_69.png", "ct_train_1014_image_frame_282.png", "ct_train_1005_image_frame_147.png", "ct_train_1011_image_frame_226.png", "ct_train_1014_image_frame_301.png", "ct_train_1008_image_frame_179.png", "ct_train_1014_image_frame_135.png", "ct_train_1006_image_frame_214.png", "ct_train_1009_image_frame_235.png", "ct_train_1001_image_frame_190.png", "ct_train_1020_image_frame_71.png", "ct_train_1015_image_frame_255.png", "ct_train_1001_image_frame_67.png", "ct_train_1014_image_frame_325.png", "ct_train_1020_image_frame_258.png", "ct_train_1003_image_frame_38.png", "ct_train_1014_image_frame_98.png", "ct_train_1003_image_frame_281.png", "ct_train_1005_image_frame_17.png", "ct_train_1005_image_frame_102.png", "ct_train_1005_image_frame_54.png", "ct_train_1001_image_frame_258.png", "ct_train_1020_image_frame_356.png", "ct_train_1018_image_frame_126.png", "ct_train_1018_image_frame_174.png", "ct_train_1020_image_frame_12.png", "ct_train_1003_image_frame_97.png", "ct_train_1001_image_frame_346.png", "ct_train_1015_image_frame_148.png", "ct_train_1018_image_frame_122.png", "ct_train_1020_image_frame_251.png", "ct_train_1009_image_frame_26.png", "ct_train_1003_image_frame_14.png", "ct_train_1015_image_frame_204.png", "ct_train_1006_image_frame_166.png", "ct_train_1011_image_frame_209.png", "ct_train_1005_image_frame_48.png", "ct_train_1001_image_frame_223.png", "ct_train_1006_image_frame_54.png", "ct_train_1015_image_frame_167.png", "ct_train_1015_image_frame_279.png", "ct_train_1015_image_frame_108.png", "ct_train_1020_image_frame_76.png", "ct_train_1005_image_frame_106.png", "ct_train_1011_image_frame_103.png", "ct_train_1005_image_frame_33.png", "ct_train_1008_image_frame_83.png", "ct_train_1003_image_frame_151.png", "ct_train_1020_image_frame_106.png", "ct_train_1009_image_frame_188.png", "ct_train_1015_image_frame_183.png", "ct_train_1006_image_frame_91.png", "ct_train_1017_image_frame_34.png", "ct_train_1016_image_frame_219.png", 
"ct_train_1016_image_frame_279.png", "ct_train_1003_image_frame_228.png", "ct_train_1015_image_frame_2.png", "ct_train_1016_image_frame_127.png", "ct_train_1017_image_frame_48.png", "ct_train_1011_image_frame_232.png", "ct_train_1009_image_frame_201.png", "ct_train_1015_image_frame_64.png", "ct_train_1006_image_frame_113.png", "ct_train_1020_image_frame_259.png", "ct_train_1011_image_frame_225.png", "ct_train_1003_image_frame_265.png", "ct_train_1009_image_frame_207.png", "ct_train_1008_image_frame_125.png", "ct_train_1008_image_frame_141.png", "ct_train_1017_image_frame_42.png", "ct_train_1017_image_frame_165.png", "ct_train_1008_image_frame_127.png", "ct_train_1009_image_frame_213.png", "ct_train_1005_image_frame_172.png", "ct_train_1017_image_frame_94.png", "ct_train_1006_image_frame_190.png", "ct_train_1011_image_frame_57.png", "ct_train_1014_image_frame_306.png", "ct_train_1014_image_frame_25.png", "ct_train_1009_image_frame_177.png", "ct_train_1020_image_frame_104.png", "ct_train_1006_image_frame_181.png", "ct_train_1015_image_frame_35.png", "ct_train_1011_image_frame_107.png", "ct_train_1006_image_frame_89.png", "ct_train_1020_image_frame_57.png", "ct_train_1014_image_frame_168.png", "ct_train_1015_image_frame_164.png", "ct_train_1015_image_frame_90.png", "ct_train_1009_image_frame_220.png", "ct_train_1003_image_frame_269.png", "ct_train_1020_image_frame_181.png", "ct_train_1018_image_frame_13.png", "ct_train_1014_image_frame_186.png", "ct_train_1014_image_frame_287.png", "ct_train_1006_image_frame_178.png", "ct_train_1016_image_frame_86.png", "ct_train_1008_image_frame_205.png", "ct_train_1003_image_frame_190.png", "ct_train_1014_image_frame_99.png", "ct_train_1016_image_frame_198.png", "ct_train_1003_image_frame_188.png", "ct_train_1003_image_frame_90.png", "ct_train_1020_image_frame_119.png", "ct_train_1005_image_frame_50.png", "ct_train_1009_image_frame_217.png", "ct_train_1009_image_frame_169.png", "ct_train_1015_image_frame_86.png", "ct_train_1017_image_frame_114.png", "ct_train_1015_image_frame_104.png", "ct_train_1001_image_frame_2.png", "ct_train_1017_image_frame_35.png", "ct_train_1020_image_frame_270.png", "ct_train_1009_image_frame_83.png", "ct_train_1005_image_frame_117.png", "ct_train_1008_image_frame_201.png", "ct_train_1011_image_frame_58.png", "ct_train_1001_image_frame_38.png", "ct_train_1014_image_frame_152.png", "ct_train_1001_image_frame_361.png", "ct_train_1017_image_frame_148.png", "ct_train_1009_image_frame_105.png", "ct_train_1001_image_frame_53.png", "ct_train_1005_image_frame_171.png", "ct_train_1016_image_frame_183.png", "ct_train_1016_image_frame_184.png", "ct_train_1009_image_frame_61.png", "ct_train_1016_image_frame_125.png", "ct_train_1005_image_frame_155.png", "ct_train_1017_image_frame_221.png", "ct_train_1017_image_frame_268.png", "ct_train_1020_image_frame_206.png", "ct_train_1015_image_frame_223.png", "ct_train_1014_image_frame_285.png", "ct_train_1015_image_frame_19.png", "ct_train_1001_image_frame_27.png", "ct_train_1005_image_frame_107.png", "ct_train_1017_image_frame_226.png", "ct_train_1005_image_frame_89.png", "ct_train_1020_image_frame_172.png", "ct_train_1009_image_frame_36.png", "ct_train_1016_image_frame_135.png", "ct_train_1009_image_frame_280.png", "ct_train_1015_image_frame_74.png", "ct_train_1003_image_frame_106.png", "ct_train_1020_image_frame_322.png", "ct_train_1016_image_frame_221.png", "ct_train_1005_image_frame_29.png", "ct_train_1014_image_frame_343.png", "ct_train_1009_image_frame_225.png", 
"ct_train_1001_image_frame_104.png", "ct_train_1016_image_frame_19.png", "ct_train_1015_image_frame_197.png", "ct_train_1016_image_frame_318.png", "ct_train_1016_image_frame_153.png", "ct_train_1009_image_frame_155.png", "ct_train_1003_image_frame_92.png", "ct_train_1018_image_frame_155.png", "ct_train_1005_image_frame_97.png", "ct_train_1016_image_frame_278.png", "ct_train_1009_image_frame_121.png", "ct_train_1020_image_frame_245.png", "ct_train_1016_image_frame_115.png", "ct_train_1011_image_frame_135.png", "ct_train_1017_image_frame_16.png", "ct_train_1001_image_frame_195.png", "ct_train_1008_image_frame_165.png", "ct_train_1001_image_frame_230.png", "ct_train_1015_image_frame_96.png", "ct_train_1015_image_frame_251.png", "ct_train_1020_image_frame_74.png", "ct_train_1009_image_frame_144.png", "ct_train_1016_image_frame_27.png", "ct_train_1006_image_frame_128.png", "ct_train_1014_image_frame_13.png", "ct_train_1011_image_frame_217.png", "ct_train_1017_image_frame_218.png", "ct_train_1008_image_frame_204.png", "ct_train_1018_image_frame_38.png", "ct_train_1020_image_frame_82.png", "ct_train_1009_image_frame_81.png", "ct_train_1001_image_frame_95.png", "ct_train_1011_image_frame_235.png", "ct_train_1003_image_frame_50.png", "ct_train_1008_image_frame_213.png", "ct_train_1003_image_frame_2.png", "ct_train_1016_image_frame_286.png", "ct_train_1020_image_frame_236.png", "ct_train_1009_image_frame_41.png", "ct_train_1016_image_frame_176.png", "ct_train_1003_image_frame_260.png", "ct_train_1014_image_frame_338.png", "ct_train_1001_image_frame_52.png", "ct_train_1020_image_frame_350.png", "ct_train_1006_image_frame_129.png", "ct_train_1020_image_frame_80.png", "ct_train_1008_image_frame_119.png", "ct_train_1011_image_frame_213.png", "ct_train_1005_image_frame_160.png", "ct_train_1014_image_frame_53.png", "ct_train_1006_image_frame_121.png", "ct_train_1018_image_frame_106.png", "ct_train_1001_image_frame_231.png", "ct_train_1015_image_frame_142.png", "ct_train_1009_image_frame_156.png", "ct_train_1016_image_frame_324.png", "ct_train_1014_image_frame_45.png", "ct_train_1008_image_frame_183.png", "ct_train_1008_image_frame_149.png", "ct_train_1020_image_frame_116.png", "ct_train_1009_image_frame_171.png", "ct_train_1011_image_frame_234.png", "ct_train_1003_image_frame_26.png", "ct_train_1009_image_frame_25.png", "ct_train_1017_image_frame_95.png", "ct_train_1020_image_frame_136.png", "ct_train_1008_image_frame_181.png", "ct_train_1001_image_frame_129.png", "ct_train_1014_image_frame_142.png", "ct_train_1009_image_frame_266.png", "ct_train_1016_image_frame_147.png", "ct_train_1018_image_frame_181.png", "ct_train_1014_image_frame_215.png", "ct_train_1011_image_frame_214.png", "ct_train_1015_image_frame_295.png", "ct_train_1016_image_frame_260.png", "ct_train_1011_image_frame_203.png", "ct_train_1016_image_frame_69.png", "ct_train_1018_image_frame_91.png", "ct_train_1014_image_frame_62.png", "ct_train_1001_image_frame_238.png", "ct_train_1011_image_frame_160.png", "ct_train_1016_image_frame_148.png", "ct_train_1003_image_frame_169.png", "ct_train_1006_image_frame_22.png", "ct_train_1020_image_frame_28.png", "ct_train_1015_image_frame_275.png", "ct_train_1015_image_frame_184.png", "ct_train_1017_image_frame_240.png", "ct_train_1011_image_frame_212.png", "ct_train_1016_image_frame_65.png", "ct_train_1017_image_frame_8.png", "ct_train_1009_image_frame_158.png", "ct_train_1001_image_frame_219.png", "ct_train_1017_image_frame_23.png", "ct_train_1001_image_frame_192.png", 
"ct_train_1018_image_frame_170.png", "ct_train_1011_image_frame_25.png", "ct_train_1003_image_frame_132.png", "ct_train_1006_image_frame_207.png", "ct_train_1009_image_frame_102.png", "ct_train_1017_image_frame_190.png", "ct_train_1014_image_frame_39.png", "ct_train_1017_image_frame_111.png", "ct_train_1001_image_frame_166.png", "ct_train_1016_image_frame_118.png", "ct_train_1006_image_frame_86.png", "ct_train_1008_image_frame_86.png", "ct_train_1020_image_frame_261.png", "ct_train_1001_image_frame_321.png", "ct_train_1001_image_frame_144.png", "ct_train_1003_image_frame_4.png", "ct_train_1015_image_frame_16.png", "ct_train_1008_image_frame_85.png", "ct_train_1020_image_frame_108.png", "ct_train_1017_image_frame_102.png", "ct_train_1017_image_frame_24.png", "ct_train_1008_image_frame_31.png", "ct_train_1017_image_frame_45.png", "ct_train_1005_image_frame_24.png", "ct_train_1005_image_frame_90.png", "ct_train_1018_image_frame_20.png", "ct_train_1003_image_frame_240.png", "ct_train_1001_image_frame_204.png", "ct_train_1003_image_frame_28.png", "ct_train_1020_image_frame_316.png", "ct_train_1014_image_frame_12.png", "ct_train_1008_image_frame_202.png", "ct_train_1016_image_frame_314.png", "ct_train_1016_image_frame_139.png", "ct_train_1011_image_frame_113.png", "ct_train_1016_image_frame_267.png", "ct_train_1018_image_frame_158.png", "ct_train_1006_image_frame_151.png", "ct_train_1016_image_frame_1.png", "ct_train_1003_image_frame_282.png", "ct_train_1018_image_frame_93.png", "ct_train_1016_image_frame_81.png", "ct_train_1006_image_frame_173.png", "ct_train_1020_image_frame_170.png", "ct_train_1014_image_frame_51.png", "ct_train_1020_image_frame_218.png", "ct_train_1017_image_frame_89.png", "ct_train_1009_image_frame_269.png", "ct_train_1001_image_frame_80.png", "ct_train_1001_image_frame_116.png", "ct_train_1017_image_frame_197.png", "ct_train_1020_image_frame_15.png", "ct_train_1015_image_frame_1.png", "ct_train_1014_image_frame_143.png", "ct_train_1016_image_frame_196.png", "ct_train_1015_image_frame_11.png", "ct_train_1018_image_frame_136.png", "ct_train_1009_image_frame_240.png", "ct_train_1014_image_frame_112.png", "ct_train_1017_image_frame_222.png", "ct_train_1018_image_frame_21.png", "ct_train_1009_image_frame_180.png", "ct_train_1009_image_frame_286.png", "ct_train_1008_image_frame_46.png", "ct_train_1014_image_frame_21.png", "ct_train_1003_image_frame_58.png", "ct_train_1017_image_frame_132.png", "ct_train_1020_image_frame_83.png", "ct_train_1009_image_frame_79.png", "ct_train_1008_image_frame_59.png", "ct_train_1018_image_frame_151.png", "ct_train_1001_image_frame_268.png", "ct_train_1018_image_frame_77.png", "ct_train_1009_image_frame_54.png", "ct_train_1020_image_frame_337.png", "ct_train_1020_image_frame_296.png", "ct_train_1014_image_frame_78.png", "ct_train_1003_image_frame_295.png", "ct_train_1006_image_frame_9.png", "ct_train_1009_image_frame_256.png", "ct_train_1006_image_frame_76.png", "ct_train_1009_image_frame_14.png", "ct_train_1006_image_frame_124.png", "ct_train_1016_image_frame_11.png", "ct_train_1006_image_frame_83.png", "ct_train_1016_image_frame_67.png", "ct_train_1016_image_frame_197.png", "ct_train_1011_image_frame_45.png", "ct_train_1016_image_frame_228.png", "ct_train_1001_image_frame_225.png", "ct_train_1015_image_frame_195.png", "ct_train_1017_image_frame_71.png", "ct_train_1009_image_frame_251.png", "ct_train_1009_image_frame_150.png", "ct_train_1020_image_frame_163.png", "ct_train_1017_image_frame_144.png", "ct_train_1005_image_frame_44.png", 
"ct_train_1003_image_frame_197.png", "ct_train_1016_image_frame_107.png", "ct_train_1003_image_frame_224.png", "ct_train_1006_image_frame_100.png", "ct_train_1016_image_frame_137.png", "ct_train_1006_image_frame_163.png", "ct_train_1003_image_frame_49.png", "ct_train_1016_image_frame_317.png", "ct_train_1003_image_frame_193.png", "ct_train_1014_image_frame_333.png", "ct_train_1001_image_frame_147.png", "ct_train_1001_image_frame_12.png", "ct_train_1014_image_frame_249.png", "ct_train_1015_image_frame_252.png", "ct_train_1015_image_frame_238.png", "ct_train_1018_image_frame_154.png", "ct_train_1006_image_frame_140.png", "ct_train_1020_image_frame_248.png", "ct_train_1009_image_frame_67.png", "ct_train_1016_image_frame_38.png", "ct_train_1015_image_frame_57.png", "ct_train_1009_image_frame_39.png", "ct_train_1017_image_frame_252.png", "ct_train_1009_image_frame_31.png", "ct_train_1015_image_frame_145.png", "ct_train_1016_image_frame_97.png", "ct_train_1006_image_frame_154.png", "ct_train_1009_image_frame_209.png", "ct_train_1020_image_frame_237.png", "ct_train_1006_image_frame_34.png", "ct_train_1008_image_frame_190.png", "ct_train_1001_image_frame_342.png", "ct_train_1015_image_frame_242.png", "ct_train_1017_image_frame_273.png", "ct_train_1001_image_frame_8.png", "ct_train_1014_image_frame_165.png", "ct_train_1015_image_frame_154.png", "ct_train_1015_image_frame_34.png", "ct_train_1006_image_frame_3.png", "ct_train_1018_image_frame_72.png", "ct_train_1015_image_frame_245.png", "ct_train_1001_image_frame_115.png", "ct_train_1016_image_frame_66.png", "ct_train_1018_image_frame_131.png", "ct_train_1014_image_frame_162.png", "ct_train_1011_image_frame_76.png", "ct_train_1016_image_frame_329.png", "ct_train_1006_image_frame_16.png", "ct_train_1009_image_frame_136.png", "ct_train_1014_image_frame_263.png", "ct_train_1020_image_frame_332.png", "ct_train_1006_image_frame_171.png", "ct_train_1016_image_frame_294.png", "ct_train_1008_image_frame_192.png", "ct_train_1020_image_frame_39.png", "ct_train_1015_image_frame_188.png", "ct_train_1014_image_frame_153.png", "ct_train_1001_image_frame_158.png", "ct_train_1016_image_frame_241.png", "ct_train_1011_image_frame_223.png", "ct_train_1003_image_frame_163.png", "ct_train_1005_image_frame_151.png", "ct_train_1001_image_frame_141.png", "ct_train_1017_image_frame_57.png", "ct_train_1009_image_frame_229.png", "ct_train_1020_image_frame_180.png", "ct_train_1017_image_frame_9.png", "ct_train_1014_image_frame_101.png", "ct_train_1020_image_frame_19.png", "ct_train_1017_image_frame_32.png", "ct_train_1018_image_frame_157.png", "ct_train_1001_image_frame_265.png", "ct_train_1011_image_frame_136.png", "ct_train_1003_image_frame_285.png", "ct_train_1020_image_frame_199.png", "ct_train_1001_image_frame_243.png", "ct_train_1009_image_frame_86.png", "ct_train_1003_image_frame_266.png", "ct_train_1008_image_frame_174.png", "ct_train_1003_image_frame_31.png", "ct_train_1011_image_frame_127.png", "ct_train_1001_image_frame_221.png", "ct_train_1014_image_frame_34.png", "ct_train_1006_image_frame_24.png", "ct_train_1015_image_frame_258.png", "ct_train_1016_image_frame_288.png", "ct_train_1008_image_frame_128.png", "ct_train_1017_image_frame_260.png", "ct_train_1017_image_frame_237.png", "ct_train_1020_image_frame_10.png", "ct_train_1016_image_frame_208.png", "ct_train_1006_image_frame_219.png", "ct_train_1001_image_frame_214.png", "ct_train_1020_image_frame_185.png", "ct_train_1008_image_frame_93.png", "ct_train_1009_image_frame_117.png", 
"ct_train_1011_image_frame_38.png", "ct_train_1001_image_frame_201.png", "ct_train_1006_image_frame_222.png", "ct_train_1014_image_frame_256.png", "ct_train_1017_image_frame_279.png", "ct_train_1017_image_frame_184.png", "ct_train_1017_image_frame_211.png", "ct_train_1009_image_frame_96.png", "ct_train_1015_image_frame_13.png", "ct_train_1018_image_frame_160.png", "ct_train_1018_image_frame_156.png", "ct_train_1001_image_frame_185.png", "ct_train_1016_image_frame_25.png", "ct_train_1020_image_frame_112.png", "ct_train_1011_image_frame_36.png", "ct_train_1003_image_frame_119.png", "ct_train_1014_image_frame_269.png", "ct_train_1018_image_frame_130.png", "ct_train_1014_image_frame_344.png", "ct_train_1001_image_frame_227.png", "ct_train_1020_image_frame_92.png", "ct_train_1016_image_frame_188.png", "ct_train_1017_image_frame_107.png", "ct_train_1020_image_frame_70.png", "ct_train_1005_image_frame_94.png", "ct_train_1020_image_frame_97.png", "ct_train_1001_image_frame_106.png", "ct_train_1020_image_frame_60.png", "ct_train_1009_image_frame_92.png", "ct_train_1020_image_frame_293.png", "ct_train_1020_image_frame_114.png", "ct_train_1015_image_frame_274.png", "ct_train_1003_image_frame_218.png", "ct_train_1015_image_frame_10.png", "ct_train_1020_image_frame_52.png", "ct_train_1001_image_frame_352.png", "ct_train_1020_image_frame_64.png", "ct_train_1001_image_frame_119.png", "ct_train_1015_image_frame_209.png", "ct_train_1020_image_frame_264.png", "ct_train_1015_image_frame_41.png", "ct_train_1005_image_frame_140.png", "ct_train_1008_image_frame_48.png", "ct_train_1009_image_frame_253.png", "ct_train_1009_image_frame_192.png", "ct_train_1016_image_frame_209.png", "ct_train_1020_image_frame_130.png", "ct_train_1009_image_frame_63.png", "ct_train_1008_image_frame_176.png", "ct_train_1011_image_frame_50.png", "ct_train_1001_image_frame_341.png", "ct_train_1014_image_frame_138.png", "ct_train_1015_image_frame_270.png", "ct_train_1001_image_frame_156.png", "ct_train_1005_image_frame_153.png", "ct_train_1003_image_frame_117.png", "ct_train_1011_image_frame_22.png", "ct_train_1015_image_frame_8.png", "ct_train_1005_image_frame_113.png", "ct_train_1011_image_frame_91.png", "ct_train_1006_image_frame_109.png", "ct_train_1015_image_frame_26.png", "ct_train_1014_image_frame_234.png", "ct_train_1018_image_frame_75.png", "ct_train_1011_image_frame_5.png", "ct_train_1017_image_frame_253.png", "ct_train_1017_image_frame_44.png", "ct_train_1020_image_frame_253.png", "ct_train_1008_image_frame_151.png", "ct_train_1001_image_frame_324.png", "ct_train_1008_image_frame_184.png", "ct_train_1011_image_frame_68.png", "ct_train_1005_image_frame_145.png", "ct_train_1006_image_frame_191.png", "ct_train_1015_image_frame_113.png", "ct_train_1006_image_frame_243.png", "ct_train_1009_image_frame_212.png", "ct_train_1014_image_frame_119.png", "ct_train_1016_image_frame_326.png", "ct_train_1020_image_frame_61.png", "ct_train_1006_image_frame_218.png", "ct_train_1014_image_frame_50.png", "ct_train_1014_image_frame_330.png", "ct_train_1014_image_frame_211.png", "ct_train_1003_image_frame_222.png", "ct_train_1020_image_frame_65.png", "ct_train_1003_image_frame_109.png", "ct_train_1001_image_frame_7.png", "ct_train_1008_image_frame_196.png", "ct_train_1005_image_frame_73.png", "ct_train_1009_image_frame_108.png", "ct_train_1001_image_frame_29.png", "ct_train_1005_image_frame_1.png", "ct_train_1016_image_frame_106.png", "ct_train_1006_image_frame_26.png", "ct_train_1006_image_frame_221.png", "ct_train_1008_image_frame_90.png", 
"ct_train_1015_image_frame_272.png", "ct_train_1016_image_frame_0.png", "ct_train_1014_image_frame_20.png", "ct_train_1005_image_frame_139.png", "ct_train_1009_image_frame_239.png", "ct_train_1003_image_frame_174.png", "ct_train_1015_image_frame_120.png", "ct_train_1006_image_frame_138.png", "ct_train_1003_image_frame_99.png", "ct_train_1018_image_frame_117.png", "ct_train_1014_image_frame_68.png", "ct_train_1014_image_frame_355.png", "ct_train_1014_image_frame_122.png", "ct_train_1005_image_frame_119.png", "ct_train_1016_image_frame_315.png", "ct_train_1005_image_frame_66.png", "ct_train_1011_image_frame_111.png", "ct_train_1014_image_frame_221.png", "ct_train_1015_image_frame_206.png", "ct_train_1003_image_frame_154.png", "ct_train_1014_image_frame_259.png", "ct_train_1003_image_frame_209.png", "ct_train_1001_image_frame_151.png", "ct_train_1020_image_frame_161.png", "ct_train_1015_image_frame_287.png", "ct_train_1017_image_frame_7.png", "ct_train_1009_image_frame_234.png", "ct_train_1008_image_frame_217.png", "ct_train_1008_image_frame_197.png", "ct_train_1001_image_frame_6.png", "ct_train_1001_image_frame_43.png", "ct_train_1011_image_frame_120.png", "ct_train_1006_image_frame_63.png", "ct_train_1003_image_frame_234.png", "ct_train_1009_image_frame_111.png", "ct_train_1011_image_frame_163.png", "ct_train_1001_image_frame_212.png", "ct_train_1014_image_frame_237.png", "ct_train_1011_image_frame_227.png", "ct_train_1018_image_frame_17.png", "ct_train_1016_image_frame_178.png", "ct_train_1015_image_frame_115.png", "ct_train_1009_image_frame_1.png", "ct_train_1016_image_frame_138.png", "ct_train_1016_image_frame_46.png", "ct_train_1016_image_frame_30.png", "ct_train_1008_image_frame_57.png", "ct_train_1006_image_frame_203.png", "ct_train_1005_image_frame_173.png", "ct_train_1020_image_frame_36.png", "ct_train_1001_image_frame_183.png", "ct_train_1003_image_frame_101.png", "ct_train_1015_image_frame_138.png", "ct_train_1011_image_frame_49.png", "ct_train_1015_image_frame_284.png", "ct_train_1020_image_frame_230.png", "ct_train_1020_image_frame_234.png", "ct_train_1008_image_frame_9.png", "ct_train_1011_image_frame_63.png", "ct_train_1009_image_frame_125.png", "ct_train_1001_image_frame_213.png", "ct_train_1020_image_frame_102.png", "ct_train_1016_image_frame_12.png", "ct_train_1017_image_frame_207.png", "ct_train_1020_image_frame_122.png", "ct_train_1009_image_frame_15.png", "ct_train_1001_image_frame_18.png", "ct_train_1016_image_frame_291.png", "ct_train_1014_image_frame_127.png", "ct_train_1003_image_frame_148.png", "ct_train_1018_image_frame_110.png", "ct_train_1003_image_frame_191.png", "ct_train_1015_image_frame_216.png", "ct_train_1014_image_frame_262.png", "ct_train_1008_image_frame_55.png", "ct_train_1011_image_frame_116.png", "ct_train_1011_image_frame_108.png", "ct_train_1016_image_frame_128.png", "ct_train_1011_image_frame_181.png", "ct_train_1009_image_frame_187.png", "ct_train_1014_image_frame_169.png", "ct_train_1018_image_frame_172.png", "ct_train_1001_image_frame_235.png", "ct_train_1015_image_frame_126.png", "ct_train_1020_image_frame_227.png", "ct_train_1005_image_frame_108.png", "ct_train_1006_image_frame_202.png", "ct_train_1003_image_frame_292.png", "ct_train_1020_image_frame_145.png", "ct_train_1016_image_frame_245.png", "ct_train_1020_image_frame_207.png", "ct_train_1001_image_frame_159.png", "ct_train_1001_image_frame_177.png", "ct_train_1005_image_frame_83.png", "ct_train_1003_image_frame_51.png", "ct_train_1005_image_frame_87.png", 
"ct_train_1018_image_frame_55.png", "ct_train_1009_image_frame_246.png", "ct_train_1006_image_frame_155.png", "ct_train_1001_image_frame_87.png", "ct_train_1008_image_frame_60.png", "ct_train_1003_image_frame_139.png", "ct_train_1017_image_frame_28.png", "ct_train_1001_image_frame_182.png", "ct_train_1020_image_frame_87.png", "ct_train_1018_image_frame_128.png", "ct_train_1003_image_frame_142.png", "ct_train_1009_image_frame_38.png", "ct_train_1001_image_frame_294.png", "ct_train_1020_image_frame_327.png", "ct_train_1015_image_frame_291.png", "ct_train_1014_image_frame_207.png", "ct_train_1005_image_frame_79.png", "ct_train_1003_image_frame_201.png", "ct_train_1017_image_frame_124.png", "ct_train_1014_image_frame_28.png", "ct_train_1016_image_frame_54.png", "ct_train_1016_image_frame_23.png", "ct_train_1014_image_frame_220.png", "ct_train_1014_image_frame_110.png", "ct_train_1014_image_frame_94.png", "ct_train_1005_image_frame_176.png", "ct_train_1017_image_frame_280.png", "ct_train_1005_image_frame_122.png", "ct_train_1009_image_frame_196.png", "ct_train_1011_image_frame_11.png", "ct_train_1016_image_frame_234.png", "ct_train_1017_image_frame_133.png", "ct_train_1011_image_frame_85.png", "ct_train_1009_image_frame_133.png", "ct_train_1014_image_frame_63.png", "ct_train_1003_image_frame_13.png", "ct_train_1020_image_frame_216.png", "ct_train_1014_image_frame_292.png", "ct_train_1008_image_frame_172.png", "ct_train_1015_image_frame_228.png", "ct_train_1006_image_frame_45.png", "ct_train_1009_image_frame_78.png", "ct_train_1018_image_frame_67.png", "ct_train_1020_image_frame_192.png", "ct_train_1003_image_frame_64.png", "ct_train_1014_image_frame_272.png", "ct_train_1006_image_frame_176.png", "ct_train_1015_image_frame_181.png", "ct_train_1015_image_frame_292.png", "ct_train_1011_image_frame_43.png", "ct_train_1008_image_frame_78.png", "ct_train_1009_image_frame_176.png", "ct_train_1014_image_frame_191.png", "ct_train_1020_image_frame_263.png", "ct_train_1020_image_frame_223.png", "ct_train_1009_image_frame_95.png", "ct_train_1016_image_frame_133.png", "ct_train_1001_image_frame_282.png", "ct_train_1017_image_frame_14.png", "ct_train_1011_image_frame_216.png", "ct_train_1006_image_frame_84.png", "ct_train_1020_image_frame_77.png", "ct_train_1003_image_frame_127.png", "ct_train_1018_image_frame_51.png", "ct_train_1014_image_frame_250.png", "ct_train_1016_image_frame_149.png", "ct_train_1016_image_frame_16.png", "ct_train_1017_image_frame_241.png", "ct_train_1011_image_frame_27.png", "ct_train_1005_image_frame_120.png", "ct_train_1001_image_frame_292.png", "ct_train_1017_image_frame_101.png", "ct_train_1017_image_frame_227.png", "ct_train_1009_image_frame_6.png", "ct_train_1009_image_frame_255.png", "ct_train_1009_image_frame_275.png", "ct_train_1017_image_frame_83.png", "ct_train_1014_image_frame_0.png", "ct_train_1011_image_frame_97.png", "ct_train_1020_image_frame_208.png", "ct_train_1020_image_frame_215.png", "ct_train_1020_image_frame_212.png", "ct_train_1009_image_frame_205.png", "ct_train_1001_image_frame_266.png", "ct_train_1016_image_frame_195.png", "ct_train_1006_image_frame_7.png", "ct_train_1009_image_frame_24.png", "ct_train_1006_image_frame_99.png", "ct_train_1003_image_frame_213.png", "ct_train_1020_image_frame_56.png", "ct_train_1015_image_frame_236.png", "ct_train_1003_image_frame_61.png", "ct_train_1015_image_frame_278.png", "ct_train_1011_image_frame_104.png", "ct_train_1020_image_frame_325.png", "ct_train_1018_image_frame_7.png", "ct_train_1017_image_frame_278.png", 
"ct_train_1014_image_frame_159.png", "ct_train_1011_image_frame_237.png", "ct_train_1015_image_frame_36.png", "ct_train_1017_image_frame_274.png", "ct_train_1001_image_frame_48.png", "ct_train_1014_image_frame_108.png", "ct_train_1018_image_frame_25.png", "ct_train_1009_image_frame_110.png", "ct_train_1011_image_frame_153.png", "ct_train_1001_image_frame_100.png", "ct_train_1001_image_frame_193.png", "ct_train_1003_image_frame_249.png", "ct_train_1016_image_frame_165.png", "ct_train_1011_image_frame_109.png", "ct_train_1006_image_frame_204.png", "ct_train_1006_image_frame_194.png", "ct_train_1015_image_frame_264.png", "ct_train_1003_image_frame_261.png", "ct_train_1001_image_frame_289.png", "ct_train_1020_image_frame_2.png", "ct_train_1011_image_frame_62.png", "ct_train_1020_image_frame_107.png", "ct_train_1017_image_frame_196.png", "ct_train_1017_image_frame_230.png", "ct_train_1016_image_frame_142.png", "ct_train_1018_image_frame_16.png", "ct_train_1020_image_frame_94.png", "ct_train_1016_image_frame_194.png", "ct_train_1020_image_frame_40.png", "ct_train_1001_image_frame_122.png", "ct_train_1001_image_frame_322.png", "ct_train_1020_image_frame_9.png", "ct_train_1011_image_frame_134.png", "ct_train_1015_image_frame_135.png", "ct_train_1003_image_frame_22.png", "ct_train_1009_image_frame_233.png", "ct_train_1014_image_frame_198.png", "ct_train_1009_image_frame_160.png", "ct_train_1014_image_frame_195.png", "ct_train_1014_image_frame_317.png", "ct_train_1020_image_frame_321.png", "ct_train_1008_image_frame_53.png", "ct_train_1006_image_frame_18.png", "ct_train_1014_image_frame_281.png", "ct_train_1017_image_frame_150.png", "ct_train_1003_image_frame_75.png", "ct_train_1015_image_frame_297.png", "ct_train_1001_image_frame_10.png", "ct_train_1008_image_frame_122.png", "ct_train_1003_image_frame_7.png", "ct_train_1014_image_frame_133.png", "ct_train_1001_image_frame_279.png", "ct_train_1009_image_frame_12.png", "ct_train_1020_image_frame_290.png", "ct_train_1003_image_frame_83.png", "ct_train_1014_image_frame_81.png", "ct_train_1014_image_frame_23.png", "ct_train_1014_image_frame_74.png", "ct_train_1016_image_frame_132.png", "ct_train_1003_image_frame_115.png", "ct_train_1015_image_frame_67.png", "ct_train_1014_image_frame_253.png", "ct_train_1009_image_frame_162.png", "ct_train_1011_image_frame_200.png", "ct_train_1018_image_frame_39.png", "ct_train_1005_image_frame_143.png", "ct_train_1018_image_frame_167.png", "ct_train_1003_image_frame_137.png", "ct_train_1014_image_frame_324.png", "ct_train_1008_image_frame_210.png", "ct_train_1018_image_frame_114.png", "ct_train_1003_image_frame_153.png", "ct_train_1003_image_frame_170.png", "ct_train_1020_image_frame_27.png", "ct_train_1018_image_frame_165.png", "ct_train_1001_image_frame_216.png", "ct_train_1017_image_frame_208.png", "ct_train_1016_image_frame_217.png", "ct_train_1020_image_frame_318.png", "ct_train_1016_image_frame_84.png", "ct_train_1001_image_frame_353.png", "ct_train_1009_image_frame_273.png", "ct_train_1009_image_frame_44.png", "ct_train_1020_image_frame_338.png", "ct_train_1011_image_frame_169.png", "ct_train_1016_image_frame_327.png", "ct_train_1011_image_frame_101.png", "ct_train_1011_image_frame_207.png", "ct_train_1003_image_frame_25.png", "ct_train_1016_image_frame_244.png", "ct_train_1008_image_frame_33.png", "ct_train_1006_image_frame_101.png", "ct_train_1001_image_frame_133.png", "ct_train_1009_image_frame_3.png", "ct_train_1014_image_frame_8.png", "ct_train_1015_image_frame_137.png", "ct_train_1020_image_frame_195.png", 
"ct_train_1011_image_frame_102.png", "ct_train_1009_image_frame_142.png", "ct_train_1018_image_frame_53.png", "ct_train_1016_image_frame_305.png", "ct_train_1009_image_frame_191.png", "ct_train_1017_image_frame_244.png", "ct_train_1016_image_frame_151.png", "ct_train_1008_image_frame_26.png", "ct_train_1016_image_frame_253.png", "ct_train_1006_image_frame_205.png", "ct_train_1018_image_frame_175.png", "ct_train_1015_image_frame_3.png", "ct_train_1016_image_frame_87.png", "ct_train_1003_image_frame_149.png", "ct_train_1017_image_frame_213.png", "ct_train_1016_image_frame_213.png", "ct_train_1005_image_frame_142.png", "ct_train_1011_image_frame_196.png", "ct_train_1001_image_frame_276.png", "ct_train_1009_image_frame_226.png", "ct_train_1006_image_frame_183.png", "ct_train_1001_image_frame_278.png", "ct_train_1008_image_frame_121.png", "ct_train_1016_image_frame_162.png", "ct_train_1018_image_frame_42.png", "ct_train_1014_image_frame_41.png", "ct_train_1014_image_frame_77.png", "ct_train_1016_image_frame_92.png", "ct_train_1005_image_frame_99.png", "ct_train_1020_image_frame_307.png", "ct_train_1016_image_frame_214.png", "ct_train_1009_image_frame_272.png", "ct_train_1018_image_frame_141.png", "ct_train_1016_image_frame_56.png", "ct_train_1017_image_frame_187.png", "ct_train_1017_image_frame_25.png", "ct_train_1015_image_frame_118.png", "ct_train_1014_image_frame_147.png", "ct_train_1005_image_frame_150.png", "ct_train_1020_image_frame_110.png", "ct_train_1018_image_frame_108.png", "ct_train_1001_image_frame_207.png", "ct_train_1003_image_frame_246.png", "ct_train_1014_image_frame_312.png", "ct_train_1016_image_frame_285.png", "ct_train_1020_image_frame_78.png", "ct_train_1011_image_frame_190.png", "ct_train_1020_image_frame_232.png", "ct_train_1008_image_frame_211.png", "ct_train_1020_image_frame_173.png", "ct_train_1008_image_frame_162.png", "ct_train_1011_image_frame_177.png", "ct_train_1003_image_frame_251.png", "ct_train_1003_image_frame_40.png", "ct_train_1011_image_frame_3.png", "ct_train_1008_image_frame_215.png", "ct_train_1009_image_frame_33.png", "ct_train_1009_image_frame_267.png", "ct_train_1018_image_frame_102.png", "ct_train_1011_image_frame_204.png", "ct_train_1015_image_frame_14.png", "ct_train_1006_image_frame_177.png", "ct_train_1008_image_frame_0.png", "ct_train_1015_image_frame_46.png", "ct_train_1016_image_frame_191.png", "ct_train_1008_image_frame_43.png", "ct_train_1020_image_frame_117.png", "ct_train_1014_image_frame_302.png", "ct_train_1014_image_frame_22.png", "ct_train_1003_image_frame_41.png", "ct_train_1014_image_frame_58.png", "ct_train_1014_image_frame_92.png", "ct_train_1008_image_frame_116.png", "ct_train_1006_image_frame_119.png", "ct_train_1005_image_frame_115.png", "ct_train_1016_image_frame_150.png", "ct_train_1017_image_frame_3.png", "ct_train_1005_image_frame_38.png", "ct_train_1015_image_frame_213.png", "ct_train_1011_image_frame_47.png", "ct_train_1016_image_frame_225.png", "ct_train_1017_image_frame_166.png", "ct_train_1016_image_frame_169.png", "ct_train_1016_image_frame_275.png", "ct_train_1001_image_frame_146.png", "ct_train_1003_image_frame_134.png", "ct_train_1020_image_frame_21.png", "ct_train_1020_image_frame_252.png", "ct_train_1009_image_frame_247.png", "ct_train_1001_image_frame_121.png", "ct_train_1017_image_frame_92.png", "ct_train_1006_image_frame_49.png", "ct_train_1020_image_frame_328.png", "ct_train_1001_image_frame_98.png", "ct_train_1009_image_frame_183.png", "ct_train_1020_image_frame_315.png", "ct_train_1016_image_frame_13.png", 
"ct_train_1006_image_frame_229.png", "ct_train_1005_image_frame_23.png", "ct_train_1009_image_frame_257.png", "ct_train_1017_image_frame_168.png", "ct_train_1020_image_frame_148.png", "ct_train_1009_image_frame_90.png", "ct_train_1018_image_frame_33.png", "ct_train_1015_image_frame_62.png", "ct_train_1020_image_frame_37.png", "ct_train_1014_image_frame_219.png", "ct_train_1005_image_frame_84.png", "ct_train_1020_image_frame_333.png", "ct_train_1003_image_frame_275.png", "ct_train_1006_image_frame_92.png", "ct_train_1015_image_frame_246.png", "ct_train_1009_image_frame_147.png", "ct_train_1018_image_frame_144.png", "ct_train_1014_image_frame_1.png", "ct_train_1020_image_frame_267.png", "ct_train_1016_image_frame_186.png", "ct_train_1020_image_frame_43.png", "ct_train_1008_image_frame_194.png", "ct_train_1017_image_frame_245.png", "ct_train_1016_image_frame_247.png", "ct_train_1005_image_frame_12.png", "ct_train_1006_image_frame_58.png", "ct_train_1018_image_frame_169.png", "ct_train_1003_image_frame_107.png", "ct_train_1020_image_frame_111.png", "ct_train_1016_image_frame_226.png", "ct_train_1016_image_frame_85.png", "ct_train_1011_image_frame_215.png", "ct_train_1009_image_frame_222.png", "ct_train_1006_image_frame_152.png", "ct_train_1020_image_frame_68.png", "ct_train_1006_image_frame_41.png", "ct_train_1006_image_frame_134.png", "ct_train_1015_image_frame_88.png", "ct_train_1018_image_frame_41.png", "ct_train_1006_image_frame_185.png", "ct_train_1016_image_frame_272.png", "ct_train_1001_image_frame_3.png", "ct_train_1011_image_frame_4.png", "ct_train_1020_image_frame_146.png", "ct_train_1009_image_frame_282.png", "ct_train_1008_image_frame_6.png", "ct_train_1001_image_frame_89.png", "ct_train_1014_image_frame_61.png", "ct_train_1009_image_frame_10.png", "ct_train_1001_image_frame_143.png", "ct_train_1011_image_frame_155.png", "ct_train_1017_image_frame_171.png", "ct_train_1003_image_frame_0.png", "ct_train_1016_image_frame_174.png", "ct_train_1005_image_frame_138.png", "ct_train_1003_image_frame_212.png", "ct_train_1016_image_frame_79.png", "ct_train_1017_image_frame_149.png", "ct_train_1006_image_frame_87.png", "ct_train_1016_image_frame_100.png", "ct_train_1015_image_frame_105.png", "ct_train_1018_image_frame_23.png", "ct_train_1020_image_frame_331.png", "ct_train_1017_image_frame_224.png", "ct_train_1003_image_frame_195.png", "ct_train_1017_image_frame_88.png", "ct_train_1005_image_frame_80.png", "ct_train_1003_image_frame_68.png", "ct_train_1001_image_frame_349.png", "ct_train_1020_image_frame_18.png", "ct_train_1014_image_frame_96.png", "ct_train_1006_image_frame_169.png", "ct_train_1003_image_frame_110.png", "ct_train_1018_image_frame_57.png", "ct_train_1001_image_frame_275.png", "ct_train_1015_image_frame_230.png", "ct_train_1020_image_frame_96.png", "ct_train_1017_image_frame_272.png", "ct_train_1011_image_frame_60.png", "ct_train_1018_image_frame_124.png", "ct_train_1009_image_frame_9.png", "ct_train_1005_image_frame_55.png", "ct_train_1001_image_frame_191.png", "ct_train_1015_image_frame_151.png", "ct_train_1001_image_frame_228.png", "ct_train_1020_image_frame_280.png", "ct_train_1017_image_frame_81.png", "ct_train_1001_image_frame_164.png", "ct_train_1017_image_frame_26.png", "ct_train_1015_image_frame_237.png", "ct_train_1014_image_frame_289.png", "ct_train_1014_image_frame_216.png", "ct_train_1020_image_frame_289.png", "ct_train_1008_image_frame_61.png", "ct_train_1011_image_frame_32.png", "ct_train_1006_image_frame_196.png", "ct_train_1017_image_frame_228.png", 
"ct_train_1016_image_frame_330.png", "ct_train_1001_image_frame_306.png", "ct_train_1006_image_frame_211.png", "ct_train_1005_image_frame_156.png", "ct_train_1014_image_frame_106.png", "ct_train_1018_image_frame_149.png", "ct_train_1006_image_frame_25.png", "ct_train_1001_image_frame_172.png", "ct_train_1015_image_frame_94.png", "ct_train_1020_image_frame_282.png", "ct_train_1016_image_frame_116.png", "ct_train_1009_image_frame_271.png", "ct_train_1015_image_frame_93.png", "ct_train_1020_image_frame_182.png", "ct_train_1016_image_frame_89.png", "ct_train_1003_image_frame_74.png", "ct_train_1009_image_frame_99.png", "ct_train_1001_image_frame_209.png", "ct_train_1020_image_frame_171.png", "ct_train_1017_image_frame_250.png", "ct_train_1006_image_frame_17.png", "ct_train_1008_image_frame_68.png", "ct_train_1014_image_frame_177.png", "ct_train_1016_image_frame_190.png", "ct_train_1017_image_frame_53.png", "ct_train_1015_image_frame_232.png", "ct_train_1001_image_frame_333.png", "ct_train_1011_image_frame_46.png", "ct_train_1017_image_frame_259.png", "ct_train_1001_image_frame_242.png", "ct_train_1017_image_frame_138.png", "ct_train_1020_image_frame_229.png", "ct_train_1018_image_frame_146.png", "ct_train_1017_image_frame_235.png", "ct_train_1014_image_frame_27.png", "ct_train_1014_image_frame_339.png", "ct_train_1001_image_frame_351.png", "ct_train_1008_image_frame_23.png", "ct_train_1015_image_frame_124.png", "ct_train_1011_image_frame_218.png", "ct_train_1011_image_frame_44.png", "ct_train_1009_image_frame_93.png", "ct_train_1016_image_frame_78.png", "ct_train_1006_image_frame_104.png", "ct_train_1009_image_frame_284.png", "ct_train_1008_image_frame_163.png", "ct_train_1016_image_frame_331.png", "ct_train_1003_image_frame_11.png", "ct_train_1018_image_frame_183.png", "ct_train_1001_image_frame_239.png", "ct_train_1009_image_frame_85.png", "ct_train_1020_image_frame_233.png", "ct_train_1018_image_frame_19.png", "ct_train_1014_image_frame_32.png", "ct_train_1009_image_frame_116.png", "ct_train_1006_image_frame_23.png", "ct_train_1015_image_frame_187.png", "ct_train_1016_image_frame_264.png", "ct_train_1005_image_frame_35.png", "ct_train_1005_image_frame_15.png", "ct_train_1006_image_frame_198.png", "ct_train_1006_image_frame_74.png", "ct_train_1005_image_frame_34.png", "ct_train_1003_image_frame_10.png", "ct_train_1005_image_frame_43.png", "ct_train_1011_image_frame_117.png", "ct_train_1011_image_frame_137.png", "ct_train_1017_image_frame_266.png", "ct_train_1003_image_frame_126.png", "ct_train_1009_image_frame_194.png", "ct_train_1003_image_frame_46.png", "ct_train_1018_image_frame_32.png", "ct_train_1008_image_frame_39.png", "ct_train_1020_image_frame_209.png", "ct_train_1009_image_frame_167.png", "ct_train_1017_image_frame_13.png", "ct_train_1017_image_frame_142.png", "ct_train_1011_image_frame_81.png", "ct_train_1020_image_frame_58.png", "ct_train_1006_image_frame_69.png", "ct_train_1018_image_frame_139.png", "ct_train_1001_image_frame_75.png", "ct_train_1020_image_frame_294.png", "ct_train_1001_image_frame_254.png", "ct_train_1008_image_frame_117.png", "ct_train_1011_image_frame_98.png", "ct_train_1003_image_frame_158.png", "ct_train_1018_image_frame_45.png", "ct_train_1016_image_frame_235.png", "ct_train_1001_image_frame_179.png", "ct_train_1017_image_frame_116.png", "ct_train_1008_image_frame_216.png", "ct_train_1014_image_frame_171.png", "ct_train_1017_image_frame_86.png", "ct_train_1014_image_frame_3.png", "ct_train_1009_image_frame_115.png", "ct_train_1009_image_frame_73.png", 
"ct_train_1014_image_frame_179.png", "ct_train_1008_image_frame_142.png", "ct_train_1008_image_frame_153.png", "ct_train_1016_image_frame_248.png", "ct_train_1016_image_frame_134.png", "ct_train_1017_image_frame_74.png", "ct_train_1020_image_frame_241.png", "ct_train_1020_image_frame_98.png", "ct_train_1008_image_frame_35.png", "ct_train_1014_image_frame_277.png", "ct_train_1018_image_frame_103.png", "ct_train_1009_image_frame_214.png", "ct_train_1008_image_frame_171.png", "ct_train_1011_image_frame_129.png", "ct_train_1009_image_frame_230.png", "ct_train_1006_image_frame_117.png", "ct_train_1009_image_frame_66.png", "ct_train_1009_image_frame_119.png", "ct_train_1003_image_frame_165.png", "ct_train_1017_image_frame_96.png", "ct_train_1017_image_frame_118.png", "ct_train_1001_image_frame_203.png", "ct_train_1003_image_frame_284.png", "ct_train_1020_image_frame_194.png", "ct_train_1001_image_frame_220.png", "ct_train_1001_image_frame_14.png", "ct_train_1017_image_frame_21.png", "ct_train_1020_image_frame_220.png", "ct_train_1014_image_frame_279.png", "ct_train_1003_image_frame_262.png", "ct_train_1003_image_frame_130.png", "ct_train_1003_image_frame_233.png", "ct_train_1009_image_frame_182.png", "ct_train_1006_image_frame_98.png", "ct_train_1011_image_frame_54.png", "ct_train_1003_image_frame_76.png", "ct_train_1001_image_frame_309.png", "ct_train_1014_image_frame_137.png", "ct_train_1018_image_frame_99.png", "ct_train_1008_image_frame_147.png", "ct_train_1015_image_frame_162.png", "ct_train_1001_image_frame_139.png", "ct_train_1011_image_frame_148.png", "ct_train_1017_image_frame_125.png", "ct_train_1018_image_frame_173.png", "ct_train_1006_image_frame_141.png", "ct_train_1016_image_frame_227.png", "ct_train_1009_image_frame_227.png", "ct_train_1001_image_frame_263.png", "ct_train_1016_image_frame_215.png", "ct_train_1006_image_frame_125.png", "ct_train_1005_image_frame_152.png", "ct_train_1017_image_frame_128.png", "ct_train_1020_image_frame_276.png", "ct_train_1011_image_frame_170.png", "ct_train_1020_image_frame_20.png", "ct_train_1006_image_frame_241.png", "ct_train_1020_image_frame_271.png", "ct_train_1020_image_frame_120.png", "ct_train_1015_image_frame_155.png", "ct_train_1016_image_frame_220.png", "ct_train_1015_image_frame_256.png", "ct_train_1001_image_frame_184.png", "ct_train_1008_image_frame_8.png", "ct_train_1001_image_frame_170.png", "ct_train_1014_image_frame_124.png", "ct_train_1014_image_frame_49.png", "ct_train_1015_image_frame_131.png", "ct_train_1016_image_frame_22.png", "ct_train_1008_image_frame_218.png", "ct_train_1018_image_frame_74.png", "ct_train_1009_image_frame_221.png", "ct_train_1001_image_frame_360.png", "ct_train_1020_image_frame_305.png", "ct_train_1020_image_frame_266.png", "ct_train_1008_image_frame_198.png", "ct_train_1008_image_frame_84.png", "ct_train_1017_image_frame_169.png", "ct_train_1005_image_frame_22.png", "ct_train_1016_image_frame_319.png", "ct_train_1003_image_frame_196.png", "ct_train_1009_image_frame_277.png", "ct_train_1003_image_frame_288.png", "ct_train_1011_image_frame_128.png", "ct_train_1011_image_frame_165.png", "ct_train_1017_image_frame_189.png", "ct_train_1015_image_frame_243.png", "ct_train_1009_image_frame_64.png", "ct_train_1016_image_frame_37.png", "ct_train_1006_image_frame_71.png", "ct_train_1005_image_frame_134.png", "ct_train_1011_image_frame_64.png", "ct_train_1020_image_frame_175.png", "ct_train_1014_image_frame_274.png", "ct_train_1008_image_frame_191.png", "ct_train_1016_image_frame_233.png", 
"ct_train_1018_image_frame_64.png", "ct_train_1003_image_frame_129.png", "ct_train_1015_image_frame_132.png", "ct_train_1014_image_frame_244.png", "ct_train_1014_image_frame_105.png", "ct_train_1018_image_frame_18.png", "ct_train_1020_image_frame_168.png", "ct_train_1016_image_frame_39.png", "ct_train_1009_image_frame_32.png", "ct_train_1009_image_frame_245.png", "ct_train_1014_image_frame_328.png", "ct_train_1015_image_frame_193.png", "ct_train_1006_image_frame_95.png", "ct_train_1009_image_frame_70.png", "ct_train_1016_image_frame_6.png", "ct_train_1006_image_frame_167.png", "ct_train_1011_image_frame_28.png", "ct_train_1008_image_frame_34.png", "ct_train_1014_image_frame_170.png", "ct_train_1011_image_frame_156.png", "ct_train_1009_image_frame_4.png", "ct_train_1003_image_frame_237.png", "ct_train_1015_image_frame_280.png", "ct_train_1005_image_frame_10.png", "ct_train_1017_image_frame_281.png", "ct_train_1008_image_frame_132.png", "ct_train_1017_image_frame_84.png", "ct_train_1020_image_frame_169.png", "ct_train_1001_image_frame_257.png", "ct_train_1016_image_frame_17.png", "ct_train_1018_image_frame_182.png", "ct_train_1003_image_frame_289.png", "ct_train_1001_image_frame_72.png", "ct_train_1014_image_frame_247.png", "ct_train_1001_image_frame_256.png", "ct_train_1003_image_frame_23.png", "ct_train_1016_image_frame_332.png", "ct_train_1008_image_frame_98.png", "ct_train_1003_image_frame_91.png", "ct_train_1001_image_frame_253.png", "ct_train_1017_image_frame_47.png", "ct_train_1016_image_frame_259.png", "ct_train_1015_image_frame_5.png", "ct_train_1016_image_frame_261.png", "ct_train_1017_image_frame_145.png", "ct_train_1008_image_frame_219.png", "ct_train_1020_image_frame_298.png", "ct_train_1015_image_frame_294.png", "ct_train_1001_image_frame_186.png", "ct_train_1017_image_frame_163.png", "ct_train_1015_image_frame_127.png", "ct_train_1006_image_frame_148.png", "ct_train_1016_image_frame_60.png", "ct_train_1014_image_frame_304.png", "ct_train_1005_image_frame_9.png", "ct_train_1020_image_frame_6.png", "ct_train_1006_image_frame_161.png", "ct_train_1006_image_frame_8.png", "ct_train_1017_image_frame_147.png", "ct_train_1015_image_frame_44.png", "ct_train_1001_image_frame_181.png", "ct_train_1009_image_frame_59.png", "ct_train_1015_image_frame_163.png", "ct_train_1017_image_frame_229.png", "ct_train_1001_image_frame_32.png", "ct_train_1018_image_frame_166.png", "ct_train_1008_image_frame_131.png", "ct_train_1016_image_frame_74.png", "ct_train_1009_image_frame_260.png", "ct_train_1009_image_frame_69.png", "ct_train_1009_image_frame_127.png", "ct_train_1008_image_frame_135.png", "ct_train_1015_image_frame_260.png", "ct_train_1015_image_frame_160.png", "ct_train_1016_image_frame_155.png", "ct_train_1016_image_frame_124.png", "ct_train_1018_image_frame_185.png", "ct_train_1015_image_frame_202.png", "ct_train_1018_image_frame_84.png", "ct_train_1017_image_frame_20.png", "ct_train_1016_image_frame_298.png", "ct_train_1005_image_frame_164.png", "ct_train_1015_image_frame_7.png", "ct_train_1001_image_frame_99.png", "ct_train_1009_image_frame_200.png", "ct_train_1020_image_frame_279.png", "ct_train_1011_image_frame_1.png", "ct_train_1016_image_frame_166.png", "ct_train_1014_image_frame_236.png", "ct_train_1018_image_frame_123.png", "ct_train_1020_image_frame_265.png", "ct_train_1016_image_frame_40.png", "ct_train_1001_image_frame_261.png", "ct_train_1014_image_frame_202.png", "ct_train_1014_image_frame_30.png", "ct_train_1014_image_frame_164.png", "ct_train_1015_image_frame_273.png", 
"ct_train_1003_image_frame_244.png", "ct_train_1001_image_frame_345.png", "ct_train_1016_image_frame_177.png", "ct_train_1008_image_frame_101.png", "ct_train_1006_image_frame_127.png", "ct_train_1020_image_frame_308.png", "ct_train_1009_image_frame_236.png", "ct_train_1003_image_frame_138.png", "ct_train_1015_image_frame_247.png", "ct_train_1020_image_frame_344.png", "ct_train_1008_image_frame_167.png", "ct_train_1017_image_frame_243.png", "ct_train_1015_image_frame_17.png", "ct_train_1008_image_frame_214.png", "ct_train_1014_image_frame_351.png", "ct_train_1009_image_frame_132.png", "ct_train_1015_image_frame_128.png", "ct_train_1009_image_frame_42.png", "ct_train_1017_image_frame_80.png", "ct_train_1016_image_frame_300.png", "ct_train_1011_image_frame_130.png", "ct_train_1006_image_frame_237.png", "ct_train_1001_image_frame_102.png", "ct_train_1011_image_frame_40.png", "ct_train_1020_image_frame_41.png", "ct_train_1020_image_frame_66.png", "ct_train_1001_image_frame_125.png", "ct_train_1014_image_frame_276.png", "ct_train_1006_image_frame_235.png", "ct_train_1014_image_frame_89.png", "ct_train_1005_image_frame_96.png", "ct_train_1008_image_frame_120.png", "ct_train_1014_image_frame_70.png", "ct_train_1020_image_frame_164.png", "ct_train_1011_image_frame_199.png", "ct_train_1015_image_frame_9.png", "ct_train_1006_image_frame_153.png", "ct_train_1009_image_frame_170.png", "ct_train_1020_image_frame_306.png", "ct_train_1008_image_frame_139.png", "ct_train_1008_image_frame_44.png", "ct_train_1009_image_frame_248.png", "ct_train_1017_image_frame_139.png", "ct_train_1016_image_frame_182.png", "ct_train_1003_image_frame_77.png", "ct_train_1020_image_frame_309.png", "ct_train_1017_image_frame_10.png", "ct_train_1016_image_frame_270.png", "ct_train_1014_image_frame_326.png", "ct_train_1006_image_frame_139.png", "ct_train_1001_image_frame_132.png", "ct_train_1017_image_frame_60.png", "ct_train_1017_image_frame_212.png", "ct_train_1001_image_frame_215.png", "ct_train_1008_image_frame_47.png", "ct_train_1006_image_frame_206.png", "ct_train_1008_image_frame_136.png", "ct_train_1011_image_frame_158.png", "ct_train_1011_image_frame_24.png", "ct_train_1018_image_frame_14.png", "ct_train_1003_image_frame_254.png", "ct_train_1003_image_frame_182.png", "ct_train_1015_image_frame_50.png", "ct_train_1014_image_frame_212.png", "ct_train_1014_image_frame_80.png", "ct_train_1008_image_frame_161.png", "ct_train_1003_image_frame_62.png", "ct_train_1016_image_frame_14.png", "ct_train_1017_image_frame_238.png", "ct_train_1014_image_frame_238.png", "ct_train_1015_image_frame_288.png", "ct_train_1015_image_frame_136.png", "ct_train_1018_image_frame_71.png", "ct_train_1020_image_frame_295.png", "ct_train_1015_image_frame_71.png", "ct_train_1020_image_frame_29.png", "ct_train_1016_image_frame_95.png", "ct_train_1015_image_frame_262.png", "ct_train_1011_image_frame_92.png", "ct_train_1020_image_frame_317.png", "ct_train_1008_image_frame_193.png", "ct_train_1003_image_frame_118.png", "ct_train_1006_image_frame_61.png", "ct_train_1011_image_frame_132.png", "ct_train_1014_image_frame_146.png", "ct_train_1015_image_frame_227.png", "ct_train_1017_image_frame_199.png", "ct_train_1017_image_frame_33.png", "ct_train_1005_image_frame_166.png", "ct_train_1020_image_frame_183.png", "ct_train_1008_image_frame_133.png", "ct_train_1005_image_frame_101.png", "ct_train_1009_image_frame_140.png", "ct_train_1003_image_frame_179.png", "ct_train_1016_image_frame_308.png", "ct_train_1003_image_frame_60.png", 
"ct_train_1005_image_frame_88.png", "ct_train_1018_image_frame_127.png", "ct_train_1001_image_frame_17.png", "ct_train_1020_image_frame_24.png", "ct_train_1020_image_frame_81.png", "ct_train_1015_image_frame_75.png", "ct_train_1015_image_frame_259.png", "ct_train_1020_image_frame_115.png", "ct_train_1001_image_frame_308.png", "ct_train_1015_image_frame_0.png", "ct_train_1011_image_frame_233.png", "ct_train_1015_image_frame_180.png", "ct_train_1001_image_frame_296.png", "ct_train_1011_image_frame_66.png", "ct_train_1011_image_frame_187.png", "ct_train_1017_image_frame_99.png", "ct_train_1005_image_frame_67.png", "ct_train_1005_image_frame_175.png", "ct_train_1016_image_frame_47.png", "ct_train_1001_image_frame_47.png", "ct_train_1018_image_frame_80.png", "ct_train_1018_image_frame_163.png", "ct_train_1018_image_frame_24.png", "ct_train_1016_image_frame_4.png", "ct_train_1018_image_frame_129.png", "ct_train_1006_image_frame_175.png", "ct_train_1006_image_frame_188.png", "ct_train_1001_image_frame_205.png", "ct_train_1003_image_frame_206.png", "ct_train_1008_image_frame_200.png", "ct_train_1003_image_frame_19.png", "ct_train_1018_image_frame_125.png", "ct_train_1001_image_frame_167.png", "ct_train_1005_image_frame_40.png", "ct_train_1016_image_frame_90.png", "ct_train_1016_image_frame_8.png", "ct_train_1009_image_frame_27.png", "ct_train_1003_image_frame_186.png", "ct_train_1014_image_frame_64.png", "ct_train_1009_image_frame_65.png", "ct_train_1017_image_frame_19.png", "ct_train_1015_image_frame_266.png", "ct_train_1014_image_frame_270.png", "ct_train_1020_image_frame_283.png", "ct_train_1001_image_frame_271.png", "ct_train_1003_image_frame_39.png", "ct_train_1008_image_frame_73.png", "ct_train_1009_image_frame_208.png", "ct_train_1014_image_frame_111.png", "ct_train_1015_image_frame_150.png", "ct_train_1017_image_frame_15.png", "ct_train_1018_image_frame_66.png", "ct_train_1003_image_frame_274.png", "ct_train_1009_image_frame_265.png", "ct_train_1005_image_frame_111.png", "ct_train_1003_image_frame_290.png", "ct_train_1008_image_frame_88.png", "ct_train_1016_image_frame_311.png", "ct_train_1009_image_frame_189.png", "ct_train_1014_image_frame_100.png", "ct_train_1020_image_frame_118.png", "ct_train_1016_image_frame_268.png", "ct_train_1001_image_frame_356.png", "ct_train_1005_image_frame_16.png", "ct_train_1001_image_frame_54.png", "ct_train_1001_image_frame_197.png", "ct_train_1006_image_frame_96.png", "ct_train_1014_image_frame_91.png", "ct_train_1015_image_frame_27.png", "ct_train_1009_image_frame_159.png", "ct_train_1011_image_frame_175.png", "ct_train_1003_image_frame_270.png", "ct_train_1020_image_frame_228.png", "ct_train_1001_image_frame_175.png", "ct_train_1011_image_frame_56.png", "ct_train_1015_image_frame_177.png", "ct_train_1014_image_frame_128.png", "ct_train_1014_image_frame_104.png", "ct_train_1015_image_frame_18.png", "ct_train_1005_image_frame_72.png", "ct_train_1015_image_frame_111.png", "ct_train_1001_image_frame_65.png", "ct_train_1001_image_frame_241.png", "ct_train_1008_image_frame_96.png", "ct_train_1006_image_frame_213.png", "ct_train_1009_image_frame_7.png", "ct_train_1018_image_frame_27.png", "ct_train_1001_image_frame_168.png", "ct_train_1001_image_frame_335.png", "ct_train_1003_image_frame_283.png", "ct_train_1016_image_frame_211.png", "ct_train_1011_image_frame_48.png", "ct_train_1003_image_frame_162.png", "ct_train_1014_image_frame_218.png", "ct_train_1020_image_frame_330.png", "ct_train_1011_image_frame_150.png", "ct_train_1011_image_frame_141.png", 
"ct_train_1014_image_frame_38.png", "ct_train_1014_image_frame_56.png", "ct_train_1011_image_frame_79.png", "ct_train_1014_image_frame_156.png", "ct_train_1015_image_frame_261.png", "ct_train_1003_image_frame_131.png", "ct_train_1009_image_frame_120.png", "ct_train_1005_image_frame_28.png", "ct_train_1005_image_frame_7.png", "ct_train_1001_image_frame_334.png", "ct_train_1005_image_frame_128.png", "ct_train_1009_image_frame_193.png", "ct_train_1016_image_frame_274.png"], "valid": ["ct_train_1007_image_frame_192.png", "ct_train_1007_image_frame_161.png", "ct_train_1013_image_frame_129.png", "ct_train_1007_image_frame_83.png", "ct_train_1004_image_frame_77.png", "ct_train_1004_image_frame_66.png", "ct_train_1013_image_frame_45.png", "ct_train_1013_image_frame_93.png", "ct_train_1013_image_frame_63.png", "ct_train_1013_image_frame_10.png", "ct_train_1007_image_frame_4.png", "ct_train_1013_image_frame_102.png", "ct_train_1013_image_frame_160.png", "ct_train_1007_image_frame_113.png", "ct_train_1013_image_frame_199.png", "ct_train_1013_image_frame_144.png", "ct_train_1013_image_frame_116.png", "ct_train_1004_image_frame_152.png", "ct_train_1004_image_frame_12.png", "ct_train_1004_image_frame_50.png", "ct_train_1004_image_frame_81.png", "ct_train_1007_image_frame_54.png", "ct_train_1007_image_frame_66.png", "ct_train_1007_image_frame_148.png", "ct_train_1013_image_frame_182.png", "ct_train_1013_image_frame_68.png", "ct_train_1004_image_frame_16.png", "ct_train_1013_image_frame_139.png", "ct_train_1004_image_frame_190.png", "ct_train_1007_image_frame_206.png", "ct_train_1013_image_frame_113.png", "ct_train_1007_image_frame_79.png", "ct_train_1004_image_frame_91.png", "ct_train_1007_image_frame_182.png", "ct_train_1013_image_frame_209.png", "ct_train_1004_image_frame_131.png", "ct_train_1013_image_frame_40.png", "ct_train_1007_image_frame_84.png", "ct_train_1004_image_frame_148.png", "ct_train_1007_image_frame_202.png", "ct_train_1007_image_frame_81.png", "ct_train_1013_image_frame_25.png", "ct_train_1007_image_frame_82.png", "ct_train_1007_image_frame_193.png", "ct_train_1013_image_frame_205.png", "ct_train_1007_image_frame_208.png", "ct_train_1004_image_frame_188.png", "ct_train_1013_image_frame_145.png", "ct_train_1007_image_frame_109.png", "ct_train_1013_image_frame_85.png", "ct_train_1007_image_frame_99.png", "ct_train_1004_image_frame_33.png", "ct_train_1007_image_frame_218.png", "ct_train_1013_image_frame_19.png", "ct_train_1013_image_frame_147.png", "ct_train_1007_image_frame_187.png", "ct_train_1007_image_frame_1.png", "ct_train_1004_image_frame_44.png", "ct_train_1004_image_frame_80.png", "ct_train_1013_image_frame_108.png", "ct_train_1013_image_frame_162.png", "ct_train_1007_image_frame_236.png", "ct_train_1013_image_frame_103.png", "ct_train_1013_image_frame_88.png", "ct_train_1007_image_frame_194.png", "ct_train_1013_image_frame_150.png", "ct_train_1007_image_frame_227.png", "ct_train_1013_image_frame_54.png", "ct_train_1013_image_frame_143.png", "ct_train_1004_image_frame_51.png", "ct_train_1013_image_frame_181.png", "ct_train_1004_image_frame_142.png", "ct_train_1007_image_frame_140.png", "ct_train_1007_image_frame_122.png", "ct_train_1007_image_frame_239.png", "ct_train_1013_image_frame_175.png", "ct_train_1007_image_frame_237.png", "ct_train_1013_image_frame_3.png", "ct_train_1004_image_frame_162.png", "ct_train_1004_image_frame_38.png", "ct_train_1013_image_frame_156.png", "ct_train_1004_image_frame_125.png", "ct_train_1007_image_frame_115.png", 
"ct_train_1013_image_frame_50.png", "ct_train_1013_image_frame_188.png", "ct_train_1007_image_frame_89.png", "ct_train_1013_image_frame_81.png", "ct_train_1013_image_frame_149.png", "ct_train_1007_image_frame_15.png", "ct_train_1004_image_frame_26.png", "ct_train_1004_image_frame_89.png", "ct_train_1013_image_frame_176.png", "ct_train_1004_image_frame_183.png", "ct_train_1004_image_frame_76.png", "ct_train_1013_image_frame_138.png", "ct_train_1013_image_frame_183.png", "ct_train_1013_image_frame_179.png", "ct_train_1007_image_frame_121.png", "ct_train_1007_image_frame_51.png", "ct_train_1004_image_frame_163.png", "ct_train_1013_image_frame_44.png", "ct_train_1007_image_frame_46.png", "ct_train_1004_image_frame_184.png", "ct_train_1004_image_frame_70.png", "ct_train_1007_image_frame_226.png", "ct_train_1013_image_frame_151.png", "ct_train_1007_image_frame_166.png", "ct_train_1013_image_frame_9.png", "ct_train_1004_image_frame_108.png", "ct_train_1007_image_frame_14.png", "ct_train_1007_image_frame_185.png", "ct_train_1007_image_frame_242.png", "ct_train_1004_image_frame_29.png", "ct_train_1007_image_frame_114.png", "ct_train_1007_image_frame_186.png", "ct_train_1004_image_frame_24.png", "ct_train_1004_image_frame_88.png", "ct_train_1013_image_frame_193.png", "ct_train_1007_image_frame_47.png", "ct_train_1007_image_frame_214.png", "ct_train_1007_image_frame_76.png", "ct_train_1004_image_frame_59.png", "ct_train_1013_image_frame_24.png", "ct_train_1004_image_frame_48.png", "ct_train_1007_image_frame_11.png", "ct_train_1004_image_frame_2.png", "ct_train_1013_image_frame_0.png", "ct_train_1007_image_frame_132.png", "ct_train_1013_image_frame_70.png", "ct_train_1013_image_frame_130.png", "ct_train_1007_image_frame_190.png", "ct_train_1007_image_frame_21.png", "ct_train_1004_image_frame_149.png", "ct_train_1007_image_frame_70.png", "ct_train_1007_image_frame_220.png", "ct_train_1013_image_frame_167.png", "ct_train_1004_image_frame_194.png", "ct_train_1004_image_frame_57.png", "ct_train_1004_image_frame_155.png", "ct_train_1004_image_frame_137.png", "ct_train_1004_image_frame_181.png", "ct_train_1007_image_frame_24.png", "ct_train_1004_image_frame_17.png", "ct_train_1007_image_frame_159.png", "ct_train_1013_image_frame_154.png", "ct_train_1004_image_frame_74.png", "ct_train_1013_image_frame_48.png", "ct_train_1007_image_frame_102.png", "ct_train_1013_image_frame_82.png", "ct_train_1013_image_frame_110.png", "ct_train_1004_image_frame_195.png", "ct_train_1013_image_frame_21.png", "ct_train_1007_image_frame_3.png", "ct_train_1007_image_frame_5.png", "ct_train_1004_image_frame_123.png", "ct_train_1004_image_frame_113.png", "ct_train_1004_image_frame_156.png", "ct_train_1007_image_frame_154.png", "ct_train_1013_image_frame_132.png", "ct_train_1004_image_frame_197.png", "ct_train_1004_image_frame_169.png", "ct_train_1013_image_frame_134.png", "ct_train_1004_image_frame_168.png", "ct_train_1004_image_frame_23.png", "ct_train_1004_image_frame_179.png", "ct_train_1007_image_frame_151.png", "ct_train_1007_image_frame_233.png", "ct_train_1013_image_frame_22.png", "ct_train_1007_image_frame_207.png", "ct_train_1004_image_frame_130.png", "ct_train_1007_image_frame_213.png", "ct_train_1004_image_frame_79.png", "ct_train_1013_image_frame_111.png", "ct_train_1004_image_frame_97.png", "ct_train_1013_image_frame_169.png", "ct_train_1007_image_frame_223.png", "ct_train_1004_image_frame_69.png", "ct_train_1013_image_frame_79.png", "ct_train_1013_image_frame_80.png", "ct_train_1007_image_frame_127.png", 
"ct_train_1004_image_frame_3.png", "ct_train_1007_image_frame_101.png", "ct_train_1013_image_frame_157.png", "ct_train_1013_image_frame_99.png", "ct_train_1013_image_frame_94.png", "ct_train_1013_image_frame_158.png", "ct_train_1007_image_frame_135.png", "ct_train_1007_image_frame_25.png", "ct_train_1007_image_frame_153.png", "ct_train_1004_image_frame_90.png", "ct_train_1004_image_frame_186.png", "ct_train_1004_image_frame_46.png", "ct_train_1004_image_frame_103.png", "ct_train_1007_image_frame_16.png", "ct_train_1004_image_frame_8.png", "ct_train_1013_image_frame_6.png", "ct_train_1007_image_frame_110.png", "ct_train_1007_image_frame_22.png", "ct_train_1007_image_frame_52.png", "ct_train_1007_image_frame_6.png", "ct_train_1013_image_frame_56.png", "ct_train_1004_image_frame_36.png", "ct_train_1004_image_frame_28.png", "ct_train_1007_image_frame_106.png", "ct_train_1004_image_frame_146.png", "ct_train_1013_image_frame_177.png", "ct_train_1007_image_frame_134.png", "ct_train_1007_image_frame_222.png", "ct_train_1013_image_frame_112.png", "ct_train_1013_image_frame_65.png", "ct_train_1013_image_frame_83.png", "ct_train_1013_image_frame_12.png", "ct_train_1013_image_frame_61.png", "ct_train_1013_image_frame_119.png", "ct_train_1004_image_frame_147.png", "ct_train_1013_image_frame_123.png", "ct_train_1004_image_frame_189.png", "ct_train_1004_image_frame_173.png", "ct_train_1007_image_frame_13.png", "ct_train_1007_image_frame_108.png", "ct_train_1013_image_frame_15.png", "ct_train_1007_image_frame_35.png", "ct_train_1007_image_frame_67.png", "ct_train_1013_image_frame_197.png", "ct_train_1007_image_frame_224.png", "ct_train_1007_image_frame_241.png", "ct_train_1007_image_frame_228.png", "ct_train_1004_image_frame_160.png", "ct_train_1013_image_frame_38.png", "ct_train_1007_image_frame_36.png", "ct_train_1007_image_frame_139.png", "ct_train_1004_image_frame_72.png", "ct_train_1007_image_frame_61.png", "ct_train_1013_image_frame_72.png", "ct_train_1007_image_frame_77.png", "ct_train_1013_image_frame_91.png", "ct_train_1013_image_frame_168.png", "ct_train_1004_image_frame_198.png", "ct_train_1004_image_frame_11.png", "ct_train_1007_image_frame_0.png", "ct_train_1004_image_frame_20.png", "ct_train_1004_image_frame_143.png", "ct_train_1013_image_frame_43.png", "ct_train_1004_image_frame_49.png", "ct_train_1013_image_frame_4.png", "ct_train_1004_image_frame_39.png", "ct_train_1007_image_frame_43.png", "ct_train_1004_image_frame_42.png", "ct_train_1013_image_frame_170.png", "ct_train_1007_image_frame_175.png", "ct_train_1007_image_frame_96.png", "ct_train_1013_image_frame_105.png", "ct_train_1007_image_frame_205.png", "ct_train_1013_image_frame_115.png", "ct_train_1007_image_frame_86.png", "ct_train_1013_image_frame_189.png", "ct_train_1007_image_frame_152.png", "ct_train_1013_image_frame_118.png", "ct_train_1013_image_frame_121.png", "ct_train_1013_image_frame_204.png", "ct_train_1004_image_frame_175.png", "ct_train_1004_image_frame_34.png", "ct_train_1007_image_frame_240.png", "ct_train_1007_image_frame_219.png", "ct_train_1004_image_frame_165.png", "ct_train_1013_image_frame_29.png", "ct_train_1013_image_frame_96.png", "ct_train_1013_image_frame_172.png", "ct_train_1013_image_frame_62.png", "ct_train_1004_image_frame_116.png", "ct_train_1007_image_frame_137.png", "ct_train_1007_image_frame_31.png", "ct_train_1007_image_frame_128.png", "ct_train_1004_image_frame_4.png", "ct_train_1013_image_frame_200.png", "ct_train_1004_image_frame_129.png", "ct_train_1007_image_frame_212.png", 
"ct_train_1007_image_frame_32.png", "ct_train_1007_image_frame_58.png", "ct_train_1004_image_frame_100.png", "ct_train_1013_image_frame_32.png", "ct_train_1013_image_frame_109.png", "ct_train_1004_image_frame_171.png", "ct_train_1007_image_frame_133.png", "ct_train_1004_image_frame_110.png", "ct_train_1007_image_frame_189.png", "ct_train_1013_image_frame_198.png", "ct_train_1013_image_frame_207.png", "ct_train_1013_image_frame_107.png", "ct_train_1013_image_frame_148.png", "ct_train_1004_image_frame_128.png", "ct_train_1007_image_frame_169.png", "ct_train_1013_image_frame_141.png", "ct_train_1004_image_frame_85.png", "ct_train_1007_image_frame_28.png", "ct_train_1007_image_frame_63.png", "ct_train_1004_image_frame_9.png", "ct_train_1013_image_frame_178.png", "ct_train_1007_image_frame_145.png", "ct_train_1004_image_frame_109.png", "ct_train_1004_image_frame_96.png", "ct_train_1007_image_frame_8.png", "ct_train_1007_image_frame_38.png", "ct_train_1004_image_frame_62.png", "ct_train_1013_image_frame_92.png", "ct_train_1004_image_frame_13.png", "ct_train_1004_image_frame_95.png", "ct_train_1013_image_frame_153.png", "ct_train_1007_image_frame_90.png", "ct_train_1004_image_frame_193.png", "ct_train_1013_image_frame_126.png", "ct_train_1013_image_frame_124.png", "ct_train_1007_image_frame_217.png", "ct_train_1007_image_frame_100.png", "ct_train_1013_image_frame_90.png", "ct_train_1007_image_frame_2.png", "ct_train_1004_image_frame_119.png", "ct_train_1004_image_frame_132.png", "ct_train_1007_image_frame_172.png", "ct_train_1007_image_frame_111.png", "ct_train_1007_image_frame_191.png", "ct_train_1007_image_frame_62.png", "ct_train_1004_image_frame_135.png", "ct_train_1007_image_frame_162.png", "ct_train_1007_image_frame_164.png", "ct_train_1004_image_frame_73.png", "ct_train_1004_image_frame_141.png", "ct_train_1013_image_frame_195.png", "ct_train_1004_image_frame_86.png", "ct_train_1007_image_frame_60.png", "ct_train_1013_image_frame_194.png", "ct_train_1013_image_frame_64.png", "ct_train_1013_image_frame_180.png", "ct_train_1004_image_frame_104.png", "ct_train_1013_image_frame_76.png", "ct_train_1004_image_frame_54.png", "ct_train_1004_image_frame_14.png", "ct_train_1007_image_frame_129.png", "ct_train_1007_image_frame_211.png", "ct_train_1004_image_frame_31.png", "ct_train_1007_image_frame_30.png", "ct_train_1013_image_frame_106.png", "ct_train_1007_image_frame_229.png", "ct_train_1007_image_frame_125.png", "ct_train_1004_image_frame_151.png", "ct_train_1013_image_frame_2.png", "ct_train_1007_image_frame_136.png", "ct_train_1004_image_frame_144.png", "ct_train_1007_image_frame_112.png", "ct_train_1004_image_frame_127.png", "ct_train_1007_image_frame_149.png", "ct_train_1013_image_frame_165.png", "ct_train_1013_image_frame_133.png", "ct_train_1013_image_frame_67.png", "ct_train_1013_image_frame_140.png", "ct_train_1007_image_frame_195.png", "ct_train_1007_image_frame_41.png", "ct_train_1004_image_frame_117.png", "ct_train_1007_image_frame_141.png", "ct_train_1004_image_frame_120.png", "ct_train_1007_image_frame_216.png", "ct_train_1004_image_frame_58.png", "ct_train_1004_image_frame_138.png", "ct_train_1004_image_frame_65.png", "ct_train_1013_image_frame_187.png", "ct_train_1004_image_frame_105.png", "ct_train_1004_image_frame_111.png", "ct_train_1013_image_frame_146.png", "ct_train_1007_image_frame_116.png", "ct_train_1004_image_frame_40.png", "ct_train_1007_image_frame_142.png", "ct_train_1004_image_frame_191.png", "ct_train_1004_image_frame_161.png", "ct_train_1007_image_frame_75.png", 
"ct_train_1013_image_frame_186.png", "ct_train_1007_image_frame_71.png", "ct_train_1013_image_frame_52.png", "ct_train_1007_image_frame_196.png", "ct_train_1007_image_frame_184.png", "ct_train_1004_image_frame_64.png", "ct_train_1004_image_frame_75.png", "ct_train_1007_image_frame_107.png", "ct_train_1007_image_frame_42.png", "ct_train_1013_image_frame_136.png", "ct_train_1007_image_frame_234.png", "ct_train_1004_image_frame_158.png", "ct_train_1004_image_frame_177.png", "ct_train_1007_image_frame_91.png", "ct_train_1007_image_frame_118.png", "ct_train_1007_image_frame_95.png", "ct_train_1004_image_frame_5.png", "ct_train_1004_image_frame_145.png", "ct_train_1007_image_frame_50.png", "ct_train_1007_image_frame_163.png", "ct_train_1013_image_frame_5.png", "ct_train_1004_image_frame_18.png", "ct_train_1004_image_frame_37.png", "ct_train_1007_image_frame_117.png", "ct_train_1007_image_frame_124.png", "ct_train_1013_image_frame_30.png", "ct_train_1013_image_frame_33.png", "ct_train_1013_image_frame_7.png", "ct_train_1007_image_frame_156.png", "ct_train_1004_image_frame_10.png", "ct_train_1007_image_frame_235.png", "ct_train_1007_image_frame_157.png", "ct_train_1007_image_frame_183.png", "ct_train_1004_image_frame_32.png", "ct_train_1004_image_frame_1.png", "ct_train_1004_image_frame_43.png", "ct_train_1007_image_frame_123.png", "ct_train_1004_image_frame_170.png", "ct_train_1004_image_frame_166.png", "ct_train_1007_image_frame_126.png", "ct_train_1007_image_frame_167.png", "ct_train_1007_image_frame_40.png", "ct_train_1013_image_frame_75.png", "ct_train_1007_image_frame_12.png", "ct_train_1004_image_frame_174.png", "ct_train_1013_image_frame_104.png", "ct_train_1004_image_frame_133.png", "ct_train_1013_image_frame_122.png", "ct_train_1007_image_frame_181.png", "ct_train_1004_image_frame_164.png", "ct_train_1007_image_frame_173.png", "ct_train_1007_image_frame_33.png", "ct_train_1007_image_frame_26.png", "ct_train_1007_image_frame_65.png", "ct_train_1004_image_frame_121.png", "ct_train_1013_image_frame_185.png", "ct_train_1007_image_frame_57.png", "ct_train_1013_image_frame_58.png", "ct_train_1013_image_frame_210.png", "ct_train_1007_image_frame_177.png", "ct_train_1004_image_frame_106.png", "ct_train_1004_image_frame_114.png", "ct_train_1013_image_frame_16.png", "ct_train_1007_image_frame_105.png", "ct_train_1013_image_frame_87.png", "ct_train_1013_image_frame_26.png", "ct_train_1007_image_frame_203.png", "ct_train_1007_image_frame_29.png", "ct_train_1013_image_frame_127.png", "ct_train_1007_image_frame_10.png", "ct_train_1013_image_frame_100.png", "ct_train_1013_image_frame_155.png", "ct_train_1004_image_frame_78.png", "ct_train_1004_image_frame_30.png", "ct_train_1004_image_frame_196.png", "ct_train_1013_image_frame_35.png", "ct_train_1013_image_frame_60.png", "ct_train_1007_image_frame_138.png", "ct_train_1013_image_frame_78.png", "ct_train_1007_image_frame_49.png", "ct_train_1007_image_frame_131.png", "ct_train_1004_image_frame_71.png", "ct_train_1013_image_frame_161.png", "ct_train_1004_image_frame_118.png", "ct_train_1007_image_frame_27.png", "ct_train_1013_image_frame_173.png", "ct_train_1004_image_frame_52.png", "ct_train_1007_image_frame_168.png", "ct_train_1004_image_frame_93.png", "ct_train_1007_image_frame_53.png", "ct_train_1007_image_frame_179.png", "ct_train_1007_image_frame_209.png", "ct_train_1013_image_frame_95.png", "ct_train_1013_image_frame_1.png", "ct_train_1004_image_frame_35.png", "ct_train_1013_image_frame_114.png", "ct_train_1013_image_frame_47.png", 
"ct_train_1004_image_frame_199.png", "ct_train_1007_image_frame_23.png", "ct_train_1007_image_frame_69.png", "ct_train_1013_image_frame_152.png", "ct_train_1013_image_frame_13.png", "ct_train_1013_image_frame_8.png", "ct_train_1004_image_frame_101.png", "ct_train_1013_image_frame_71.png", "ct_train_1013_image_frame_196.png", "ct_train_1007_image_frame_55.png", "ct_train_1004_image_frame_68.png", "ct_train_1007_image_frame_174.png", "ct_train_1007_image_frame_64.png", "ct_train_1007_image_frame_232.png", "ct_train_1007_image_frame_176.png", "ct_train_1013_image_frame_208.png", "ct_train_1013_image_frame_20.png", "ct_train_1004_image_frame_154.png", "ct_train_1013_image_frame_202.png", "ct_train_1004_image_frame_167.png", "ct_train_1004_image_frame_56.png", "ct_train_1007_image_frame_92.png", "ct_train_1007_image_frame_48.png", "ct_train_1004_image_frame_136.png", "ct_train_1013_image_frame_31.png", "ct_train_1007_image_frame_199.png", "ct_train_1007_image_frame_19.png", "ct_train_1013_image_frame_163.png", "ct_train_1007_image_frame_170.png", "ct_train_1007_image_frame_56.png", "ct_train_1004_image_frame_41.png", "ct_train_1007_image_frame_147.png", "ct_train_1004_image_frame_27.png", "ct_train_1007_image_frame_130.png", "ct_train_1004_image_frame_140.png", "ct_train_1013_image_frame_171.png", "ct_train_1007_image_frame_17.png", "ct_train_1013_image_frame_27.png", "ct_train_1007_image_frame_104.png", "ct_train_1013_image_frame_49.png", "ct_train_1004_image_frame_107.png", "ct_train_1013_image_frame_142.png", "ct_train_1007_image_frame_88.png", "ct_train_1007_image_frame_146.png", "ct_train_1004_image_frame_87.png", "ct_train_1007_image_frame_198.png", "ct_train_1004_image_frame_15.png", "ct_train_1013_image_frame_11.png", "ct_train_1004_image_frame_55.png", "ct_train_1007_image_frame_144.png", "ct_train_1004_image_frame_99.png", "ct_train_1013_image_frame_77.png", "ct_train_1007_image_frame_78.png", "ct_train_1004_image_frame_172.png", "ct_train_1013_image_frame_42.png", "ct_train_1013_image_frame_117.png", "ct_train_1004_image_frame_134.png", "ct_train_1007_image_frame_231.png", "ct_train_1007_image_frame_20.png", "ct_train_1013_image_frame_128.png", "ct_train_1013_image_frame_36.png", "ct_train_1007_image_frame_200.png", "ct_train_1007_image_frame_72.png", "ct_train_1013_image_frame_192.png", "ct_train_1007_image_frame_9.png", "ct_train_1013_image_frame_97.png", "ct_train_1013_image_frame_206.png", "ct_train_1013_image_frame_59.png", "ct_train_1013_image_frame_53.png", "ct_train_1007_image_frame_97.png", "ct_train_1004_image_frame_19.png", "ct_train_1004_image_frame_63.png", "ct_train_1013_image_frame_191.png", "ct_train_1007_image_frame_178.png", "ct_train_1007_image_frame_158.png", "ct_train_1007_image_frame_210.png", "ct_train_1007_image_frame_44.png", "ct_train_1013_image_frame_14.png", "ct_train_1007_image_frame_98.png", "ct_train_1013_image_frame_190.png", "ct_train_1013_image_frame_39.png", "ct_train_1013_image_frame_135.png", "ct_train_1013_image_frame_23.png", "ct_train_1007_image_frame_204.png", "ct_train_1013_image_frame_164.png", "ct_train_1007_image_frame_37.png", "ct_train_1004_image_frame_159.png", "ct_train_1004_image_frame_47.png", "ct_train_1007_image_frame_230.png", "ct_train_1013_image_frame_89.png", "ct_train_1007_image_frame_221.png", "ct_train_1007_image_frame_120.png", "ct_train_1004_image_frame_94.png", "ct_train_1004_image_frame_45.png", "ct_train_1004_image_frame_102.png", "ct_train_1007_image_frame_160.png", "ct_train_1007_image_frame_80.png", 
"ct_train_1004_image_frame_150.png", "ct_train_1013_image_frame_184.png", "ct_train_1004_image_frame_185.png", "ct_train_1007_image_frame_225.png", "ct_train_1004_image_frame_98.png", "ct_train_1007_image_frame_68.png", "ct_train_1004_image_frame_60.png", "ct_train_1007_image_frame_197.png", "ct_train_1013_image_frame_51.png", "ct_train_1004_image_frame_0.png", "ct_train_1013_image_frame_125.png", "ct_train_1013_image_frame_74.png", "ct_train_1007_image_frame_201.png", "ct_train_1007_image_frame_87.png", "ct_train_1007_image_frame_215.png", "ct_train_1004_image_frame_182.png", "ct_train_1004_image_frame_21.png", "ct_train_1004_image_frame_176.png", "ct_train_1013_image_frame_41.png", "ct_train_1013_image_frame_69.png", "ct_train_1007_image_frame_103.png", "ct_train_1007_image_frame_188.png", "ct_train_1004_image_frame_178.png", "ct_train_1013_image_frame_17.png", "ct_train_1004_image_frame_153.png", "ct_train_1007_image_frame_74.png", "ct_train_1013_image_frame_131.png", "ct_train_1004_image_frame_92.png", "ct_train_1007_image_frame_180.png", "ct_train_1004_image_frame_7.png", "ct_train_1007_image_frame_7.png", "ct_train_1004_image_frame_115.png", "ct_train_1004_image_frame_187.png", "ct_train_1013_image_frame_34.png", "ct_train_1013_image_frame_46.png", "ct_train_1007_image_frame_150.png", "ct_train_1007_image_frame_59.png", "ct_train_1013_image_frame_174.png", "ct_train_1007_image_frame_155.png", "ct_train_1004_image_frame_157.png", "ct_train_1013_image_frame_84.png", "ct_train_1004_image_frame_25.png", "ct_train_1013_image_frame_28.png", "ct_train_1004_image_frame_122.png", "ct_train_1004_image_frame_124.png", "ct_train_1013_image_frame_57.png", "ct_train_1013_image_frame_18.png", "ct_train_1004_image_frame_192.png", "ct_train_1013_image_frame_166.png", "ct_train_1007_image_frame_39.png", "ct_train_1013_image_frame_137.png", "ct_train_1013_image_frame_37.png", "ct_train_1013_image_frame_203.png", "ct_train_1007_image_frame_143.png", "ct_train_1004_image_frame_126.png", "ct_train_1004_image_frame_82.png", "ct_train_1007_image_frame_18.png", "ct_train_1004_image_frame_112.png", "ct_train_1007_image_frame_238.png", "ct_train_1013_image_frame_101.png", "ct_train_1013_image_frame_98.png", "ct_train_1007_image_frame_171.png", "ct_train_1013_image_frame_159.png", "ct_train_1007_image_frame_94.png", "ct_train_1004_image_frame_84.png", "ct_train_1013_image_frame_73.png", "ct_train_1004_image_frame_139.png", "ct_train_1004_image_frame_83.png", "ct_train_1004_image_frame_180.png", "ct_train_1007_image_frame_73.png", "ct_train_1007_image_frame_45.png", "ct_train_1007_image_frame_93.png", "ct_train_1013_image_frame_66.png", "ct_train_1007_image_frame_119.png", "ct_train_1004_image_frame_53.png", "ct_train_1007_image_frame_165.png", "ct_train_1013_image_frame_201.png", "ct_train_1013_image_frame_55.png", "ct_train_1013_image_frame_86.png", "ct_train_1004_image_frame_6.png", "ct_train_1013_image_frame_120.png", "ct_train_1007_image_frame_34.png", "ct_train_1004_image_frame_22.png", "ct_train_1004_image_frame_61.png", "ct_train_1004_image_frame_67.png", "ct_train_1007_image_frame_85.png"], "test": ["ct_train_1019_image_frame_29.png", "ct_train_1019_image_frame_257.png", "ct_train_1002_image_frame_218.png", "ct_train_1019_image_frame_52.png", "ct_train_1010_image_frame_151.png", "ct_train_1002_image_frame_173.png", "ct_train_1019_image_frame_220.png", "ct_train_1010_image_frame_142.png", "ct_train_1012_image_frame_106.png", "ct_train_1010_image_frame_74.png", "ct_train_1002_image_frame_75.png", 
"ct_train_1002_image_frame_167.png", "ct_train_1002_image_frame_80.png", "ct_train_1002_image_frame_219.png", "ct_train_1012_image_frame_5.png", "ct_train_1002_image_frame_132.png", "ct_train_1012_image_frame_154.png", "ct_train_1019_image_frame_112.png", "ct_train_1012_image_frame_59.png", "ct_train_1002_image_frame_28.png", "ct_train_1019_image_frame_206.png", "ct_train_1010_image_frame_202.png", "ct_train_1019_image_frame_225.png", "ct_train_1012_image_frame_73.png", "ct_train_1012_image_frame_77.png", "ct_train_1010_image_frame_29.png", "ct_train_1012_image_frame_149.png", "ct_train_1010_image_frame_175.png", "ct_train_1010_image_frame_87.png", "ct_train_1002_image_frame_184.png", "ct_train_1002_image_frame_194.png", "ct_train_1012_image_frame_56.png", "ct_train_1002_image_frame_116.png", "ct_train_1019_image_frame_262.png", "ct_train_1019_image_frame_97.png", "ct_train_1012_image_frame_139.png", "ct_train_1010_image_frame_225.png", "ct_train_1019_image_frame_234.png", "ct_train_1019_image_frame_296.png", "ct_train_1002_image_frame_85.png", "ct_train_1012_image_frame_1.png", "ct_train_1012_image_frame_96.png", "ct_train_1010_image_frame_208.png", "ct_train_1010_image_frame_129.png", "ct_train_1002_image_frame_24.png", "ct_train_1002_image_frame_86.png", "ct_train_1010_image_frame_266.png", "ct_train_1019_image_frame_7.png", "ct_train_1010_image_frame_124.png", "ct_train_1019_image_frame_293.png", "ct_train_1019_image_frame_173.png", "ct_train_1012_image_frame_159.png", "ct_train_1002_image_frame_32.png", "ct_train_1012_image_frame_166.png", "ct_train_1012_image_frame_175.png", "ct_train_1010_image_frame_38.png", "ct_train_1002_image_frame_44.png", "ct_train_1010_image_frame_27.png", "ct_train_1019_image_frame_148.png", "ct_train_1002_image_frame_208.png", "ct_train_1019_image_frame_75.png", "ct_train_1019_image_frame_196.png", "ct_train_1002_image_frame_40.png", "ct_train_1012_image_frame_140.png", "ct_train_1019_image_frame_26.png", "ct_train_1010_image_frame_139.png", "ct_train_1010_image_frame_176.png", "ct_train_1002_image_frame_49.png", "ct_train_1019_image_frame_247.png", "ct_train_1019_image_frame_226.png", "ct_train_1010_image_frame_190.png", "ct_train_1002_image_frame_238.png", "ct_train_1002_image_frame_178.png", "ct_train_1010_image_frame_56.png", "ct_train_1019_image_frame_142.png", "ct_train_1019_image_frame_114.png", "ct_train_1010_image_frame_95.png", "ct_train_1010_image_frame_133.png", "ct_train_1010_image_frame_98.png", "ct_train_1002_image_frame_152.png", "ct_train_1010_image_frame_42.png", "ct_train_1012_image_frame_74.png", "ct_train_1019_image_frame_132.png", "ct_train_1010_image_frame_221.png", "ct_train_1019_image_frame_238.png", "ct_train_1019_image_frame_241.png", "ct_train_1002_image_frame_61.png", "ct_train_1019_image_frame_33.png", "ct_train_1012_image_frame_2.png", "ct_train_1010_image_frame_163.png", "ct_train_1010_image_frame_160.png", "ct_train_1002_image_frame_220.png", "ct_train_1010_image_frame_168.png", "ct_train_1012_image_frame_124.png", "ct_train_1010_image_frame_35.png", "ct_train_1019_image_frame_113.png", "ct_train_1012_image_frame_53.png", "ct_train_1012_image_frame_27.png", "ct_train_1012_image_frame_16.png", "ct_train_1002_image_frame_21.png", "ct_train_1012_image_frame_138.png", "ct_train_1012_image_frame_0.png", "ct_train_1010_image_frame_228.png", "ct_train_1012_image_frame_115.png", "ct_train_1002_image_frame_165.png", "ct_train_1010_image_frame_114.png", "ct_train_1002_image_frame_84.png", "ct_train_1019_image_frame_245.png", 
"ct_train_1010_image_frame_136.png", "ct_train_1002_image_frame_52.png", "ct_train_1002_image_frame_229.png", "ct_train_1002_image_frame_109.png", "ct_train_1010_image_frame_192.png", "ct_train_1010_image_frame_30.png", "ct_train_1002_image_frame_98.png", "ct_train_1019_image_frame_198.png", "ct_train_1012_image_frame_161.png", "ct_train_1010_image_frame_59.png", "ct_train_1019_image_frame_174.png", "ct_train_1019_image_frame_90.png", "ct_train_1012_image_frame_64.png", "ct_train_1010_image_frame_72.png", "ct_train_1019_image_frame_109.png", "ct_train_1019_image_frame_100.png", "ct_train_1019_image_frame_64.png", "ct_train_1002_image_frame_104.png", "ct_train_1019_image_frame_2.png", "ct_train_1010_image_frame_140.png", "ct_train_1019_image_frame_94.png", "ct_train_1012_image_frame_60.png", "ct_train_1010_image_frame_201.png", "ct_train_1010_image_frame_215.png", "ct_train_1019_image_frame_9.png", "ct_train_1019_image_frame_268.png", "ct_train_1012_image_frame_57.png", "ct_train_1002_image_frame_170.png", "ct_train_1019_image_frame_118.png", "ct_train_1012_image_frame_70.png", "ct_train_1019_image_frame_275.png", "ct_train_1019_image_frame_18.png", "ct_train_1010_image_frame_53.png", "ct_train_1010_image_frame_75.png", "ct_train_1012_image_frame_137.png", "ct_train_1019_image_frame_235.png", "ct_train_1010_image_frame_177.png", "ct_train_1019_image_frame_145.png", "ct_train_1010_image_frame_210.png", "ct_train_1012_image_frame_52.png", "ct_train_1010_image_frame_198.png", "ct_train_1002_image_frame_62.png", "ct_train_1002_image_frame_159.png", "ct_train_1002_image_frame_72.png", "ct_train_1019_image_frame_119.png", "ct_train_1002_image_frame_144.png", "ct_train_1012_image_frame_83.png", "ct_train_1010_image_frame_99.png", "ct_train_1002_image_frame_25.png", "ct_train_1019_image_frame_254.png", "ct_train_1019_image_frame_215.png", "ct_train_1010_image_frame_134.png", "ct_train_1019_image_frame_164.png", "ct_train_1002_image_frame_30.png", "ct_train_1002_image_frame_94.png", "ct_train_1012_image_frame_112.png", "ct_train_1019_image_frame_185.png", "ct_train_1002_image_frame_168.png", "ct_train_1010_image_frame_265.png", "ct_train_1019_image_frame_144.png", "ct_train_1002_image_frame_115.png", "ct_train_1012_image_frame_26.png", "ct_train_1010_image_frame_8.png", "ct_train_1010_image_frame_222.png", "ct_train_1010_image_frame_66.png", "ct_train_1002_image_frame_23.png", "ct_train_1019_image_frame_68.png", "ct_train_1002_image_frame_162.png", "ct_train_1012_image_frame_94.png", "ct_train_1002_image_frame_12.png", "ct_train_1012_image_frame_66.png", "ct_train_1012_image_frame_86.png", "ct_train_1019_image_frame_76.png", "ct_train_1010_image_frame_238.png", "ct_train_1019_image_frame_195.png", "ct_train_1010_image_frame_9.png", "ct_train_1012_image_frame_80.png", "ct_train_1002_image_frame_64.png", "ct_train_1012_image_frame_131.png", "ct_train_1019_image_frame_267.png", "ct_train_1019_image_frame_172.png", "ct_train_1002_image_frame_133.png", "ct_train_1012_image_frame_116.png", "ct_train_1019_image_frame_17.png", "ct_train_1019_image_frame_199.png", "ct_train_1019_image_frame_186.png", "ct_train_1002_image_frame_14.png", "ct_train_1019_image_frame_279.png", "ct_train_1002_image_frame_225.png", "ct_train_1019_image_frame_8.png", "ct_train_1002_image_frame_223.png", "ct_train_1010_image_frame_46.png", "ct_train_1010_image_frame_157.png", "ct_train_1019_image_frame_283.png", "ct_train_1002_image_frame_205.png", "ct_train_1019_image_frame_129.png", "ct_train_1010_image_frame_164.png", 
"ct_train_1010_image_frame_251.png", "ct_train_1002_image_frame_146.png", "ct_train_1012_image_frame_38.png", "ct_train_1002_image_frame_163.png", "ct_train_1019_image_frame_42.png", "ct_train_1012_image_frame_117.png", "ct_train_1019_image_frame_44.png", "ct_train_1010_image_frame_197.png", "ct_train_1002_image_frame_78.png", "ct_train_1002_image_frame_233.png", "ct_train_1019_image_frame_207.png", "ct_train_1002_image_frame_180.png", "ct_train_1010_image_frame_0.png", "ct_train_1019_image_frame_150.png", "ct_train_1002_image_frame_156.png", "ct_train_1010_image_frame_245.png", "ct_train_1012_image_frame_93.png", "ct_train_1012_image_frame_67.png", "ct_train_1012_image_frame_32.png", "ct_train_1012_image_frame_44.png", "ct_train_1019_image_frame_38.png", "ct_train_1019_image_frame_204.png", "ct_train_1010_image_frame_101.png", "ct_train_1012_image_frame_23.png", "ct_train_1002_image_frame_71.png", "ct_train_1019_image_frame_5.png", "ct_train_1010_image_frame_23.png", "ct_train_1010_image_frame_128.png", "ct_train_1012_image_frame_170.png", "ct_train_1010_image_frame_43.png", "ct_train_1019_image_frame_81.png", "ct_train_1012_image_frame_99.png", "ct_train_1012_image_frame_168.png", "ct_train_1002_image_frame_150.png", "ct_train_1012_image_frame_34.png", "ct_train_1002_image_frame_183.png", "ct_train_1002_image_frame_185.png", "ct_train_1010_image_frame_256.png", "ct_train_1002_image_frame_188.png", "ct_train_1010_image_frame_16.png", "ct_train_1012_image_frame_176.png", "ct_train_1010_image_frame_267.png", "ct_train_1012_image_frame_147.png", "ct_train_1010_image_frame_273.png", "ct_train_1019_image_frame_83.png", "ct_train_1019_image_frame_149.png", "ct_train_1002_image_frame_187.png", "ct_train_1019_image_frame_187.png", "ct_train_1012_image_frame_113.png", "ct_train_1019_image_frame_278.png", "ct_train_1010_image_frame_191.png", "ct_train_1012_image_frame_110.png", "ct_train_1010_image_frame_250.png", "ct_train_1010_image_frame_106.png", "ct_train_1010_image_frame_15.png", "ct_train_1012_image_frame_119.png", "ct_train_1002_image_frame_142.png", "ct_train_1019_image_frame_179.png", "ct_train_1010_image_frame_186.png", "ct_train_1010_image_frame_5.png", "ct_train_1019_image_frame_111.png", "ct_train_1002_image_frame_65.png", "ct_train_1012_image_frame_173.png", "ct_train_1012_image_frame_145.png", "ct_train_1019_image_frame_63.png", "ct_train_1019_image_frame_79.png", "ct_train_1019_image_frame_155.png", "ct_train_1019_image_frame_180.png", "ct_train_1019_image_frame_23.png", "ct_train_1019_image_frame_128.png", "ct_train_1002_image_frame_31.png", "ct_train_1019_image_frame_181.png", "ct_train_1010_image_frame_204.png", "ct_train_1019_image_frame_290.png", "ct_train_1002_image_frame_47.png", "ct_train_1010_image_frame_169.png", "ct_train_1019_image_frame_34.png", "ct_train_1002_image_frame_121.png", "ct_train_1012_image_frame_50.png", "ct_train_1019_image_frame_280.png", "ct_train_1012_image_frame_92.png", "ct_train_1012_image_frame_19.png", "ct_train_1019_image_frame_239.png", "ct_train_1002_image_frame_221.png", "ct_train_1012_image_frame_127.png", "ct_train_1010_image_frame_52.png", "ct_train_1010_image_frame_183.png", "ct_train_1010_image_frame_167.png", "ct_train_1012_image_frame_167.png", "ct_train_1010_image_frame_37.png", "ct_train_1019_image_frame_89.png", "ct_train_1019_image_frame_116.png", "ct_train_1010_image_frame_179.png", "ct_train_1010_image_frame_148.png", "ct_train_1002_image_frame_81.png", "ct_train_1012_image_frame_82.png", "ct_train_1010_image_frame_156.png", 
"ct_train_1012_image_frame_12.png", "ct_train_1010_image_frame_152.png", "ct_train_1010_image_frame_119.png", "ct_train_1002_image_frame_35.png", "ct_train_1019_image_frame_138.png", "ct_train_1010_image_frame_203.png", "ct_train_1010_image_frame_207.png", "ct_train_1012_image_frame_125.png", "ct_train_1002_image_frame_22.png", "ct_train_1002_image_frame_124.png", "ct_train_1019_image_frame_121.png", "ct_train_1010_image_frame_150.png", "ct_train_1010_image_frame_48.png", "ct_train_1002_image_frame_87.png", "ct_train_1010_image_frame_264.png", "ct_train_1002_image_frame_228.png", "ct_train_1010_image_frame_1.png", "ct_train_1012_image_frame_65.png", "ct_train_1019_image_frame_115.png", "ct_train_1019_image_frame_190.png", "ct_train_1019_image_frame_261.png", "ct_train_1019_image_frame_45.png", "ct_train_1002_image_frame_201.png", "ct_train_1010_image_frame_257.png", "ct_train_1019_image_frame_265.png", "ct_train_1010_image_frame_86.png", "ct_train_1010_image_frame_247.png", "ct_train_1019_image_frame_27.png", "ct_train_1012_image_frame_85.png", "ct_train_1002_image_frame_143.png", "ct_train_1010_image_frame_49.png", "ct_train_1019_image_frame_205.png", "ct_train_1019_image_frame_134.png", "ct_train_1010_image_frame_211.png", "ct_train_1012_image_frame_3.png", "ct_train_1002_image_frame_125.png", "ct_train_1012_image_frame_152.png", "ct_train_1002_image_frame_195.png", "ct_train_1010_image_frame_212.png", "ct_train_1010_image_frame_234.png", "ct_train_1019_image_frame_270.png", "ct_train_1019_image_frame_39.png", "ct_train_1010_image_frame_82.png", "ct_train_1012_image_frame_142.png", "ct_train_1019_image_frame_240.png", "ct_train_1019_image_frame_175.png", "ct_train_1019_image_frame_11.png", "ct_train_1002_image_frame_60.png", "ct_train_1019_image_frame_295.png", "ct_train_1012_image_frame_62.png", "ct_train_1002_image_frame_126.png", "ct_train_1012_image_frame_172.png", "ct_train_1019_image_frame_12.png", "ct_train_1019_image_frame_217.png", "ct_train_1012_image_frame_88.png", "ct_train_1019_image_frame_288.png", "ct_train_1019_image_frame_151.png", "ct_train_1012_image_frame_151.png", "ct_train_1019_image_frame_210.png", "ct_train_1019_image_frame_264.png", "ct_train_1012_image_frame_14.png", "ct_train_1012_image_frame_91.png", "ct_train_1012_image_frame_69.png", "ct_train_1010_image_frame_135.png", "ct_train_1019_image_frame_106.png", "ct_train_1019_image_frame_135.png", "ct_train_1002_image_frame_11.png", "ct_train_1019_image_frame_287.png", "ct_train_1002_image_frame_174.png", "ct_train_1019_image_frame_21.png", "ct_train_1019_image_frame_221.png", "ct_train_1002_image_frame_153.png", "ct_train_1002_image_frame_10.png", "ct_train_1002_image_frame_193.png", "ct_train_1010_image_frame_263.png", "ct_train_1019_image_frame_182.png", "ct_train_1002_image_frame_36.png", "ct_train_1002_image_frame_191.png", "ct_train_1010_image_frame_199.png", "ct_train_1019_image_frame_13.png", "ct_train_1002_image_frame_131.png", "ct_train_1010_image_frame_236.png", "ct_train_1019_image_frame_91.png", "ct_train_1012_image_frame_4.png", "ct_train_1012_image_frame_76.png", "ct_train_1019_image_frame_41.png", "ct_train_1012_image_frame_45.png", "ct_train_1010_image_frame_83.png", "ct_train_1010_image_frame_220.png", "ct_train_1019_image_frame_277.png", "ct_train_1002_image_frame_9.png", "ct_train_1019_image_frame_184.png", "ct_train_1010_image_frame_145.png", "ct_train_1012_image_frame_101.png", "ct_train_1002_image_frame_177.png", "ct_train_1019_image_frame_93.png", "ct_train_1019_image_frame_242.png", 
"ct_train_1002_image_frame_157.png", "ct_train_1002_image_frame_207.png", "ct_train_1010_image_frame_253.png", "ct_train_1010_image_frame_102.png", "ct_train_1002_image_frame_92.png", "ct_train_1002_image_frame_1.png", "ct_train_1010_image_frame_248.png", "ct_train_1019_image_frame_127.png", "ct_train_1019_image_frame_120.png", "ct_train_1019_image_frame_233.png", "ct_train_1012_image_frame_61.png", "ct_train_1019_image_frame_117.png", "ct_train_1012_image_frame_109.png", "ct_train_1002_image_frame_136.png", "ct_train_1010_image_frame_78.png", "ct_train_1002_image_frame_196.png", "ct_train_1002_image_frame_189.png", "ct_train_1010_image_frame_161.png", "ct_train_1019_image_frame_123.png", "ct_train_1010_image_frame_113.png", "ct_train_1019_image_frame_130.png", "ct_train_1019_image_frame_85.png", "ct_train_1019_image_frame_271.png", "ct_train_1002_image_frame_38.png", "ct_train_1002_image_frame_151.png", "ct_train_1019_image_frame_178.png", "ct_train_1019_image_frame_230.png", "ct_train_1012_image_frame_31.png", "ct_train_1010_image_frame_223.png", "ct_train_1010_image_frame_11.png", "ct_train_1002_image_frame_227.png", "ct_train_1019_image_frame_10.png", "ct_train_1010_image_frame_20.png", "ct_train_1019_image_frame_92.png", "ct_train_1010_image_frame_268.png", "ct_train_1012_image_frame_81.png", "ct_train_1010_image_frame_241.png", "ct_train_1019_image_frame_0.png", "ct_train_1019_image_frame_49.png", "ct_train_1012_image_frame_35.png", "ct_train_1010_image_frame_165.png", "ct_train_1012_image_frame_132.png", "ct_train_1002_image_frame_110.png", "ct_train_1002_image_frame_123.png", "ct_train_1019_image_frame_269.png", "ct_train_1012_image_frame_39.png", "ct_train_1010_image_frame_209.png", "ct_train_1019_image_frame_141.png", "ct_train_1012_image_frame_47.png", "ct_train_1019_image_frame_65.png", "ct_train_1010_image_frame_249.png", "ct_train_1019_image_frame_165.png", "ct_train_1010_image_frame_76.png", "ct_train_1019_image_frame_70.png", "ct_train_1002_image_frame_74.png", "ct_train_1019_image_frame_227.png", "ct_train_1012_image_frame_51.png", "ct_train_1002_image_frame_186.png", "ct_train_1012_image_frame_49.png", "ct_train_1002_image_frame_198.png", "ct_train_1012_image_frame_105.png", "ct_train_1002_image_frame_203.png", "ct_train_1002_image_frame_175.png", "ct_train_1010_image_frame_258.png", "ct_train_1019_image_frame_218.png", "ct_train_1002_image_frame_172.png", "ct_train_1012_image_frame_133.png", "ct_train_1010_image_frame_125.png", "ct_train_1012_image_frame_46.png", "ct_train_1010_image_frame_171.png", "ct_train_1010_image_frame_41.png", "ct_train_1002_image_frame_204.png", "ct_train_1010_image_frame_254.png", "ct_train_1002_image_frame_4.png", "ct_train_1010_image_frame_121.png", "ct_train_1002_image_frame_63.png", "ct_train_1002_image_frame_176.png", "ct_train_1002_image_frame_119.png", "ct_train_1019_image_frame_284.png", "ct_train_1010_image_frame_34.png", "ct_train_1019_image_frame_152.png", "ct_train_1002_image_frame_105.png", "ct_train_1012_image_frame_148.png", "ct_train_1019_image_frame_6.png", "ct_train_1012_image_frame_123.png", "ct_train_1019_image_frame_136.png", "ct_train_1002_image_frame_6.png", "ct_train_1002_image_frame_134.png", "ct_train_1002_image_frame_154.png", "ct_train_1019_image_frame_243.png", "ct_train_1010_image_frame_240.png", "ct_train_1010_image_frame_92.png", "ct_train_1002_image_frame_232.png", "ct_train_1012_image_frame_28.png", "ct_train_1010_image_frame_54.png", "ct_train_1019_image_frame_55.png", "ct_train_1019_image_frame_222.png", 
"ct_train_1010_image_frame_271.png", "ct_train_1002_image_frame_0.png", "ct_train_1010_image_frame_205.png", "ct_train_1002_image_frame_107.png", "ct_train_1012_image_frame_136.png", "ct_train_1012_image_frame_130.png", "ct_train_1019_image_frame_98.png", "ct_train_1002_image_frame_182.png", "ct_train_1019_image_frame_122.png", "ct_train_1012_image_frame_10.png", "ct_train_1019_image_frame_102.png", "ct_train_1002_image_frame_90.png", "ct_train_1019_image_frame_105.png", "ct_train_1012_image_frame_37.png", "ct_train_1012_image_frame_174.png", "ct_train_1010_image_frame_237.png", "ct_train_1010_image_frame_6.png", "ct_train_1002_image_frame_149.png", "ct_train_1002_image_frame_111.png", "ct_train_1010_image_frame_219.png", "ct_train_1010_image_frame_230.png", "ct_train_1019_image_frame_294.png", "ct_train_1019_image_frame_202.png", "ct_train_1019_image_frame_224.png", "ct_train_1010_image_frame_79.png", "ct_train_1002_image_frame_54.png", "ct_train_1010_image_frame_118.png", "ct_train_1010_image_frame_178.png", "ct_train_1019_image_frame_59.png", "ct_train_1002_image_frame_5.png", "ct_train_1002_image_frame_100.png", "ct_train_1019_image_frame_191.png", "ct_train_1010_image_frame_84.png", "ct_train_1019_image_frame_255.png", "ct_train_1010_image_frame_4.png", "ct_train_1012_image_frame_63.png", "ct_train_1010_image_frame_159.png", "ct_train_1019_image_frame_200.png", "ct_train_1019_image_frame_51.png", "ct_train_1019_image_frame_74.png", "ct_train_1019_image_frame_69.png", "ct_train_1012_image_frame_171.png", "ct_train_1012_image_frame_20.png", "ct_train_1002_image_frame_69.png", "ct_train_1002_image_frame_230.png", "ct_train_1002_image_frame_181.png", "ct_train_1012_image_frame_156.png", "ct_train_1010_image_frame_24.png", "ct_train_1010_image_frame_80.png", "ct_train_1002_image_frame_214.png", "ct_train_1019_image_frame_54.png", "ct_train_1019_image_frame_163.png", "ct_train_1010_image_frame_155.png", "ct_train_1010_image_frame_63.png", "ct_train_1019_image_frame_263.png", "ct_train_1010_image_frame_58.png", "ct_train_1019_image_frame_259.png", "ct_train_1002_image_frame_7.png", "ct_train_1002_image_frame_33.png", "ct_train_1010_image_frame_10.png", "ct_train_1019_image_frame_244.png", "ct_train_1012_image_frame_165.png", "ct_train_1019_image_frame_60.png", "ct_train_1010_image_frame_94.png", "ct_train_1012_image_frame_55.png", "ct_train_1002_image_frame_67.png", "ct_train_1019_image_frame_161.png", "ct_train_1019_image_frame_87.png", "ct_train_1019_image_frame_47.png", "ct_train_1010_image_frame_3.png", "ct_train_1019_image_frame_77.png", "ct_train_1010_image_frame_244.png", "ct_train_1010_image_frame_123.png", "ct_train_1019_image_frame_232.png", "ct_train_1010_image_frame_200.png", "ct_train_1012_image_frame_146.png", "ct_train_1002_image_frame_27.png", "ct_train_1019_image_frame_153.png", "ct_train_1010_image_frame_31.png", "ct_train_1010_image_frame_120.png", "ct_train_1019_image_frame_285.png", "ct_train_1019_image_frame_126.png", "ct_train_1010_image_frame_131.png", "ct_train_1012_image_frame_72.png", "ct_train_1002_image_frame_209.png", "ct_train_1019_image_frame_143.png", "ct_train_1012_image_frame_24.png", "ct_train_1019_image_frame_266.png", "ct_train_1012_image_frame_122.png", "ct_train_1002_image_frame_118.png", "ct_train_1019_image_frame_214.png", "ct_train_1012_image_frame_98.png", "ct_train_1002_image_frame_190.png", "ct_train_1010_image_frame_262.png", "ct_train_1002_image_frame_20.png", "ct_train_1019_image_frame_139.png", "ct_train_1010_image_frame_195.png", 
"ct_train_1010_image_frame_60.png", "ct_train_1002_image_frame_46.png", "ct_train_1010_image_frame_36.png", "ct_train_1002_image_frame_200.png", "ct_train_1010_image_frame_50.png", "ct_train_1002_image_frame_224.png", "ct_train_1010_image_frame_91.png", "ct_train_1012_image_frame_135.png", "ct_train_1019_image_frame_66.png", "ct_train_1019_image_frame_62.png", "ct_train_1019_image_frame_31.png", "ct_train_1019_image_frame_189.png", "ct_train_1012_image_frame_78.png", "ct_train_1019_image_frame_168.png", "ct_train_1012_image_frame_111.png", "ct_train_1019_image_frame_25.png", "ct_train_1019_image_frame_236.png", "ct_train_1012_image_frame_108.png", "ct_train_1019_image_frame_258.png", "ct_train_1002_image_frame_216.png", "ct_train_1010_image_frame_182.png", "ct_train_1002_image_frame_51.png", "ct_train_1019_image_frame_197.png", "ct_train_1002_image_frame_147.png", "ct_train_1002_image_frame_57.png", "ct_train_1010_image_frame_85.png", "ct_train_1010_image_frame_21.png", "ct_train_1019_image_frame_108.png", "ct_train_1010_image_frame_103.png", "ct_train_1010_image_frame_187.png", "ct_train_1019_image_frame_57.png", "ct_train_1012_image_frame_103.png", "ct_train_1012_image_frame_128.png", "ct_train_1012_image_frame_126.png", "ct_train_1002_image_frame_148.png", "ct_train_1019_image_frame_229.png", "ct_train_1002_image_frame_68.png", "ct_train_1019_image_frame_273.png", "ct_train_1010_image_frame_67.png", "ct_train_1012_image_frame_153.png", "ct_train_1019_image_frame_86.png", "ct_train_1002_image_frame_83.png", "ct_train_1002_image_frame_73.png", "ct_train_1002_image_frame_43.png", "ct_train_1010_image_frame_40.png", "ct_train_1002_image_frame_169.png", "ct_train_1019_image_frame_1.png", "ct_train_1012_image_frame_58.png", "ct_train_1002_image_frame_66.png", "ct_train_1010_image_frame_172.png", "ct_train_1019_image_frame_158.png", "ct_train_1019_image_frame_160.png", "ct_train_1010_image_frame_259.png", "ct_train_1002_image_frame_103.png", "ct_train_1010_image_frame_73.png", "ct_train_1010_image_frame_206.png", "ct_train_1010_image_frame_252.png", "ct_train_1019_image_frame_252.png", "ct_train_1010_image_frame_235.png", "ct_train_1019_image_frame_193.png", "ct_train_1019_image_frame_37.png", "ct_train_1002_image_frame_213.png", "ct_train_1012_image_frame_144.png", "ct_train_1002_image_frame_192.png", "ct_train_1010_image_frame_89.png", "ct_train_1019_image_frame_253.png", "ct_train_1010_image_frame_17.png", "ct_train_1002_image_frame_39.png", "ct_train_1002_image_frame_128.png", "ct_train_1012_image_frame_9.png", "ct_train_1019_image_frame_103.png", "ct_train_1010_image_frame_196.png", "ct_train_1019_image_frame_147.png", "ct_train_1002_image_frame_88.png", "ct_train_1019_image_frame_20.png", "ct_train_1010_image_frame_181.png", "ct_train_1019_image_frame_157.png", "ct_train_1019_image_frame_95.png", "ct_train_1012_image_frame_29.png", "ct_train_1010_image_frame_22.png", "ct_train_1002_image_frame_29.png", "ct_train_1002_image_frame_108.png", "ct_train_1019_image_frame_282.png", "ct_train_1019_image_frame_177.png", "ct_train_1010_image_frame_81.png", "ct_train_1019_image_frame_53.png", "ct_train_1010_image_frame_174.png", "ct_train_1010_image_frame_7.png", "ct_train_1019_image_frame_46.png", "ct_train_1010_image_frame_141.png", "ct_train_1010_image_frame_132.png", "ct_train_1019_image_frame_125.png", "ct_train_1002_image_frame_48.png", "ct_train_1019_image_frame_43.png", "ct_train_1010_image_frame_77.png", "ct_train_1010_image_frame_104.png", "ct_train_1012_image_frame_22.png", 
"ct_train_1002_image_frame_226.png", "ct_train_1002_image_frame_210.png", "ct_train_1019_image_frame_291.png", "ct_train_1010_image_frame_100.png", "ct_train_1019_image_frame_30.png", "ct_train_1010_image_frame_107.png", "ct_train_1010_image_frame_184.png", "ct_train_1002_image_frame_217.png", "ct_train_1010_image_frame_44.png", "ct_train_1019_image_frame_251.png", "ct_train_1012_image_frame_40.png", "ct_train_1019_image_frame_276.png", "ct_train_1010_image_frame_39.png", "ct_train_1010_image_frame_272.png", "ct_train_1002_image_frame_179.png", "ct_train_1002_image_frame_59.png", "ct_train_1002_image_frame_99.png", "ct_train_1002_image_frame_106.png", "ct_train_1010_image_frame_243.png", "ct_train_1010_image_frame_115.png", "ct_train_1010_image_frame_2.png", "ct_train_1010_image_frame_137.png", "ct_train_1002_image_frame_58.png", "ct_train_1019_image_frame_73.png", "ct_train_1019_image_frame_14.png", "ct_train_1002_image_frame_45.png", "ct_train_1002_image_frame_3.png", "ct_train_1002_image_frame_56.png", "ct_train_1002_image_frame_41.png", "ct_train_1012_image_frame_71.png", "ct_train_1010_image_frame_229.png", "ct_train_1002_image_frame_141.png", "ct_train_1010_image_frame_239.png", "ct_train_1019_image_frame_170.png", "ct_train_1012_image_frame_107.png", "ct_train_1019_image_frame_4.png", "ct_train_1012_image_frame_42.png", "ct_train_1019_image_frame_56.png", "ct_train_1002_image_frame_82.png", "ct_train_1010_image_frame_224.png", "ct_train_1019_image_frame_213.png", "ct_train_1012_image_frame_75.png", "ct_train_1019_image_frame_58.png", "ct_train_1010_image_frame_232.png", "ct_train_1010_image_frame_227.png", "ct_train_1012_image_frame_169.png", "ct_train_1019_image_frame_15.png", "ct_train_1012_image_frame_95.png", "ct_train_1010_image_frame_70.png", "ct_train_1002_image_frame_13.png", "ct_train_1012_image_frame_157.png", "ct_train_1019_image_frame_159.png", "ct_train_1002_image_frame_158.png", "ct_train_1002_image_frame_138.png", "ct_train_1002_image_frame_129.png", "ct_train_1002_image_frame_19.png", "ct_train_1002_image_frame_235.png", "ct_train_1010_image_frame_55.png", "ct_train_1019_image_frame_107.png", "ct_train_1002_image_frame_122.png", "ct_train_1019_image_frame_250.png", "ct_train_1010_image_frame_28.png", "ct_train_1012_image_frame_30.png", "ct_train_1010_image_frame_143.png", "ct_train_1010_image_frame_217.png", "ct_train_1002_image_frame_96.png", "ct_train_1010_image_frame_214.png", "ct_train_1010_image_frame_122.png", "ct_train_1002_image_frame_113.png", "ct_train_1010_image_frame_108.png", "ct_train_1010_image_frame_233.png", "ct_train_1019_image_frame_80.png", "ct_train_1019_image_frame_28.png", "ct_train_1002_image_frame_79.png", "ct_train_1002_image_frame_93.png", "ct_train_1019_image_frame_101.png", "ct_train_1002_image_frame_95.png", "ct_train_1002_image_frame_135.png", "ct_train_1002_image_frame_77.png", "ct_train_1012_image_frame_155.png", "ct_train_1012_image_frame_89.png", "ct_train_1010_image_frame_51.png", "ct_train_1019_image_frame_166.png", "ct_train_1002_image_frame_215.png", "ct_train_1010_image_frame_12.png", "ct_train_1019_image_frame_211.png", "ct_train_1012_image_frame_54.png", "ct_train_1019_image_frame_131.png", "ct_train_1019_image_frame_169.png", "ct_train_1019_image_frame_61.png", "ct_train_1010_image_frame_162.png", "ct_train_1010_image_frame_255.png", "ct_train_1010_image_frame_269.png", "ct_train_1019_image_frame_176.png", "ct_train_1002_image_frame_2.png", "ct_train_1002_image_frame_114.png", "ct_train_1002_image_frame_34.png", 
"ct_train_1010_image_frame_216.png", "ct_train_1002_image_frame_50.png", "ct_train_1019_image_frame_201.png", "ct_train_1002_image_frame_117.png", "ct_train_1002_image_frame_222.png", "ct_train_1019_image_frame_209.png", "ct_train_1019_image_frame_22.png", "ct_train_1002_image_frame_202.png", "ct_train_1019_image_frame_183.png", "ct_train_1010_image_frame_170.png", "ct_train_1012_image_frame_21.png", "ct_train_1019_image_frame_167.png", "ct_train_1010_image_frame_57.png", "ct_train_1002_image_frame_145.png", "ct_train_1012_image_frame_33.png", "ct_train_1010_image_frame_130.png", "ct_train_1019_image_frame_171.png", "ct_train_1010_image_frame_112.png", "ct_train_1010_image_frame_147.png", "ct_train_1002_image_frame_211.png", "ct_train_1010_image_frame_180.png", "ct_train_1019_image_frame_24.png", "ct_train_1019_image_frame_256.png", "ct_train_1002_image_frame_212.png", "ct_train_1010_image_frame_146.png", "ct_train_1002_image_frame_112.png", "ct_train_1010_image_frame_45.png", "ct_train_1002_image_frame_70.png", "ct_train_1019_image_frame_96.png", "ct_train_1012_image_frame_90.png", "ct_train_1010_image_frame_33.png", "ct_train_1019_image_frame_84.png", "ct_train_1002_image_frame_140.png", "ct_train_1019_image_frame_32.png", "ct_train_1010_image_frame_116.png", "ct_train_1012_image_frame_163.png", "ct_train_1019_image_frame_286.png", "ct_train_1002_image_frame_197.png", "ct_train_1012_image_frame_17.png", "ct_train_1012_image_frame_87.png", "ct_train_1012_image_frame_97.png", "ct_train_1019_image_frame_133.png", "ct_train_1010_image_frame_61.png", "ct_train_1002_image_frame_236.png", "ct_train_1010_image_frame_127.png", "ct_train_1002_image_frame_42.png", "ct_train_1012_image_frame_18.png", "ct_train_1002_image_frame_120.png", "ct_train_1019_image_frame_124.png", "ct_train_1019_image_frame_192.png", "ct_train_1012_image_frame_7.png", "ct_train_1012_image_frame_121.png", "ct_train_1010_image_frame_71.png", "ct_train_1002_image_frame_171.png", "ct_train_1002_image_frame_76.png", "ct_train_1010_image_frame_14.png", "ct_train_1019_image_frame_248.png", "ct_train_1010_image_frame_188.png", "ct_train_1010_image_frame_261.png", "ct_train_1010_image_frame_153.png", "ct_train_1019_image_frame_3.png", "ct_train_1019_image_frame_78.png", "ct_train_1012_image_frame_79.png", "ct_train_1012_image_frame_129.png", "ct_train_1010_image_frame_65.png", "ct_train_1010_image_frame_19.png", "ct_train_1010_image_frame_96.png", "ct_train_1012_image_frame_68.png", "ct_train_1012_image_frame_36.png", "ct_train_1012_image_frame_141.png", "ct_train_1002_image_frame_160.png", "ct_train_1002_image_frame_55.png", "ct_train_1010_image_frame_109.png", "ct_train_1002_image_frame_37.png", "ct_train_1010_image_frame_213.png", "ct_train_1002_image_frame_102.png", "ct_train_1012_image_frame_25.png", "ct_train_1019_image_frame_16.png", "ct_train_1019_image_frame_36.png", "ct_train_1012_image_frame_134.png", "ct_train_1019_image_frame_50.png", "ct_train_1002_image_frame_91.png", "ct_train_1012_image_frame_162.png", "ct_train_1019_image_frame_67.png", "ct_train_1010_image_frame_69.png", "ct_train_1012_image_frame_150.png", "ct_train_1012_image_frame_114.png", "ct_train_1019_image_frame_104.png", "ct_train_1010_image_frame_105.png", "ct_train_1019_image_frame_208.png", "ct_train_1019_image_frame_281.png", "ct_train_1002_image_frame_206.png", "ct_train_1010_image_frame_126.png", "ct_train_1002_image_frame_137.png", "ct_train_1019_image_frame_19.png", "ct_train_1002_image_frame_8.png", "ct_train_1010_image_frame_242.png", 
"ct_train_1019_image_frame_88.png", "ct_train_1019_image_frame_140.png", "ct_train_1002_image_frame_234.png", "ct_train_1010_image_frame_47.png", "ct_train_1010_image_frame_32.png", "ct_train_1019_image_frame_40.png", "ct_train_1002_image_frame_89.png", "ct_train_1019_image_frame_154.png", "ct_train_1010_image_frame_110.png", "ct_train_1010_image_frame_18.png", "ct_train_1010_image_frame_88.png", "ct_train_1010_image_frame_154.png", "ct_train_1010_image_frame_231.png", "ct_train_1019_image_frame_228.png", "ct_train_1019_image_frame_272.png", "ct_train_1002_image_frame_161.png", "ct_train_1010_image_frame_25.png", "ct_train_1019_image_frame_82.png", "ct_train_1010_image_frame_158.png", "ct_train_1019_image_frame_289.png", "ct_train_1002_image_frame_16.png", "ct_train_1010_image_frame_138.png", "ct_train_1010_image_frame_173.png", "ct_train_1019_image_frame_99.png", "ct_train_1002_image_frame_237.png", "ct_train_1012_image_frame_158.png", "ct_train_1002_image_frame_155.png", "ct_train_1012_image_frame_13.png", "ct_train_1010_image_frame_144.png", "ct_train_1012_image_frame_104.png", "ct_train_1010_image_frame_226.png", "ct_train_1002_image_frame_18.png", "ct_train_1010_image_frame_218.png", "ct_train_1019_image_frame_219.png", "ct_train_1002_image_frame_231.png", "ct_train_1012_image_frame_15.png", "ct_train_1010_image_frame_93.png", "ct_train_1019_image_frame_137.png", "ct_train_1019_image_frame_188.png", "ct_train_1002_image_frame_130.png", "ct_train_1019_image_frame_72.png", "ct_train_1019_image_frame_223.png", "ct_train_1012_image_frame_8.png", "ct_train_1002_image_frame_53.png", "ct_train_1010_image_frame_149.png", "ct_train_1012_image_frame_100.png", "ct_train_1002_image_frame_199.png", "ct_train_1002_image_frame_164.png", "ct_train_1002_image_frame_139.png", "ct_train_1010_image_frame_270.png", "ct_train_1019_image_frame_216.png", "ct_train_1002_image_frame_26.png", "ct_train_1012_image_frame_11.png", "ct_train_1012_image_frame_102.png", "ct_train_1019_image_frame_292.png", "ct_train_1019_image_frame_146.png", "ct_train_1010_image_frame_246.png", "ct_train_1010_image_frame_26.png", "ct_train_1019_image_frame_249.png", "ct_train_1019_image_frame_48.png", "ct_train_1010_image_frame_90.png", "ct_train_1019_image_frame_194.png", "ct_train_1002_image_frame_15.png", "ct_train_1012_image_frame_164.png", "ct_train_1012_image_frame_6.png", "ct_train_1002_image_frame_97.png", "ct_train_1010_image_frame_13.png", "ct_train_1002_image_frame_101.png", "ct_train_1019_image_frame_71.png", "ct_train_1019_image_frame_35.png", "ct_train_1019_image_frame_246.png", "ct_train_1010_image_frame_62.png", "ct_train_1010_image_frame_97.png", "ct_train_1002_image_frame_166.png", "ct_train_1010_image_frame_166.png", "ct_train_1002_image_frame_127.png", "ct_train_1019_image_frame_212.png", "ct_train_1010_image_frame_189.png", "ct_train_1019_image_frame_260.png", "ct_train_1010_image_frame_194.png", "ct_train_1012_image_frame_160.png", "ct_train_1010_image_frame_185.png", "ct_train_1012_image_frame_120.png", "ct_train_1019_image_frame_156.png", "ct_train_1012_image_frame_48.png", "ct_train_1012_image_frame_41.png", "ct_train_1019_image_frame_203.png", "ct_train_1012_image_frame_143.png", "ct_train_1002_image_frame_17.png", "ct_train_1019_image_frame_274.png", "ct_train_1019_image_frame_237.png", "ct_train_1010_image_frame_68.png", "ct_train_1010_image_frame_117.png", "ct_train_1019_image_frame_231.png", "ct_train_1012_image_frame_84.png", "ct_train_1019_image_frame_110.png", "ct_train_1010_image_frame_193.png", 
"ct_train_1019_image_frame_162.png", "ct_train_1010_image_frame_111.png", "ct_train_1012_image_frame_118.png", "ct_train_1010_image_frame_64.png", "ct_train_1010_image_frame_260.png", "ct_train_1012_image_frame_43.png"], "train_labels": ["ct_train_1008_label_frame_178.png", "ct_train_1018_label_frame_5.png", "ct_train_1008_label_frame_69.png", "ct_train_1001_label_frame_122.png", "ct_train_1003_label_frame_186.png", "ct_train_1001_label_frame_124.png", "ct_train_1014_label_frame_61.png", "ct_train_1015_label_frame_195.png", "ct_train_1003_label_frame_95.png", "ct_train_1011_label_frame_235.png", "ct_train_1011_label_frame_29.png", "ct_train_1015_label_frame_229.png", "ct_train_1015_label_frame_158.png", "ct_train_1018_label_frame_90.png", "ct_train_1011_label_frame_99.png", "ct_train_1014_label_frame_191.png", "ct_train_1001_label_frame_69.png", "ct_train_1016_label_frame_40.png", "ct_train_1001_label_frame_192.png", "ct_train_1008_label_frame_73.png", "ct_train_1015_label_frame_162.png", "ct_train_1003_label_frame_164.png", "ct_train_1016_label_frame_26.png", "ct_train_1018_label_frame_86.png", "ct_train_1016_label_frame_310.png", "ct_train_1006_label_frame_237.png", "ct_train_1020_label_frame_314.png", "ct_train_1017_label_frame_223.png", "ct_train_1015_label_frame_214.png", "ct_train_1015_label_frame_12.png", "ct_train_1018_label_frame_185.png", "ct_train_1006_label_frame_227.png", "ct_train_1005_label_frame_176.png", "ct_train_1011_label_frame_44.png", "ct_train_1006_label_frame_128.png", "ct_train_1018_label_frame_2.png", "ct_train_1009_label_frame_18.png", "ct_train_1020_label_frame_232.png", "ct_train_1003_label_frame_218.png", "ct_train_1001_label_frame_161.png", "ct_train_1009_label_frame_7.png", "ct_train_1011_label_frame_51.png", "ct_train_1014_label_frame_64.png", "ct_train_1003_label_frame_284.png", "ct_train_1008_label_frame_47.png", "ct_train_1016_label_frame_236.png", "ct_train_1018_label_frame_91.png", "ct_train_1020_label_frame_334.png", "ct_train_1016_label_frame_282.png", "ct_train_1020_label_frame_263.png", "ct_train_1018_label_frame_124.png", "ct_train_1003_label_frame_135.png", "ct_train_1005_label_frame_73.png", "ct_train_1003_label_frame_131.png", "ct_train_1016_label_frame_0.png", "ct_train_1006_label_frame_90.png", "ct_train_1009_label_frame_224.png", "ct_train_1008_label_frame_115.png", "ct_train_1001_label_frame_82.png", "ct_train_1008_label_frame_38.png", "ct_train_1014_label_frame_5.png", "ct_train_1011_label_frame_174.png", "ct_train_1001_label_frame_264.png", "ct_train_1017_label_frame_145.png", "ct_train_1020_label_frame_155.png", "ct_train_1018_label_frame_56.png", "ct_train_1020_label_frame_359.png", "ct_train_1015_label_frame_42.png", "ct_train_1001_label_frame_98.png", "ct_train_1016_label_frame_41.png", "ct_train_1011_label_frame_205.png", "ct_train_1014_label_frame_171.png", "ct_train_1008_label_frame_199.png", "ct_train_1016_label_frame_102.png", "ct_train_1020_label_frame_42.png", "ct_train_1020_label_frame_247.png", "ct_train_1006_label_frame_214.png", "ct_train_1011_label_frame_21.png", "ct_train_1009_label_frame_285.png", "ct_train_1017_label_frame_235.png", "ct_train_1015_label_frame_108.png", "ct_train_1014_label_frame_176.png", "ct_train_1001_label_frame_94.png", "ct_train_1008_label_frame_133.png", "ct_train_1008_label_frame_114.png", "ct_train_1003_label_frame_237.png", "ct_train_1008_label_frame_175.png", "ct_train_1006_label_frame_72.png", "ct_train_1017_label_frame_282.png", "ct_train_1020_label_frame_266.png", 
"ct_train_1016_label_frame_270.png", "ct_train_1016_label_frame_332.png", "ct_train_1014_label_frame_124.png", "ct_train_1014_label_frame_284.png", "ct_train_1006_label_frame_162.png", "ct_train_1014_label_frame_270.png", "ct_train_1018_label_frame_55.png", "ct_train_1016_label_frame_89.png", "ct_train_1020_label_frame_18.png", "ct_train_1016_label_frame_186.png", "ct_train_1020_label_frame_63.png", "ct_train_1017_label_frame_179.png", "ct_train_1018_label_frame_72.png", "ct_train_1016_label_frame_114.png", "ct_train_1014_label_frame_121.png", "ct_train_1015_label_frame_65.png", "ct_train_1017_label_frame_61.png", "ct_train_1001_label_frame_269.png", "ct_train_1001_label_frame_171.png", "ct_train_1011_label_frame_28.png", "ct_train_1001_label_frame_86.png", "ct_train_1020_label_frame_174.png", "ct_train_1005_label_frame_11.png", "ct_train_1018_label_frame_184.png", "ct_train_1017_label_frame_186.png", "ct_train_1014_label_frame_20.png", "ct_train_1008_label_frame_75.png", "ct_train_1015_label_frame_169.png", "ct_train_1020_label_frame_54.png", "ct_train_1015_label_frame_238.png", "ct_train_1014_label_frame_226.png", "ct_train_1011_label_frame_217.png", "ct_train_1009_label_frame_202.png", "ct_train_1011_label_frame_82.png", "ct_train_1006_label_frame_131.png", "ct_train_1018_label_frame_186.png", "ct_train_1001_label_frame_158.png", "ct_train_1020_label_frame_103.png", "ct_train_1015_label_frame_283.png", "ct_train_1018_label_frame_89.png", "ct_train_1015_label_frame_3.png", "ct_train_1003_label_frame_126.png", "ct_train_1020_label_frame_251.png", "ct_train_1001_label_frame_355.png", "ct_train_1003_label_frame_143.png", "ct_train_1001_label_frame_209.png", "ct_train_1006_label_frame_84.png", "ct_train_1020_label_frame_191.png", "ct_train_1014_label_frame_56.png", "ct_train_1015_label_frame_174.png", "ct_train_1018_label_frame_37.png", "ct_train_1001_label_frame_172.png", "ct_train_1005_label_frame_42.png", "ct_train_1014_label_frame_83.png", "ct_train_1006_label_frame_153.png", "ct_train_1018_label_frame_12.png", "ct_train_1015_label_frame_244.png", "ct_train_1017_label_frame_236.png", "ct_train_1020_label_frame_157.png", "ct_train_1009_label_frame_178.png", "ct_train_1003_label_frame_29.png", "ct_train_1017_label_frame_14.png", "ct_train_1016_label_frame_85.png", "ct_train_1003_label_frame_116.png", "ct_train_1020_label_frame_101.png", "ct_train_1016_label_frame_63.png", "ct_train_1006_label_frame_241.png", "ct_train_1014_label_frame_183.png", "ct_train_1016_label_frame_158.png", "ct_train_1018_label_frame_33.png", "ct_train_1020_label_frame_108.png", "ct_train_1014_label_frame_161.png", "ct_train_1014_label_frame_93.png", "ct_train_1015_label_frame_266.png", "ct_train_1017_label_frame_161.png", "ct_train_1006_label_frame_230.png", "ct_train_1017_label_frame_187.png", "ct_train_1011_label_frame_46.png", "ct_train_1009_label_frame_12.png", "ct_train_1005_label_frame_65.png", "ct_train_1003_label_frame_47.png", "ct_train_1014_label_frame_282.png", "ct_train_1008_label_frame_64.png", "ct_train_1006_label_frame_197.png", "ct_train_1006_label_frame_146.png", "ct_train_1016_label_frame_327.png", "ct_train_1005_label_frame_129.png", "ct_train_1001_label_frame_48.png", "ct_train_1003_label_frame_269.png", "ct_train_1014_label_frame_43.png", "ct_train_1014_label_frame_324.png", "ct_train_1020_label_frame_84.png", "ct_train_1011_label_frame_149.png", "ct_train_1005_label_frame_110.png", "ct_train_1003_label_frame_200.png", "ct_train_1003_label_frame_58.png", "ct_train_1001_label_frame_6.png", 
"ct_train_1003_label_frame_44.png", "ct_train_1015_label_frame_92.png", "ct_train_1001_label_frame_157.png", "ct_train_1005_label_frame_107.png", "ct_train_1014_label_frame_136.png", "ct_train_1020_label_frame_351.png", "ct_train_1009_label_frame_270.png", "ct_train_1006_label_frame_18.png", "ct_train_1011_label_frame_223.png", "ct_train_1017_label_frame_252.png", "ct_train_1005_label_frame_136.png", "ct_train_1003_label_frame_161.png", "ct_train_1016_label_frame_174.png", "ct_train_1014_label_frame_12.png", "ct_train_1018_label_frame_161.png", "ct_train_1020_label_frame_111.png", "ct_train_1006_label_frame_73.png", "ct_train_1006_label_frame_210.png", "ct_train_1017_label_frame_255.png", "ct_train_1014_label_frame_175.png", "ct_train_1014_label_frame_118.png", "ct_train_1008_label_frame_210.png", "ct_train_1014_label_frame_225.png", "ct_train_1009_label_frame_173.png", "ct_train_1014_label_frame_204.png", "ct_train_1003_label_frame_109.png", "ct_train_1016_label_frame_197.png", "ct_train_1003_label_frame_35.png", "ct_train_1011_label_frame_236.png", "ct_train_1005_label_frame_5.png", "ct_train_1017_label_frame_166.png", "ct_train_1001_label_frame_67.png", "ct_train_1015_label_frame_21.png", "ct_train_1017_label_frame_40.png", "ct_train_1008_label_frame_87.png", "ct_train_1011_label_frame_151.png", "ct_train_1006_label_frame_65.png", "ct_train_1020_label_frame_64.png", "ct_train_1003_label_frame_91.png", "ct_train_1005_label_frame_115.png", "ct_train_1015_label_frame_211.png", "ct_train_1008_label_frame_61.png", "ct_train_1016_label_frame_146.png", "ct_train_1008_label_frame_19.png", "ct_train_1001_label_frame_62.png", "ct_train_1009_label_frame_169.png", "ct_train_1017_label_frame_117.png", "ct_train_1011_label_frame_40.png", "ct_train_1014_label_frame_156.png", "ct_train_1017_label_frame_78.png", "ct_train_1020_label_frame_332.png", "ct_train_1009_label_frame_113.png", "ct_train_1001_label_frame_151.png", "ct_train_1001_label_frame_195.png", "ct_train_1020_label_frame_126.png", "ct_train_1014_label_frame_76.png", "ct_train_1014_label_frame_207.png", "ct_train_1015_label_frame_13.png", "ct_train_1001_label_frame_245.png", "ct_train_1017_label_frame_165.png", "ct_train_1015_label_frame_72.png", "ct_train_1015_label_frame_100.png", "ct_train_1017_label_frame_105.png", "ct_train_1009_label_frame_108.png", "ct_train_1014_label_frame_78.png", "ct_train_1005_label_frame_105.png", "ct_train_1020_label_frame_292.png", "ct_train_1003_label_frame_282.png", "ct_train_1001_label_frame_13.png", "ct_train_1008_label_frame_129.png", "ct_train_1015_label_frame_243.png", "ct_train_1014_label_frame_15.png", "ct_train_1009_label_frame_46.png", "ct_train_1006_label_frame_66.png", "ct_train_1009_label_frame_184.png", "ct_train_1020_label_frame_107.png", "ct_train_1015_label_frame_137.png", "ct_train_1017_label_frame_132.png", "ct_train_1001_label_frame_47.png", "ct_train_1016_label_frame_162.png", "ct_train_1003_label_frame_89.png", "ct_train_1011_label_frame_20.png", "ct_train_1003_label_frame_223.png", "ct_train_1001_label_frame_197.png", "ct_train_1020_label_frame_32.png", "ct_train_1009_label_frame_167.png", "ct_train_1001_label_frame_249.png", "ct_train_1006_label_frame_242.png", "ct_train_1003_label_frame_265.png", "ct_train_1003_label_frame_16.png", "ct_train_1009_label_frame_225.png", "ct_train_1014_label_frame_219.png", "ct_train_1001_label_frame_54.png", "ct_train_1014_label_frame_296.png", "ct_train_1020_label_frame_274.png", "ct_train_1006_label_frame_5.png", "ct_train_1001_label_frame_57.png", 
"ct_train_1001_label_frame_119.png", "ct_train_1018_label_frame_118.png", "ct_train_1020_label_frame_315.png", "ct_train_1001_label_frame_252.png", "ct_train_1017_label_frame_203.png", "ct_train_1014_label_frame_108.png", "ct_train_1015_label_frame_51.png", "ct_train_1009_label_frame_214.png", "ct_train_1006_label_frame_190.png", "ct_train_1006_label_frame_48.png", "ct_train_1011_label_frame_210.png", "ct_train_1001_label_frame_100.png", "ct_train_1005_label_frame_163.png", "ct_train_1018_label_frame_76.png", "ct_train_1005_label_frame_147.png", "ct_train_1001_label_frame_17.png", "ct_train_1011_label_frame_85.png", "ct_train_1016_label_frame_194.png", "ct_train_1018_label_frame_92.png", "ct_train_1014_label_frame_306.png", "ct_train_1015_label_frame_194.png", "ct_train_1014_label_frame_86.png", "ct_train_1020_label_frame_118.png", "ct_train_1017_label_frame_59.png", "ct_train_1018_label_frame_138.png", "ct_train_1008_label_frame_31.png", "ct_train_1020_label_frame_169.png", "ct_train_1011_label_frame_154.png", "ct_train_1006_label_frame_175.png", "ct_train_1009_label_frame_69.png", "ct_train_1008_label_frame_215.png", "ct_train_1015_label_frame_38.png", "ct_train_1016_label_frame_95.png", "ct_train_1006_label_frame_215.png", "ct_train_1017_label_frame_218.png", "ct_train_1003_label_frame_25.png", "ct_train_1003_label_frame_154.png", "ct_train_1017_label_frame_202.png", "ct_train_1020_label_frame_102.png", "ct_train_1009_label_frame_1.png", "ct_train_1006_label_frame_64.png", "ct_train_1016_label_frame_305.png", "ct_train_1016_label_frame_234.png", "ct_train_1003_label_frame_61.png", "ct_train_1020_label_frame_276.png", "ct_train_1003_label_frame_146.png", "ct_train_1003_label_frame_174.png", "ct_train_1006_label_frame_44.png", "ct_train_1014_label_frame_13.png", "ct_train_1015_label_frame_104.png", "ct_train_1020_label_frame_127.png", "ct_train_1001_label_frame_345.png", "ct_train_1008_label_frame_117.png", "ct_train_1016_label_frame_161.png", "ct_train_1011_label_frame_7.png", "ct_train_1006_label_frame_143.png", "ct_train_1017_label_frame_247.png", "ct_train_1016_label_frame_267.png", "ct_train_1001_label_frame_248.png", "ct_train_1003_label_frame_198.png", "ct_train_1001_label_frame_237.png", "ct_train_1015_label_frame_178.png", "ct_train_1016_label_frame_108.png", "ct_train_1009_label_frame_183.png", "ct_train_1006_label_frame_34.png", "ct_train_1020_label_frame_162.png", "ct_train_1006_label_frame_49.png", "ct_train_1017_label_frame_90.png", "ct_train_1001_label_frame_260.png", "ct_train_1016_label_frame_34.png", "ct_train_1008_label_frame_176.png", "ct_train_1001_label_frame_290.png", "ct_train_1011_label_frame_207.png", "ct_train_1001_label_frame_83.png", "ct_train_1009_label_frame_219.png", "ct_train_1006_label_frame_100.png", "ct_train_1006_label_frame_138.png", "ct_train_1018_label_frame_133.png", "ct_train_1018_label_frame_175.png", "ct_train_1020_label_frame_199.png", "ct_train_1014_label_frame_258.png", "ct_train_1003_label_frame_32.png", "ct_train_1011_label_frame_49.png", "ct_train_1001_label_frame_156.png", "ct_train_1020_label_frame_0.png", "ct_train_1014_label_frame_260.png", "ct_train_1020_label_frame_341.png", "ct_train_1020_label_frame_227.png", "ct_train_1016_label_frame_104.png", "ct_train_1006_label_frame_54.png", "ct_train_1015_label_frame_98.png", "ct_train_1001_label_frame_8.png", "ct_train_1015_label_frame_216.png", "ct_train_1005_label_frame_68.png", "ct_train_1018_label_frame_176.png", "ct_train_1014_label_frame_82.png", "ct_train_1011_label_frame_124.png", 
"ct_train_1015_label_frame_30.png", "ct_train_1016_label_frame_198.png", "ct_train_1008_label_frame_59.png", "ct_train_1020_label_frame_281.png", "ct_train_1008_label_frame_90.png", "ct_train_1016_label_frame_274.png", "ct_train_1020_label_frame_41.png", "ct_train_1003_label_frame_252.png", "ct_train_1008_label_frame_55.png", "ct_train_1001_label_frame_299.png", "ct_train_1017_label_frame_39.png", "ct_train_1005_label_frame_139.png", "ct_train_1008_label_frame_4.png", "ct_train_1014_label_frame_328.png", "ct_train_1016_label_frame_51.png", "ct_train_1018_label_frame_105.png", "ct_train_1017_label_frame_274.png", "ct_train_1001_label_frame_276.png", "ct_train_1008_label_frame_164.png", "ct_train_1016_label_frame_159.png", "ct_train_1020_label_frame_261.png", "ct_train_1001_label_frame_180.png", "ct_train_1017_label_frame_55.png", "ct_train_1006_label_frame_158.png", "ct_train_1006_label_frame_41.png", "ct_train_1006_label_frame_194.png", "ct_train_1020_label_frame_68.png", "ct_train_1006_label_frame_4.png", "ct_train_1017_label_frame_190.png", "ct_train_1001_label_frame_64.png", "ct_train_1020_label_frame_216.png", "ct_train_1009_label_frame_88.png", "ct_train_1014_label_frame_19.png", "ct_train_1015_label_frame_173.png", "ct_train_1020_label_frame_89.png", "ct_train_1014_label_frame_119.png", "ct_train_1011_label_frame_182.png", "ct_train_1015_label_frame_22.png", "ct_train_1008_label_frame_23.png", "ct_train_1001_label_frame_70.png", "ct_train_1015_label_frame_203.png", "ct_train_1001_label_frame_2.png", "ct_train_1016_label_frame_10.png", "ct_train_1011_label_frame_189.png", "ct_train_1016_label_frame_113.png", "ct_train_1015_label_frame_77.png", "ct_train_1015_label_frame_228.png", "ct_train_1015_label_frame_154.png", "ct_train_1020_label_frame_188.png", "ct_train_1005_label_frame_111.png", "ct_train_1016_label_frame_84.png", "ct_train_1020_label_frame_163.png", "ct_train_1014_label_frame_47.png", "ct_train_1015_label_frame_167.png", "ct_train_1011_label_frame_94.png", "ct_train_1008_label_frame_144.png", "ct_train_1020_label_frame_353.png", "ct_train_1005_label_frame_150.png", "ct_train_1016_label_frame_304.png", "ct_train_1016_label_frame_213.png", "ct_train_1014_label_frame_147.png", "ct_train_1014_label_frame_145.png", "ct_train_1011_label_frame_238.png", "ct_train_1006_label_frame_202.png", "ct_train_1003_label_frame_222.png", "ct_train_1017_label_frame_148.png", "ct_train_1020_label_frame_176.png", "ct_train_1017_label_frame_100.png", "ct_train_1014_label_frame_196.png", "ct_train_1001_label_frame_118.png", "ct_train_1015_label_frame_34.png", "ct_train_1006_label_frame_139.png", "ct_train_1020_label_frame_264.png", "ct_train_1001_label_frame_236.png", "ct_train_1006_label_frame_189.png", "ct_train_1008_label_frame_121.png", "ct_train_1009_label_frame_8.png", "ct_train_1017_label_frame_163.png", "ct_train_1014_label_frame_7.png", "ct_train_1016_label_frame_230.png", "ct_train_1018_label_frame_137.png", "ct_train_1016_label_frame_82.png", "ct_train_1016_label_frame_19.png", "ct_train_1015_label_frame_62.png", "ct_train_1017_label_frame_254.png", "ct_train_1009_label_frame_90.png", "ct_train_1017_label_frame_66.png", "ct_train_1017_label_frame_28.png", "ct_train_1015_label_frame_177.png", "ct_train_1020_label_frame_90.png", "ct_train_1015_label_frame_36.png", "ct_train_1008_label_frame_190.png", "ct_train_1006_label_frame_193.png", "ct_train_1009_label_frame_126.png", "ct_train_1009_label_frame_186.png", "ct_train_1001_label_frame_123.png", "ct_train_1015_label_frame_296.png", 
"ct_train_1005_label_frame_83.png", "ct_train_1016_label_frame_321.png", "ct_train_1009_label_frame_44.png", "ct_train_1015_label_frame_297.png", "ct_train_1015_label_frame_274.png", "ct_train_1001_label_frame_203.png", "ct_train_1016_label_frame_253.png", "ct_train_1003_label_frame_151.png", "ct_train_1005_label_frame_51.png", "ct_train_1005_label_frame_14.png", "ct_train_1011_label_frame_195.png", "ct_train_1017_label_frame_230.png", "ct_train_1016_label_frame_322.png", "ct_train_1001_label_frame_344.png", "ct_train_1016_label_frame_271.png", "ct_train_1003_label_frame_187.png", "ct_train_1006_label_frame_75.png", "ct_train_1016_label_frame_65.png", "ct_train_1009_label_frame_195.png", "ct_train_1008_label_frame_198.png", "ct_train_1014_label_frame_254.png", "ct_train_1020_label_frame_153.png", "ct_train_1016_label_frame_136.png", "ct_train_1015_label_frame_242.png", "ct_train_1017_label_frame_57.png", "ct_train_1006_label_frame_196.png", "ct_train_1009_label_frame_268.png", "ct_train_1016_label_frame_326.png", "ct_train_1003_label_frame_268.png", "ct_train_1005_label_frame_35.png", "ct_train_1009_label_frame_125.png", "ct_train_1016_label_frame_141.png", "ct_train_1003_label_frame_31.png", "ct_train_1006_label_frame_223.png", "ct_train_1003_label_frame_293.png", "ct_train_1008_label_frame_3.png", "ct_train_1001_label_frame_292.png", "ct_train_1006_label_frame_203.png", "ct_train_1020_label_frame_170.png", "ct_train_1009_label_frame_110.png", "ct_train_1011_label_frame_225.png", "ct_train_1016_label_frame_218.png", "ct_train_1016_label_frame_252.png", "ct_train_1008_label_frame_11.png", "ct_train_1020_label_frame_330.png", "ct_train_1017_label_frame_119.png", "ct_train_1001_label_frame_146.png", "ct_train_1001_label_frame_347.png", "ct_train_1014_label_frame_129.png", "ct_train_1015_label_frame_258.png", "ct_train_1015_label_frame_60.png", "ct_train_1014_label_frame_349.png", "ct_train_1015_label_frame_236.png", "ct_train_1011_label_frame_65.png", "ct_train_1014_label_frame_230.png", "ct_train_1018_label_frame_16.png", "ct_train_1009_label_frame_279.png", "ct_train_1001_label_frame_178.png", "ct_train_1009_label_frame_144.png", "ct_train_1020_label_frame_19.png", "ct_train_1016_label_frame_71.png", "ct_train_1009_label_frame_261.png", "ct_train_1005_label_frame_131.png", "ct_train_1003_label_frame_15.png", "ct_train_1016_label_frame_318.png", "ct_train_1017_label_frame_75.png", "ct_train_1003_label_frame_73.png", "ct_train_1006_label_frame_106.png", "ct_train_1016_label_frame_7.png", "ct_train_1015_label_frame_234.png", "ct_train_1009_label_frame_116.png", "ct_train_1003_label_frame_271.png", "ct_train_1015_label_frame_286.png", "ct_train_1017_label_frame_197.png", "ct_train_1014_label_frame_32.png", "ct_train_1009_label_frame_38.png", "ct_train_1009_label_frame_252.png", "ct_train_1014_label_frame_202.png", "ct_train_1020_label_frame_152.png", "ct_train_1005_label_frame_19.png", "ct_train_1017_label_frame_259.png", "ct_train_1018_label_frame_165.png", "ct_train_1001_label_frame_288.png", "ct_train_1015_label_frame_105.png", "ct_train_1014_label_frame_292.png", "ct_train_1008_label_frame_7.png", "ct_train_1015_label_frame_241.png", "ct_train_1016_label_frame_166.png", "ct_train_1006_label_frame_39.png", "ct_train_1001_label_frame_19.png", "ct_train_1018_label_frame_1.png", "ct_train_1017_label_frame_137.png", "ct_train_1018_label_frame_131.png", "ct_train_1001_label_frame_14.png", "ct_train_1016_label_frame_181.png", "ct_train_1015_label_frame_262.png", "ct_train_1003_label_frame_5.png", 
"ct_train_1014_label_frame_157.png", "ct_train_1001_label_frame_186.png", "ct_train_1016_label_frame_312.png", "ct_train_1020_label_frame_12.png", "ct_train_1020_label_frame_291.png", "ct_train_1020_label_frame_145.png", "ct_train_1001_label_frame_149.png", "ct_train_1015_label_frame_257.png", "ct_train_1008_label_frame_110.png", "ct_train_1011_label_frame_237.png", "ct_train_1016_label_frame_66.png", "ct_train_1001_label_frame_218.png", "ct_train_1009_label_frame_257.png", "ct_train_1009_label_frame_249.png", "ct_train_1020_label_frame_35.png", "ct_train_1020_label_frame_326.png", "ct_train_1003_label_frame_88.png", "ct_train_1003_label_frame_227.png", "ct_train_1016_label_frame_216.png", "ct_train_1006_label_frame_46.png", "ct_train_1009_label_frame_75.png", "ct_train_1016_label_frame_106.png", "ct_train_1014_label_frame_277.png", "ct_train_1017_label_frame_99.png", "ct_train_1003_label_frame_179.png", "ct_train_1018_label_frame_110.png", "ct_train_1016_label_frame_145.png", "ct_train_1011_label_frame_212.png", "ct_train_1005_label_frame_26.png", "ct_train_1008_label_frame_81.png", "ct_train_1017_label_frame_104.png", "ct_train_1016_label_frame_323.png", "ct_train_1015_label_frame_122.png", "ct_train_1016_label_frame_325.png", "ct_train_1015_label_frame_90.png", "ct_train_1001_label_frame_71.png", "ct_train_1001_label_frame_341.png", "ct_train_1009_label_frame_80.png", "ct_train_1016_label_frame_2.png", "ct_train_1001_label_frame_141.png", "ct_train_1014_label_frame_269.png", "ct_train_1011_label_frame_71.png", "ct_train_1005_label_frame_21.png", "ct_train_1020_label_frame_301.png", "ct_train_1001_label_frame_165.png", "ct_train_1016_label_frame_273.png", "ct_train_1009_label_frame_79.png", "ct_train_1001_label_frame_65.png", "ct_train_1011_label_frame_80.png", "ct_train_1014_label_frame_248.png", "ct_train_1020_label_frame_234.png", "ct_train_1016_label_frame_125.png", "ct_train_1014_label_frame_249.png", "ct_train_1011_label_frame_35.png", "ct_train_1014_label_frame_220.png", "ct_train_1001_label_frame_4.png", "ct_train_1001_label_frame_282.png", "ct_train_1014_label_frame_159.png", "ct_train_1015_label_frame_44.png", "ct_train_1003_label_frame_261.png", "ct_train_1011_label_frame_155.png", "ct_train_1018_label_frame_78.png", "ct_train_1001_label_frame_136.png", "ct_train_1016_label_frame_16.png", "ct_train_1006_label_frame_114.png", "ct_train_1017_label_frame_8.png", "ct_train_1005_label_frame_47.png", "ct_train_1014_label_frame_235.png", "ct_train_1020_label_frame_49.png", "ct_train_1016_label_frame_83.png", "ct_train_1016_label_frame_255.png", "ct_train_1015_label_frame_76.png", "ct_train_1001_label_frame_30.png", "ct_train_1009_label_frame_142.png", "ct_train_1009_label_frame_265.png", "ct_train_1003_label_frame_259.png", "ct_train_1016_label_frame_129.png", "ct_train_1009_label_frame_165.png", "ct_train_1011_label_frame_150.png", "ct_train_1006_label_frame_113.png", "ct_train_1014_label_frame_141.png", "ct_train_1009_label_frame_170.png", "ct_train_1006_label_frame_21.png", "ct_train_1020_label_frame_286.png", "ct_train_1018_label_frame_103.png", "ct_train_1001_label_frame_210.png", "ct_train_1018_label_frame_17.png", "ct_train_1016_label_frame_191.png", "ct_train_1014_label_frame_291.png", "ct_train_1005_label_frame_165.png", "ct_train_1001_label_frame_155.png", "ct_train_1008_label_frame_72.png", "ct_train_1003_label_frame_67.png", "ct_train_1006_label_frame_78.png", "ct_train_1017_label_frame_159.png", "ct_train_1008_label_frame_151.png", "ct_train_1001_label_frame_145.png", 
"ct_train_1009_label_frame_25.png", "ct_train_1006_label_frame_208.png", "ct_train_1020_label_frame_95.png", "ct_train_1001_label_frame_175.png", "ct_train_1008_label_frame_37.png", "ct_train_1003_label_frame_255.png", "ct_train_1020_label_frame_81.png", "ct_train_1016_label_frame_21.png", "ct_train_1008_label_frame_79.png", "ct_train_1011_label_frame_62.png", "ct_train_1011_label_frame_158.png", "ct_train_1015_label_frame_144.png", "ct_train_1017_label_frame_4.png", "ct_train_1003_label_frame_41.png", "ct_train_1020_label_frame_14.png", "ct_train_1003_label_frame_286.png", "ct_train_1011_label_frame_153.png", "ct_train_1018_label_frame_31.png", "ct_train_1009_label_frame_119.png", "ct_train_1014_label_frame_23.png", "ct_train_1017_label_frame_209.png", "ct_train_1020_label_frame_138.png", "ct_train_1009_label_frame_211.png", "ct_train_1016_label_frame_23.png", "ct_train_1005_label_frame_7.png", "ct_train_1016_label_frame_55.png", "ct_train_1016_label_frame_247.png", "ct_train_1018_label_frame_0.png", "ct_train_1014_label_frame_302.png", "ct_train_1011_label_frame_159.png", "ct_train_1006_label_frame_231.png", "ct_train_1008_label_frame_97.png", "ct_train_1020_label_frame_44.png", "ct_train_1016_label_frame_263.png", "ct_train_1018_label_frame_173.png", "ct_train_1017_label_frame_48.png", "ct_train_1017_label_frame_34.png", "ct_train_1003_label_frame_236.png", "ct_train_1014_label_frame_286.png", "ct_train_1017_label_frame_45.png", "ct_train_1016_label_frame_4.png", "ct_train_1008_label_frame_27.png", "ct_train_1014_label_frame_125.png", "ct_train_1020_label_frame_306.png", "ct_train_1015_label_frame_224.png", "ct_train_1015_label_frame_99.png", "ct_train_1003_label_frame_201.png", "ct_train_1011_label_frame_134.png", "ct_train_1001_label_frame_29.png", "ct_train_1006_label_frame_224.png", "ct_train_1005_label_frame_119.png", "ct_train_1008_label_frame_213.png", "ct_train_1020_label_frame_131.png", "ct_train_1014_label_frame_9.png", "ct_train_1003_label_frame_184.png", "ct_train_1006_label_frame_184.png", "ct_train_1009_label_frame_34.png", "ct_train_1008_label_frame_182.png", "ct_train_1006_label_frame_1.png", "ct_train_1016_label_frame_171.png", "ct_train_1016_label_frame_324.png", "ct_train_1015_label_frame_40.png", "ct_train_1018_label_frame_163.png", "ct_train_1020_label_frame_208.png", "ct_train_1016_label_frame_92.png", "ct_train_1006_label_frame_89.png", "ct_train_1016_label_frame_266.png", "ct_train_1014_label_frame_319.png", "ct_train_1014_label_frame_343.png", "ct_train_1011_label_frame_60.png", "ct_train_1011_label_frame_41.png", "ct_train_1015_label_frame_82.png", "ct_train_1009_label_frame_23.png", "ct_train_1006_label_frame_96.png", "ct_train_1014_label_frame_330.png", "ct_train_1001_label_frame_112.png", "ct_train_1011_label_frame_141.png", "ct_train_1001_label_frame_306.png", "ct_train_1009_label_frame_33.png", "ct_train_1001_label_frame_3.png", "ct_train_1006_label_frame_81.png", "ct_train_1020_label_frame_214.png", "ct_train_1006_label_frame_19.png", "ct_train_1014_label_frame_289.png", "ct_train_1017_label_frame_233.png", "ct_train_1003_label_frame_40.png", "ct_train_1017_label_frame_7.png", "ct_train_1014_label_frame_38.png", "ct_train_1014_label_frame_253.png", "ct_train_1001_label_frame_256.png", "ct_train_1005_label_frame_39.png", "ct_train_1001_label_frame_81.png", "ct_train_1011_label_frame_103.png", "ct_train_1017_label_frame_86.png", "ct_train_1006_label_frame_154.png", "ct_train_1009_label_frame_52.png", "ct_train_1009_label_frame_156.png", 
"ct_train_1016_label_frame_215.png", "ct_train_1015_label_frame_190.png", "ct_train_1020_label_frame_172.png", "ct_train_1017_label_frame_19.png", "ct_train_1014_label_frame_354.png", "ct_train_1009_label_frame_93.png", "ct_train_1009_label_frame_288.png", "ct_train_1009_label_frame_65.png", "ct_train_1020_label_frame_327.png", "ct_train_1017_label_frame_206.png", "ct_train_1015_label_frame_86.png", "ct_train_1009_label_frame_109.png", "ct_train_1017_label_frame_263.png", "ct_train_1001_label_frame_20.png", "ct_train_1003_label_frame_72.png", "ct_train_1005_label_frame_8.png", "ct_train_1001_label_frame_246.png", "ct_train_1003_label_frame_276.png", "ct_train_1008_label_frame_36.png", "ct_train_1011_label_frame_107.png", "ct_train_1011_label_frame_136.png", "ct_train_1011_label_frame_128.png", "ct_train_1006_label_frame_53.png", "ct_train_1020_label_frame_58.png", "ct_train_1001_label_frame_25.png", "ct_train_1009_label_frame_240.png", "ct_train_1003_label_frame_253.png", "ct_train_1017_label_frame_189.png", "ct_train_1020_label_frame_190.png", "ct_train_1005_label_frame_153.png", "ct_train_1011_label_frame_47.png", "ct_train_1016_label_frame_211.png", "ct_train_1009_label_frame_180.png", "ct_train_1008_label_frame_123.png", "ct_train_1018_label_frame_9.png", "ct_train_1020_label_frame_7.png", "ct_train_1001_label_frame_220.png", "ct_train_1001_label_frame_189.png", "ct_train_1020_label_frame_3.png", "ct_train_1020_label_frame_165.png", "ct_train_1016_label_frame_308.png", "ct_train_1006_label_frame_177.png", "ct_train_1016_label_frame_35.png", "ct_train_1015_label_frame_263.png", "ct_train_1008_label_frame_200.png", "ct_train_1014_label_frame_257.png", "ct_train_1016_label_frame_5.png", "ct_train_1020_label_frame_206.png", "ct_train_1020_label_frame_26.png", "ct_train_1011_label_frame_13.png", "ct_train_1015_label_frame_285.png", "ct_train_1008_label_frame_104.png", "ct_train_1009_label_frame_196.png", "ct_train_1015_label_frame_261.png", "ct_train_1018_label_frame_125.png", "ct_train_1014_label_frame_70.png", "ct_train_1016_label_frame_265.png", "ct_train_1016_label_frame_189.png", "ct_train_1020_label_frame_329.png", "ct_train_1015_label_frame_107.png", "ct_train_1011_label_frame_77.png", "ct_train_1020_label_frame_211.png", "ct_train_1011_label_frame_102.png", "ct_train_1005_label_frame_90.png", "ct_train_1006_label_frame_82.png", "ct_train_1017_label_frame_109.png", "ct_train_1008_label_frame_156.png", "ct_train_1008_label_frame_22.png", "ct_train_1011_label_frame_112.png", "ct_train_1014_label_frame_272.png", "ct_train_1005_label_frame_161.png", "ct_train_1016_label_frame_288.png", "ct_train_1014_label_frame_211.png", "ct_train_1014_label_frame_59.png", "ct_train_1006_label_frame_167.png", "ct_train_1020_label_frame_300.png", "ct_train_1016_label_frame_223.png", "ct_train_1006_label_frame_103.png", "ct_train_1020_label_frame_47.png", "ct_train_1001_label_frame_27.png", "ct_train_1016_label_frame_76.png", "ct_train_1017_label_frame_199.png", "ct_train_1020_label_frame_148.png", "ct_train_1009_label_frame_5.png", "ct_train_1011_label_frame_152.png", "ct_train_1020_label_frame_344.png", "ct_train_1014_label_frame_2.png", "ct_train_1015_label_frame_279.png", "ct_train_1020_label_frame_114.png", "ct_train_1005_label_frame_44.png", "ct_train_1020_label_frame_203.png", "ct_train_1018_label_frame_143.png", "ct_train_1005_label_frame_71.png", "ct_train_1016_label_frame_207.png", "ct_train_1011_label_frame_84.png", "ct_train_1016_label_frame_93.png", "ct_train_1011_label_frame_190.png", 
"ct_train_1001_label_frame_310.png", "ct_train_1005_label_frame_99.png", "ct_train_1005_label_frame_109.png", "ct_train_1014_label_frame_100.png", "ct_train_1011_label_frame_131.png", "ct_train_1014_label_frame_35.png", "ct_train_1009_label_frame_149.png", "ct_train_1017_label_frame_118.png", "ct_train_1016_label_frame_163.png", "ct_train_1003_label_frame_241.png", "ct_train_1011_label_frame_100.png", "ct_train_1020_label_frame_60.png", "ct_train_1017_label_frame_192.png", "ct_train_1018_label_frame_108.png", "ct_train_1017_label_frame_51.png", "ct_train_1008_label_frame_172.png", "ct_train_1016_label_frame_116.png", "ct_train_1011_label_frame_93.png", "ct_train_1009_label_frame_281.png", "ct_train_1008_label_frame_93.png", "ct_train_1009_label_frame_255.png", "ct_train_1016_label_frame_233.png", "ct_train_1017_label_frame_92.png", "ct_train_1020_label_frame_78.png", "ct_train_1014_label_frame_89.png", "ct_train_1008_label_frame_150.png", "ct_train_1020_label_frame_139.png", "ct_train_1017_label_frame_27.png", "ct_train_1017_label_frame_22.png", "ct_train_1014_label_frame_30.png", "ct_train_1003_label_frame_290.png", "ct_train_1014_label_frame_244.png", "ct_train_1003_label_frame_74.png", "ct_train_1020_label_frame_146.png", "ct_train_1008_label_frame_194.png", "ct_train_1020_label_frame_52.png", "ct_train_1020_label_frame_125.png", "ct_train_1015_label_frame_11.png", "ct_train_1015_label_frame_129.png", "ct_train_1020_label_frame_4.png", "ct_train_1009_label_frame_254.png", "ct_train_1017_label_frame_251.png", "ct_train_1001_label_frame_225.png", "ct_train_1001_label_frame_133.png", "ct_train_1014_label_frame_77.png", "ct_train_1014_label_frame_301.png", "ct_train_1011_label_frame_45.png", "ct_train_1006_label_frame_173.png", "ct_train_1018_label_frame_41.png", "ct_train_1009_label_frame_256.png", "ct_train_1014_label_frame_63.png", "ct_train_1014_label_frame_283.png", "ct_train_1006_label_frame_206.png", "ct_train_1008_label_frame_174.png", "ct_train_1014_label_frame_240.png", "ct_train_1016_label_frame_205.png", "ct_train_1015_label_frame_189.png", "ct_train_1008_label_frame_57.png", "ct_train_1014_label_frame_180.png", "ct_train_1020_label_frame_245.png", "ct_train_1006_label_frame_228.png", "ct_train_1015_label_frame_246.png", "ct_train_1017_label_frame_127.png", "ct_train_1014_label_frame_251.png", "ct_train_1006_label_frame_245.png", "ct_train_1003_label_frame_128.png", "ct_train_1011_label_frame_39.png", "ct_train_1016_label_frame_184.png", "ct_train_1001_label_frame_115.png", "ct_train_1003_label_frame_233.png", "ct_train_1009_label_frame_41.png", "ct_train_1001_label_frame_152.png", "ct_train_1016_label_frame_242.png", "ct_train_1011_label_frame_66.png", "ct_train_1016_label_frame_192.png", "ct_train_1017_label_frame_89.png", "ct_train_1005_label_frame_92.png", "ct_train_1015_label_frame_113.png", "ct_train_1017_label_frame_95.png", "ct_train_1008_label_frame_126.png", "ct_train_1018_label_frame_158.png", "ct_train_1006_label_frame_174.png", "ct_train_1016_label_frame_168.png", "ct_train_1015_label_frame_5.png", "ct_train_1016_label_frame_330.png", "ct_train_1005_label_frame_58.png", "ct_train_1011_label_frame_176.png", "ct_train_1003_label_frame_8.png", "ct_train_1018_label_frame_112.png", "ct_train_1001_label_frame_268.png", "ct_train_1017_label_frame_234.png", "ct_train_1016_label_frame_200.png", "ct_train_1017_label_frame_177.png", "ct_train_1020_label_frame_29.png", "ct_train_1016_label_frame_243.png", "ct_train_1017_label_frame_23.png", "ct_train_1016_label_frame_107.png", 
"ct_train_1008_label_frame_142.png", "ct_train_1011_label_frame_17.png", "ct_train_1014_label_frame_75.png", "ct_train_1020_label_frame_345.png", "ct_train_1016_label_frame_228.png", "ct_train_1011_label_frame_144.png", "ct_train_1016_label_frame_232.png", "ct_train_1003_label_frame_104.png", "ct_train_1003_label_frame_80.png", "ct_train_1009_label_frame_114.png", "ct_train_1014_label_frame_326.png", "ct_train_1009_label_frame_96.png", "ct_train_1016_label_frame_286.png", "ct_train_1008_label_frame_201.png", "ct_train_1003_label_frame_97.png", "ct_train_1018_label_frame_120.png", "ct_train_1006_label_frame_181.png", "ct_train_1014_label_frame_14.png", "ct_train_1014_label_frame_229.png", "ct_train_1015_label_frame_219.png", "ct_train_1016_label_frame_112.png", "ct_train_1016_label_frame_13.png", "ct_train_1020_label_frame_354.png", "ct_train_1020_label_frame_109.png", "ct_train_1015_label_frame_290.png", "ct_train_1020_label_frame_88.png", "ct_train_1005_label_frame_152.png", "ct_train_1017_label_frame_207.png", "ct_train_1005_label_frame_155.png", "ct_train_1015_label_frame_88.png", "ct_train_1001_label_frame_309.png", "ct_train_1001_label_frame_352.png", "ct_train_1005_label_frame_164.png", "ct_train_1016_label_frame_179.png", "ct_train_1005_label_frame_132.png", "ct_train_1001_label_frame_261.png", "ct_train_1005_label_frame_91.png", "ct_train_1006_label_frame_57.png", "ct_train_1001_label_frame_267.png", "ct_train_1003_label_frame_0.png", "ct_train_1001_label_frame_330.png", "ct_train_1016_label_frame_258.png", "ct_train_1008_label_frame_206.png", "ct_train_1018_label_frame_42.png", "ct_train_1003_label_frame_57.png", "ct_train_1017_label_frame_256.png", "ct_train_1008_label_frame_9.png", "ct_train_1017_label_frame_129.png", "ct_train_1014_label_frame_143.png", "ct_train_1001_label_frame_96.png", "ct_train_1020_label_frame_346.png", "ct_train_1009_label_frame_63.png", "ct_train_1005_label_frame_4.png", "ct_train_1011_label_frame_97.png", "ct_train_1020_label_frame_242.png", "ct_train_1001_label_frame_160.png", "ct_train_1001_label_frame_321.png", "ct_train_1017_label_frame_88.png", "ct_train_1020_label_frame_67.png", "ct_train_1003_label_frame_152.png", "ct_train_1018_label_frame_7.png", "ct_train_1009_label_frame_234.png", "ct_train_1006_label_frame_0.png", "ct_train_1016_label_frame_79.png", "ct_train_1009_label_frame_282.png", "ct_train_1016_label_frame_101.png", "ct_train_1009_label_frame_230.png", "ct_train_1016_label_frame_176.png", "ct_train_1008_label_frame_214.png", "ct_train_1015_label_frame_260.png", "ct_train_1001_label_frame_59.png", "ct_train_1006_label_frame_8.png", "ct_train_1020_label_frame_218.png", "ct_train_1003_label_frame_194.png", "ct_train_1008_label_frame_98.png", "ct_train_1014_label_frame_259.png", "ct_train_1011_label_frame_12.png", "ct_train_1016_label_frame_9.png", "ct_train_1009_label_frame_218.png", "ct_train_1017_label_frame_37.png", "ct_train_1005_label_frame_60.png", "ct_train_1018_label_frame_51.png", "ct_train_1009_label_frame_210.png", "ct_train_1001_label_frame_271.png", "ct_train_1011_label_frame_74.png", "ct_train_1014_label_frame_148.png", "ct_train_1015_label_frame_57.png", "ct_train_1015_label_frame_293.png", "ct_train_1017_label_frame_96.png", "ct_train_1008_label_frame_54.png", "ct_train_1015_label_frame_84.png", "ct_train_1009_label_frame_290.png", "ct_train_1014_label_frame_42.png", "ct_train_1014_label_frame_165.png", "ct_train_1008_label_frame_136.png", "ct_train_1011_label_frame_89.png", "ct_train_1015_label_frame_91.png", 
"ct_train_1011_label_frame_201.png", "ct_train_1003_label_frame_214.png", "ct_train_1011_label_frame_186.png", "ct_train_1001_label_frame_56.png", "ct_train_1016_label_frame_275.png", "ct_train_1016_label_frame_306.png", "ct_train_1005_label_frame_121.png", "ct_train_1016_label_frame_206.png", "ct_train_1001_label_frame_311.png", "ct_train_1008_label_frame_193.png", "ct_train_1017_label_frame_211.png", "ct_train_1017_label_frame_264.png", "ct_train_1014_label_frame_274.png", "ct_train_1001_label_frame_110.png", "ct_train_1011_label_frame_55.png", "ct_train_1015_label_frame_64.png", "ct_train_1018_label_frame_164.png", "ct_train_1014_label_frame_53.png", "ct_train_1020_label_frame_233.png", "ct_train_1006_label_frame_32.png", "ct_train_1020_label_frame_252.png", "ct_train_1001_label_frame_338.png", "ct_train_1009_label_frame_83.png", "ct_train_1020_label_frame_50.png", "ct_train_1003_label_frame_68.png", "ct_train_1001_label_frame_362.png", "ct_train_1006_label_frame_71.png", "ct_train_1016_label_frame_315.png", "ct_train_1016_label_frame_187.png", "ct_train_1020_label_frame_1.png", "ct_train_1001_label_frame_138.png", "ct_train_1001_label_frame_97.png", "ct_train_1015_label_frame_54.png", "ct_train_1015_label_frame_18.png", "ct_train_1018_label_frame_38.png", "ct_train_1016_label_frame_195.png", "ct_train_1011_label_frame_213.png", "ct_train_1016_label_frame_241.png", "ct_train_1001_label_frame_89.png", "ct_train_1016_label_frame_110.png", "ct_train_1014_label_frame_36.png", "ct_train_1003_label_frame_71.png", "ct_train_1006_label_frame_74.png", "ct_train_1005_label_frame_24.png", "ct_train_1009_label_frame_232.png", "ct_train_1020_label_frame_254.png", "ct_train_1016_label_frame_229.png", "ct_train_1014_label_frame_184.png", "ct_train_1001_label_frame_300.png", "ct_train_1015_label_frame_212.png", "ct_train_1001_label_frame_303.png", "ct_train_1009_label_frame_197.png", "ct_train_1009_label_frame_250.png", "ct_train_1003_label_frame_183.png", "ct_train_1014_label_frame_341.png", "ct_train_1016_label_frame_12.png", "ct_train_1001_label_frame_85.png", "ct_train_1017_label_frame_151.png", "ct_train_1020_label_frame_130.png", "ct_train_1016_label_frame_155.png", "ct_train_1014_label_frame_104.png", "ct_train_1018_label_frame_107.png", "ct_train_1003_label_frame_234.png", "ct_train_1014_label_frame_239.png", "ct_train_1014_label_frame_123.png", "ct_train_1020_label_frame_213.png", "ct_train_1001_label_frame_40.png", "ct_train_1014_label_frame_314.png", "ct_train_1006_label_frame_37.png", "ct_train_1001_label_frame_111.png", "ct_train_1016_label_frame_188.png", "ct_train_1017_label_frame_232.png", "ct_train_1018_label_frame_98.png", "ct_train_1020_label_frame_40.png", "ct_train_1011_label_frame_133.png", "ct_train_1006_label_frame_51.png", "ct_train_1006_label_frame_160.png", "ct_train_1009_label_frame_162.png", "ct_train_1020_label_frame_136.png", "ct_train_1015_label_frame_198.png", "ct_train_1014_label_frame_246.png", "ct_train_1014_label_frame_62.png", "ct_train_1011_label_frame_222.png", "ct_train_1011_label_frame_229.png", "ct_train_1005_label_frame_135.png", "ct_train_1003_label_frame_292.png", "ct_train_1001_label_frame_335.png", "ct_train_1014_label_frame_221.png", "ct_train_1014_label_frame_266.png", "ct_train_1015_label_frame_275.png", "ct_train_1017_label_frame_248.png", "ct_train_1006_label_frame_163.png", "ct_train_1020_label_frame_207.png", "ct_train_1016_label_frame_309.png", "ct_train_1015_label_frame_284.png", "ct_train_1015_label_frame_26.png", 
"ct_train_1003_label_frame_87.png", "ct_train_1017_label_frame_195.png", "ct_train_1009_label_frame_131.png", "ct_train_1001_label_frame_293.png", "ct_train_1008_label_frame_207.png", "ct_train_1020_label_frame_310.png", "ct_train_1015_label_frame_267.png", "ct_train_1015_label_frame_185.png", "ct_train_1016_label_frame_320.png", "ct_train_1017_label_frame_115.png", "ct_train_1011_label_frame_4.png", "ct_train_1008_label_frame_221.png", "ct_train_1006_label_frame_234.png", "ct_train_1008_label_frame_217.png", "ct_train_1008_label_frame_111.png", "ct_train_1006_label_frame_115.png", "ct_train_1008_label_frame_102.png", "ct_train_1005_label_frame_151.png", "ct_train_1005_label_frame_10.png", "ct_train_1014_label_frame_216.png", "ct_train_1003_label_frame_175.png", "ct_train_1008_label_frame_92.png", "ct_train_1009_label_frame_57.png", "ct_train_1015_label_frame_220.png", "ct_train_1016_label_frame_43.png", "ct_train_1017_label_frame_42.png", "ct_train_1011_label_frame_61.png", "ct_train_1016_label_frame_222.png", "ct_train_1020_label_frame_91.png", "ct_train_1018_label_frame_11.png", "ct_train_1017_label_frame_94.png", "ct_train_1016_label_frame_32.png", "ct_train_1018_label_frame_93.png", "ct_train_1014_label_frame_48.png", "ct_train_1006_label_frame_117.png", "ct_train_1014_label_frame_293.png", "ct_train_1001_label_frame_73.png", "ct_train_1008_label_frame_77.png", "ct_train_1011_label_frame_137.png", "ct_train_1015_label_frame_151.png", "ct_train_1005_label_frame_63.png", "ct_train_1017_label_frame_205.png", "ct_train_1017_label_frame_122.png", "ct_train_1006_label_frame_172.png", "ct_train_1017_label_frame_273.png", "ct_train_1006_label_frame_9.png", "ct_train_1015_label_frame_112.png", "ct_train_1016_label_frame_185.png", "ct_train_1001_label_frame_183.png", "ct_train_1011_label_frame_173.png", "ct_train_1008_label_frame_76.png", "ct_train_1006_label_frame_229.png", "ct_train_1017_label_frame_271.png", "ct_train_1006_label_frame_134.png", "ct_train_1014_label_frame_201.png", "ct_train_1001_label_frame_196.png", "ct_train_1017_label_frame_80.png", "ct_train_1011_label_frame_38.png", "ct_train_1003_label_frame_232.png", "ct_train_1008_label_frame_157.png", "ct_train_1016_label_frame_329.png", "ct_train_1003_label_frame_118.png", "ct_train_1011_label_frame_198.png", "ct_train_1017_label_frame_64.png", "ct_train_1017_label_frame_228.png", "ct_train_1014_label_frame_95.png", "ct_train_1003_label_frame_119.png", "ct_train_1009_label_frame_213.png", "ct_train_1015_label_frame_161.png", "ct_train_1014_label_frame_339.png", "ct_train_1003_label_frame_169.png", "ct_train_1014_label_frame_208.png", "ct_train_1016_label_frame_134.png", "ct_train_1006_label_frame_68.png", "ct_train_1005_label_frame_84.png", "ct_train_1020_label_frame_159.png", "ct_train_1017_label_frame_24.png", "ct_train_1001_label_frame_127.png", "ct_train_1015_label_frame_273.png", "ct_train_1020_label_frame_116.png", "ct_train_1001_label_frame_348.png", "ct_train_1018_label_frame_81.png", "ct_train_1020_label_frame_296.png", "ct_train_1017_label_frame_125.png", "ct_train_1008_label_frame_141.png", "ct_train_1015_label_frame_288.png", "ct_train_1015_label_frame_25.png", "ct_train_1014_label_frame_199.png", "ct_train_1006_label_frame_105.png", "ct_train_1003_label_frame_153.png", "ct_train_1014_label_frame_138.png", "ct_train_1011_label_frame_23.png", "ct_train_1015_label_frame_292.png", "ct_train_1016_label_frame_167.png", "ct_train_1018_label_frame_69.png", "ct_train_1011_label_frame_27.png", 
"ct_train_1017_label_frame_144.png", "ct_train_1001_label_frame_32.png", "ct_train_1020_label_frame_196.png", "ct_train_1006_label_frame_221.png", "ct_train_1009_label_frame_226.png", "ct_train_1003_label_frame_107.png", "ct_train_1015_label_frame_155.png", "ct_train_1020_label_frame_328.png", "ct_train_1011_label_frame_142.png", "ct_train_1016_label_frame_91.png", "ct_train_1008_label_frame_211.png", "ct_train_1006_label_frame_218.png", "ct_train_1020_label_frame_76.png", "ct_train_1006_label_frame_45.png", "ct_train_1001_label_frame_126.png", "ct_train_1003_label_frame_206.png", "ct_train_1009_label_frame_59.png", "ct_train_1014_label_frame_3.png", "ct_train_1009_label_frame_70.png", "ct_train_1017_label_frame_258.png", "ct_train_1009_label_frame_4.png", "ct_train_1001_label_frame_51.png", "ct_train_1014_label_frame_338.png", "ct_train_1016_label_frame_81.png", "ct_train_1001_label_frame_170.png", "ct_train_1008_label_frame_216.png", "ct_train_1014_label_frame_346.png", "ct_train_1006_label_frame_179.png", "ct_train_1006_label_frame_198.png", "ct_train_1009_label_frame_105.png", "ct_train_1008_label_frame_177.png", "ct_train_1008_label_frame_26.png", "ct_train_1015_label_frame_103.png", "ct_train_1011_label_frame_2.png", "ct_train_1015_label_frame_55.png", "ct_train_1006_label_frame_86.png", "ct_train_1001_label_frame_337.png", "ct_train_1003_label_frame_60.png", "ct_train_1009_label_frame_21.png", "ct_train_1016_label_frame_150.png", "ct_train_1001_label_frame_148.png", "ct_train_1015_label_frame_10.png", "ct_train_1015_label_frame_280.png", "ct_train_1015_label_frame_32.png", "ct_train_1005_label_frame_170.png", "ct_train_1003_label_frame_133.png", "ct_train_1017_label_frame_238.png", "ct_train_1006_label_frame_149.png", "ct_train_1001_label_frame_150.png", "ct_train_1003_label_frame_34.png", "ct_train_1017_label_frame_281.png", "ct_train_1005_label_frame_127.png", "ct_train_1017_label_frame_110.png", "ct_train_1003_label_frame_177.png", "ct_train_1005_label_frame_142.png", "ct_train_1008_label_frame_159.png", "ct_train_1011_label_frame_9.png", "ct_train_1009_label_frame_203.png", "ct_train_1014_label_frame_6.png", "ct_train_1014_label_frame_49.png", "ct_train_1005_label_frame_144.png", "ct_train_1008_label_frame_161.png", "ct_train_1016_label_frame_14.png", "ct_train_1009_label_frame_182.png", "ct_train_1009_label_frame_171.png", "ct_train_1017_label_frame_168.png", "ct_train_1014_label_frame_357.png", "ct_train_1015_label_frame_45.png", "ct_train_1017_label_frame_146.png", "ct_train_1005_label_frame_146.png", "ct_train_1015_label_frame_272.png", "ct_train_1015_label_frame_47.png", "ct_train_1011_label_frame_226.png", "ct_train_1001_label_frame_34.png", "ct_train_1001_label_frame_250.png", "ct_train_1017_label_frame_68.png", "ct_train_1011_label_frame_233.png", "ct_train_1008_label_frame_106.png", "ct_train_1009_label_frame_284.png", "ct_train_1009_label_frame_277.png", "ct_train_1014_label_frame_105.png", "ct_train_1014_label_frame_335.png", "ct_train_1020_label_frame_221.png", "ct_train_1008_label_frame_96.png", "ct_train_1001_label_frame_350.png", "ct_train_1011_label_frame_171.png", "ct_train_1020_label_frame_255.png", "ct_train_1001_label_frame_109.png", "ct_train_1003_label_frame_278.png", "ct_train_1008_label_frame_173.png", "ct_train_1006_label_frame_83.png", "ct_train_1008_label_frame_191.png", "ct_train_1015_label_frame_276.png", "ct_train_1020_label_frame_308.png", "ct_train_1018_label_frame_24.png", "ct_train_1014_label_frame_243.png", "ct_train_1009_label_frame_106.png", 
"ct_train_1016_label_frame_28.png", "ct_train_1014_label_frame_189.png", "ct_train_1018_label_frame_101.png", "ct_train_1020_label_frame_338.png", "ct_train_1008_label_frame_8.png", "ct_train_1006_label_frame_40.png", "ct_train_1009_label_frame_102.png", "ct_train_1008_label_frame_167.png", "ct_train_1016_label_frame_111.png", "ct_train_1014_label_frame_325.png", "ct_train_1011_label_frame_109.png", "ct_train_1015_label_frame_140.png", "ct_train_1001_label_frame_193.png", "ct_train_1017_label_frame_178.png", "ct_train_1008_label_frame_113.png", "ct_train_1020_label_frame_160.png", "ct_train_1011_label_frame_105.png", "ct_train_1006_label_frame_7.png", "ct_train_1015_label_frame_233.png", "ct_train_1015_label_frame_239.png", "ct_train_1014_label_frame_255.png", "ct_train_1016_label_frame_278.png", "ct_train_1015_label_frame_225.png", "ct_train_1020_label_frame_239.png", "ct_train_1017_label_frame_107.png", "ct_train_1018_label_frame_3.png", "ct_train_1009_label_frame_245.png", "ct_train_1015_label_frame_281.png", "ct_train_1017_label_frame_240.png", "ct_train_1003_label_frame_193.png", "ct_train_1008_label_frame_1.png", "ct_train_1006_label_frame_235.png", "ct_train_1014_label_frame_265.png", "ct_train_1014_label_frame_223.png", "ct_train_1017_label_frame_216.png", "ct_train_1008_label_frame_148.png", "ct_train_1014_label_frame_177.png", "ct_train_1005_label_frame_41.png", "ct_train_1018_label_frame_8.png", "ct_train_1006_label_frame_159.png", "ct_train_1009_label_frame_132.png", "ct_train_1003_label_frame_263.png", "ct_train_1001_label_frame_181.png", "ct_train_1014_label_frame_25.png", "ct_train_1005_label_frame_0.png", "ct_train_1020_label_frame_69.png", "ct_train_1015_label_frame_79.png", "ct_train_1014_label_frame_168.png", "ct_train_1003_label_frame_96.png", "ct_train_1001_label_frame_314.png", "ct_train_1008_label_frame_103.png", "ct_train_1001_label_frame_216.png", "ct_train_1020_label_frame_180.png", "ct_train_1015_label_frame_152.png", "ct_train_1006_label_frame_77.png", "ct_train_1008_label_frame_170.png", "ct_train_1014_label_frame_182.png", "ct_train_1009_label_frame_55.png", "ct_train_1006_label_frame_55.png", "ct_train_1009_label_frame_82.png", "ct_train_1009_label_frame_51.png", "ct_train_1005_label_frame_101.png", "ct_train_1016_label_frame_156.png", "ct_train_1001_label_frame_107.png", "ct_train_1014_label_frame_127.png", "ct_train_1005_label_frame_61.png", "ct_train_1020_label_frame_288.png", "ct_train_1016_label_frame_131.png", "ct_train_1018_label_frame_100.png", "ct_train_1008_label_frame_12.png", "ct_train_1008_label_frame_28.png", "ct_train_1017_label_frame_155.png", "ct_train_1003_label_frame_162.png", "ct_train_1001_label_frame_154.png", "ct_train_1018_label_frame_68.png", "ct_train_1003_label_frame_38.png", "ct_train_1009_label_frame_164.png", "ct_train_1020_label_frame_93.png", "ct_train_1015_label_frame_61.png", "ct_train_1014_label_frame_210.png", "ct_train_1016_label_frame_97.png", "ct_train_1006_label_frame_50.png", "ct_train_1020_label_frame_171.png", "ct_train_1015_label_frame_97.png", "ct_train_1006_label_frame_23.png", "ct_train_1014_label_frame_84.png", "ct_train_1014_label_frame_164.png", "ct_train_1016_label_frame_226.png", "ct_train_1017_label_frame_6.png", "ct_train_1003_label_frame_166.png", "ct_train_1015_label_frame_118.png", "ct_train_1018_label_frame_25.png", "ct_train_1014_label_frame_290.png", "ct_train_1005_label_frame_38.png", "ct_train_1011_label_frame_115.png", "ct_train_1020_label_frame_269.png", "ct_train_1017_label_frame_65.png", 
"ct_train_1014_label_frame_200.png", "ct_train_1008_label_frame_33.png", "ct_train_1006_label_frame_156.png", "ct_train_1016_label_frame_3.png", "ct_train_1020_label_frame_349.png", "ct_train_1015_label_frame_227.png", "ct_train_1001_label_frame_346.png", "ct_train_1014_label_frame_128.png", "ct_train_1009_label_frame_272.png", "ct_train_1015_label_frame_217.png", "ct_train_1014_label_frame_295.png", "ct_train_1015_label_frame_124.png", "ct_train_1008_label_frame_208.png", "ct_train_1006_label_frame_205.png", "ct_train_1016_label_frame_143.png", "ct_train_1014_label_frame_348.png", "ct_train_1011_label_frame_227.png", "ct_train_1009_label_frame_189.png", "ct_train_1003_label_frame_246.png", "ct_train_1011_label_frame_121.png", "ct_train_1017_label_frame_226.png", "ct_train_1011_label_frame_8.png", "ct_train_1009_label_frame_30.png", "ct_train_1018_label_frame_154.png", "ct_train_1001_label_frame_147.png", "ct_train_1014_label_frame_256.png", "ct_train_1009_label_frame_215.png", "ct_train_1003_label_frame_297.png", "ct_train_1015_label_frame_75.png", "ct_train_1014_label_frame_110.png", "ct_train_1017_label_frame_198.png", "ct_train_1011_label_frame_3.png", "ct_train_1001_label_frame_318.png", "ct_train_1001_label_frame_174.png", "ct_train_1015_label_frame_287.png", "ct_train_1015_label_frame_252.png", "ct_train_1009_label_frame_137.png", "ct_train_1014_label_frame_91.png", "ct_train_1001_label_frame_159.png", "ct_train_1009_label_frame_95.png", "ct_train_1018_label_frame_48.png", "ct_train_1001_label_frame_187.png", "ct_train_1011_label_frame_14.png", "ct_train_1001_label_frame_317.png", "ct_train_1015_label_frame_147.png", "ct_train_1014_label_frame_312.png", "ct_train_1015_label_frame_193.png", "ct_train_1008_label_frame_16.png", "ct_train_1016_label_frame_68.png", "ct_train_1018_label_frame_15.png", "ct_train_1020_label_frame_343.png", "ct_train_1009_label_frame_47.png", "ct_train_1003_label_frame_114.png", "ct_train_1003_label_frame_66.png", "ct_train_1020_label_frame_320.png", "ct_train_1015_label_frame_148.png", "ct_train_1020_label_frame_258.png", "ct_train_1015_label_frame_277.png", "ct_train_1011_label_frame_63.png", "ct_train_1018_label_frame_79.png", "ct_train_1001_label_frame_234.png", "ct_train_1017_label_frame_32.png", "ct_train_1016_label_frame_210.png", "ct_train_1015_label_frame_160.png", "ct_train_1009_label_frame_77.png", "ct_train_1008_label_frame_65.png", "ct_train_1009_label_frame_31.png", "ct_train_1015_label_frame_269.png", "ct_train_1016_label_frame_154.png", "ct_train_1003_label_frame_289.png", "ct_train_1015_label_frame_184.png", "ct_train_1001_label_frame_320.png", "ct_train_1016_label_frame_72.png", "ct_train_1015_label_frame_207.png", "ct_train_1009_label_frame_104.png", "ct_train_1016_label_frame_292.png", "ct_train_1015_label_frame_163.png", "ct_train_1018_label_frame_139.png", "ct_train_1017_label_frame_156.png", "ct_train_1016_label_frame_144.png", "ct_train_1001_label_frame_176.png", "ct_train_1009_label_frame_122.png", "ct_train_1018_label_frame_119.png", "ct_train_1020_label_frame_253.png", "ct_train_1017_label_frame_149.png", "ct_train_1018_label_frame_128.png", "ct_train_1014_label_frame_276.png", "ct_train_1008_label_frame_163.png", "ct_train_1009_label_frame_49.png", "ct_train_1015_label_frame_170.png", "ct_train_1006_label_frame_99.png", "ct_train_1005_label_frame_18.png", "ct_train_1017_label_frame_266.png", "ct_train_1015_label_frame_245.png", "ct_train_1003_label_frame_230.png", "ct_train_1017_label_frame_12.png", 
"ct_train_1015_label_frame_4.png", "ct_train_1003_label_frame_291.png", "ct_train_1020_label_frame_293.png", "ct_train_1003_label_frame_132.png", "ct_train_1009_label_frame_244.png", "ct_train_1015_label_frame_265.png", "ct_train_1003_label_frame_49.png", "ct_train_1020_label_frame_277.png", "ct_train_1001_label_frame_263.png", "ct_train_1014_label_frame_57.png", "ct_train_1016_label_frame_264.png", "ct_train_1014_label_frame_153.png", "ct_train_1003_label_frame_170.png", "ct_train_1014_label_frame_34.png", "ct_train_1016_label_frame_75.png", "ct_train_1015_label_frame_74.png", "ct_train_1008_label_frame_204.png", "ct_train_1003_label_frame_178.png", "ct_train_1008_label_frame_149.png", "ct_train_1009_label_frame_139.png", "ct_train_1014_label_frame_308.png", "ct_train_1009_label_frame_133.png", "ct_train_1009_label_frame_135.png", "ct_train_1016_label_frame_224.png", "ct_train_1006_label_frame_192.png", "ct_train_1015_label_frame_31.png", "ct_train_1006_label_frame_240.png", "ct_train_1017_label_frame_229.png", "ct_train_1005_label_frame_15.png", "ct_train_1009_label_frame_117.png", "ct_train_1011_label_frame_5.png", "ct_train_1003_label_frame_136.png", "ct_train_1009_label_frame_89.png", "ct_train_1005_label_frame_57.png", "ct_train_1020_label_frame_113.png", "ct_train_1009_label_frame_163.png", "ct_train_1017_label_frame_183.png", "ct_train_1017_label_frame_43.png", "ct_train_1011_label_frame_129.png", "ct_train_1015_label_frame_33.png", "ct_train_1001_label_frame_60.png", "ct_train_1008_label_frame_29.png", "ct_train_1020_label_frame_120.png", "ct_train_1009_label_frame_29.png", "ct_train_1016_label_frame_142.png", "ct_train_1009_label_frame_145.png", "ct_train_1014_label_frame_351.png", "ct_train_1016_label_frame_212.png", "ct_train_1011_label_frame_48.png", "ct_train_1005_label_frame_66.png", "ct_train_1006_label_frame_222.png", "ct_train_1014_label_frame_33.png", "ct_train_1011_label_frame_114.png", "ct_train_1003_label_frame_27.png", "ct_train_1018_label_frame_85.png", "ct_train_1018_label_frame_123.png", "ct_train_1011_label_frame_147.png", "ct_train_1011_label_frame_214.png", "ct_train_1020_label_frame_225.png", "ct_train_1006_label_frame_98.png", "ct_train_1014_label_frame_98.png", "ct_train_1015_label_frame_206.png", "ct_train_1003_label_frame_142.png", "ct_train_1005_label_frame_76.png", "ct_train_1001_label_frame_206.png", "ct_train_1014_label_frame_242.png", "ct_train_1016_label_frame_17.png", "ct_train_1009_label_frame_161.png", "ct_train_1015_label_frame_264.png", "ct_train_1017_label_frame_33.png", "ct_train_1014_label_frame_66.png", "ct_train_1020_label_frame_173.png", "ct_train_1015_label_frame_164.png", "ct_train_1014_label_frame_60.png", "ct_train_1018_label_frame_140.png", "ct_train_1008_label_frame_41.png", "ct_train_1001_label_frame_296.png", "ct_train_1020_label_frame_72.png", "ct_train_1003_label_frame_138.png", "ct_train_1009_label_frame_0.png", "ct_train_1017_label_frame_112.png", "ct_train_1006_label_frame_239.png", "ct_train_1014_label_frame_206.png", "ct_train_1015_label_frame_157.png", "ct_train_1014_label_frame_166.png", "ct_train_1014_label_frame_337.png", "ct_train_1003_label_frame_275.png", "ct_train_1001_label_frame_55.png", "ct_train_1009_label_frame_42.png", "ct_train_1011_label_frame_50.png", "ct_train_1008_label_frame_100.png", "ct_train_1017_label_frame_1.png", "ct_train_1017_label_frame_175.png", "ct_train_1005_label_frame_162.png", "ct_train_1020_label_frame_312.png", "ct_train_1009_label_frame_123.png", "ct_train_1009_label_frame_273.png", 
"ct_train_1006_label_frame_199.png", "ct_train_1020_label_frame_192.png", "ct_train_1016_label_frame_256.png", "ct_train_1003_label_frame_78.png", "ct_train_1014_label_frame_187.png", "ct_train_1014_label_frame_31.png", "ct_train_1018_label_frame_39.png", "ct_train_1011_label_frame_25.png", "ct_train_1020_label_frame_122.png", "ct_train_1015_label_frame_43.png", "ct_train_1001_label_frame_241.png", "ct_train_1018_label_frame_114.png", "ct_train_1008_label_frame_220.png", "ct_train_1001_label_frame_179.png", "ct_train_1015_label_frame_102.png", "ct_train_1016_label_frame_46.png", "ct_train_1015_label_frame_237.png", "ct_train_1017_label_frame_72.png", "ct_train_1003_label_frame_160.png", "ct_train_1003_label_frame_176.png", "ct_train_1017_label_frame_261.png", "ct_train_1001_label_frame_308.png", "ct_train_1011_label_frame_145.png", "ct_train_1008_label_frame_181.png", "ct_train_1014_label_frame_288.png", "ct_train_1015_label_frame_139.png", "ct_train_1014_label_frame_197.png", "ct_train_1014_label_frame_90.png", "ct_train_1006_label_frame_127.png", "ct_train_1003_label_frame_171.png", "ct_train_1020_label_frame_46.png", "ct_train_1008_label_frame_165.png", "ct_train_1017_label_frame_150.png", "ct_train_1017_label_frame_188.png", "ct_train_1017_label_frame_85.png", "ct_train_1003_label_frame_202.png", "ct_train_1014_label_frame_140.png", "ct_train_1009_label_frame_54.png", "ct_train_1017_label_frame_138.png", "ct_train_1005_label_frame_40.png", "ct_train_1020_label_frame_244.png", "ct_train_1016_label_frame_307.png", "ct_train_1011_label_frame_1.png", "ct_train_1020_label_frame_75.png", "ct_train_1003_label_frame_9.png", "ct_train_1017_label_frame_83.png", "ct_train_1015_label_frame_58.png", "ct_train_1008_label_frame_108.png", "ct_train_1005_label_frame_123.png", "ct_train_1001_label_frame_120.png", "ct_train_1005_label_frame_89.png", "ct_train_1014_label_frame_194.png", "ct_train_1001_label_frame_26.png", "ct_train_1008_label_frame_180.png", "ct_train_1014_label_frame_181.png", "ct_train_1016_label_frame_117.png", "ct_train_1011_label_frame_197.png", "ct_train_1014_label_frame_68.png", "ct_train_1003_label_frame_257.png", "ct_train_1008_label_frame_135.png", "ct_train_1006_label_frame_145.png", "ct_train_1005_label_frame_37.png", "ct_train_1006_label_frame_60.png", "ct_train_1020_label_frame_271.png", "ct_train_1009_label_frame_50.png", "ct_train_1005_label_frame_55.png", "ct_train_1020_label_frame_133.png", "ct_train_1017_label_frame_98.png", "ct_train_1016_label_frame_276.png", "ct_train_1001_label_frame_272.png", "ct_train_1020_label_frame_99.png", "ct_train_1016_label_frame_24.png", "ct_train_1016_label_frame_54.png", "ct_train_1014_label_frame_0.png", "ct_train_1014_label_frame_213.png", "ct_train_1003_label_frame_93.png", "ct_train_1006_label_frame_219.png", "ct_train_1005_label_frame_158.png", "ct_train_1006_label_frame_217.png", "ct_train_1009_label_frame_177.png", "ct_train_1014_label_frame_55.png", "ct_train_1020_label_frame_2.png", "ct_train_1014_label_frame_250.png", "ct_train_1001_label_frame_106.png", "ct_train_1020_label_frame_177.png", "ct_train_1020_label_frame_168.png", "ct_train_1003_label_frame_55.png", "ct_train_1005_label_frame_141.png", "ct_train_1003_label_frame_243.png", "ct_train_1020_label_frame_8.png", "ct_train_1014_label_frame_300.png", "ct_train_1017_label_frame_134.png", "ct_train_1018_label_frame_27.png", "ct_train_1017_label_frame_128.png", "ct_train_1017_label_frame_49.png", "ct_train_1020_label_frame_140.png", "ct_train_1011_label_frame_202.png", 
"ct_train_1015_label_frame_226.png", "ct_train_1011_label_frame_161.png", "ct_train_1011_label_frame_206.png", "ct_train_1001_label_frame_66.png", "ct_train_1020_label_frame_297.png", "ct_train_1020_label_frame_56.png", "ct_train_1008_label_frame_63.png", "ct_train_1001_label_frame_270.png", "ct_train_1006_label_frame_220.png", "ct_train_1020_label_frame_268.png", "ct_train_1020_label_frame_230.png", "ct_train_1014_label_frame_37.png", "ct_train_1006_label_frame_6.png", "ct_train_1008_label_frame_99.png", "ct_train_1016_label_frame_128.png", "ct_train_1017_label_frame_176.png", "ct_train_1020_label_frame_311.png", "ct_train_1001_label_frame_265.png", "ct_train_1020_label_frame_200.png", "ct_train_1018_label_frame_104.png", "ct_train_1009_label_frame_14.png", "ct_train_1006_label_frame_244.png", "ct_train_1015_label_frame_240.png", "ct_train_1017_label_frame_158.png", "ct_train_1003_label_frame_19.png", "ct_train_1015_label_frame_68.png", "ct_train_1020_label_frame_285.png", "ct_train_1014_label_frame_58.png", "ct_train_1009_label_frame_100.png", "ct_train_1014_label_frame_115.png", "ct_train_1015_label_frame_213.png", "ct_train_1003_label_frame_148.png", "ct_train_1016_label_frame_259.png", "ct_train_1009_label_frame_86.png", "ct_train_1006_label_frame_170.png", "ct_train_1015_label_frame_186.png", "ct_train_1011_label_frame_167.png", "ct_train_1009_label_frame_91.png", "ct_train_1018_label_frame_49.png", "ct_train_1014_label_frame_331.png", "ct_train_1020_label_frame_119.png", "ct_train_1017_label_frame_79.png", "ct_train_1001_label_frame_166.png", "ct_train_1009_label_frame_159.png", "ct_train_1014_label_frame_111.png", "ct_train_1001_label_frame_324.png", "ct_train_1005_label_frame_106.png", "ct_train_1003_label_frame_277.png", "ct_train_1008_label_frame_86.png", "ct_train_1008_label_frame_10.png", "ct_train_1011_label_frame_125.png", "ct_train_1009_label_frame_209.png", "ct_train_1008_label_frame_58.png", "ct_train_1001_label_frame_251.png", "ct_train_1003_label_frame_69.png", "ct_train_1020_label_frame_262.png", "ct_train_1018_label_frame_162.png", "ct_train_1020_label_frame_322.png", "ct_train_1015_label_frame_115.png", "ct_train_1017_label_frame_237.png", "ct_train_1009_label_frame_130.png", "ct_train_1011_label_frame_146.png", "ct_train_1017_label_frame_160.png", "ct_train_1020_label_frame_337.png", "ct_train_1006_label_frame_157.png", "ct_train_1001_label_frame_95.png", "ct_train_1009_label_frame_247.png", "ct_train_1009_label_frame_66.png", "ct_train_1011_label_frame_59.png", "ct_train_1005_label_frame_17.png", "ct_train_1001_label_frame_38.png", "ct_train_1015_label_frame_14.png", "ct_train_1003_label_frame_43.png", "ct_train_1006_label_frame_3.png", "ct_train_1009_label_frame_150.png", "ct_train_1020_label_frame_105.png", "ct_train_1015_label_frame_205.png", "ct_train_1008_label_frame_50.png", "ct_train_1017_label_frame_63.png", "ct_train_1003_label_frame_150.png", "ct_train_1008_label_frame_60.png", "ct_train_1001_label_frame_41.png", "ct_train_1006_label_frame_91.png", "ct_train_1014_label_frame_298.png", "ct_train_1016_label_frame_98.png", "ct_train_1009_label_frame_85.png", "ct_train_1018_label_frame_70.png", "ct_train_1015_label_frame_247.png", "ct_train_1005_label_frame_16.png", "ct_train_1005_label_frame_169.png", "ct_train_1006_label_frame_24.png", "ct_train_1001_label_frame_291.png", "ct_train_1015_label_frame_141.png", "ct_train_1014_label_frame_310.png", "ct_train_1006_label_frame_76.png", "ct_train_1008_label_frame_42.png", "ct_train_1008_label_frame_84.png", 
"ct_train_1009_label_frame_16.png", "ct_train_1001_label_frame_298.png", "ct_train_1003_label_frame_108.png", "ct_train_1017_label_frame_153.png", "ct_train_1006_label_frame_20.png", "ct_train_1014_label_frame_178.png", "ct_train_1009_label_frame_68.png", "ct_train_1005_label_frame_128.png", "ct_train_1009_label_frame_199.png", "ct_train_1005_label_frame_64.png", "ct_train_1014_label_frame_152.png", "ct_train_1020_label_frame_317.png", "ct_train_1006_label_frame_95.png", "ct_train_1006_label_frame_200.png", "ct_train_1006_label_frame_142.png", "ct_train_1018_label_frame_65.png", "ct_train_1011_label_frame_0.png", "ct_train_1018_label_frame_109.png", "ct_train_1015_label_frame_27.png", "ct_train_1011_label_frame_75.png", "ct_train_1001_label_frame_266.png", "ct_train_1018_label_frame_94.png", "ct_train_1020_label_frame_110.png", "ct_train_1015_label_frame_253.png", "ct_train_1014_label_frame_304.png", "ct_train_1017_label_frame_38.png", "ct_train_1020_label_frame_313.png", "ct_train_1017_label_frame_219.png", "ct_train_1016_label_frame_133.png", "ct_train_1020_label_frame_342.png", "ct_train_1001_label_frame_0.png", "ct_train_1011_label_frame_117.png", "ct_train_1009_label_frame_128.png", "ct_train_1015_label_frame_179.png", "ct_train_1017_label_frame_84.png", "ct_train_1020_label_frame_237.png", "ct_train_1003_label_frame_295.png", "ct_train_1015_label_frame_127.png", "ct_train_1020_label_frame_59.png", "ct_train_1003_label_frame_39.png", "ct_train_1003_label_frame_173.png", "ct_train_1009_label_frame_237.png", "ct_train_1016_label_frame_8.png", "ct_train_1015_label_frame_116.png", "ct_train_1001_label_frame_114.png", "ct_train_1014_label_frame_228.png", "ct_train_1003_label_frame_90.png", "ct_train_1016_label_frame_130.png", "ct_train_1014_label_frame_160.png", "ct_train_1001_label_frame_212.png", "ct_train_1016_label_frame_314.png", "ct_train_1005_label_frame_81.png", "ct_train_1016_label_frame_119.png", "ct_train_1001_label_frame_217.png", "ct_train_1017_label_frame_53.png", "ct_train_1014_label_frame_22.png", "ct_train_1015_label_frame_83.png", "ct_train_1016_label_frame_214.png", "ct_train_1014_label_frame_122.png", "ct_train_1001_label_frame_22.png", "ct_train_1009_label_frame_278.png", "ct_train_1008_label_frame_179.png", "ct_train_1017_label_frame_97.png", "ct_train_1017_label_frame_31.png", "ct_train_1017_label_frame_123.png", "ct_train_1020_label_frame_83.png", "ct_train_1018_label_frame_29.png", "ct_train_1008_label_frame_197.png", "ct_train_1017_label_frame_212.png", "ct_train_1018_label_frame_126.png", "ct_train_1015_label_frame_235.png", "ct_train_1014_label_frame_27.png", "ct_train_1001_label_frame_188.png", "ct_train_1006_label_frame_201.png", "ct_train_1014_label_frame_344.png", "ct_train_1001_label_frame_286.png", "ct_train_1020_label_frame_305.png", "ct_train_1006_label_frame_43.png", "ct_train_1003_label_frame_46.png", "ct_train_1003_label_frame_23.png", "ct_train_1009_label_frame_32.png", "ct_train_1014_label_frame_172.png", "ct_train_1020_label_frame_287.png", "ct_train_1020_label_frame_219.png", "ct_train_1001_label_frame_36.png", "ct_train_1008_label_frame_127.png", "ct_train_1017_label_frame_9.png", "ct_train_1001_label_frame_226.png", "ct_train_1005_label_frame_122.png", "ct_train_1011_label_frame_178.png", "ct_train_1003_label_frame_267.png", "ct_train_1016_label_frame_297.png", "ct_train_1014_label_frame_336.png", "ct_train_1020_label_frame_250.png", "ct_train_1014_label_frame_311.png", "ct_train_1020_label_frame_282.png", "ct_train_1009_label_frame_71.png", 
"ct_train_1006_label_frame_80.png", "ct_train_1020_label_frame_39.png", "ct_train_1006_label_frame_38.png", "ct_train_1005_label_frame_130.png", "ct_train_1009_label_frame_185.png", "ct_train_1017_label_frame_54.png", "ct_train_1015_label_frame_17.png", "ct_train_1001_label_frame_316.png", "ct_train_1009_label_frame_236.png", "ct_train_1020_label_frame_9.png", "ct_train_1020_label_frame_164.png", "ct_train_1017_label_frame_170.png", "ct_train_1009_label_frame_74.png", "ct_train_1016_label_frame_31.png", "ct_train_1001_label_frame_15.png", "ct_train_1001_label_frame_153.png", "ct_train_1017_label_frame_196.png", "ct_train_1006_label_frame_116.png", "ct_train_1018_label_frame_20.png", "ct_train_1014_label_frame_69.png", "ct_train_1011_label_frame_224.png", "ct_train_1016_label_frame_94.png", "ct_train_1008_label_frame_82.png", "ct_train_1016_label_frame_148.png", "ct_train_1018_label_frame_179.png", "ct_train_1014_label_frame_97.png", "ct_train_1014_label_frame_41.png", "ct_train_1020_label_frame_117.png", "ct_train_1003_label_frame_229.png", "ct_train_1018_label_frame_52.png", "ct_train_1009_label_frame_112.png", "ct_train_1001_label_frame_49.png", "ct_train_1018_label_frame_14.png", "ct_train_1006_label_frame_122.png", "ct_train_1020_label_frame_333.png", "ct_train_1015_label_frame_138.png", "ct_train_1006_label_frame_135.png", "ct_train_1014_label_frame_241.png", "ct_train_1016_label_frame_170.png", "ct_train_1003_label_frame_240.png", "ct_train_1018_label_frame_44.png", "ct_train_1016_label_frame_248.png", "ct_train_1011_label_frame_126.png", "ct_train_1016_label_frame_27.png", "ct_train_1006_label_frame_109.png", "ct_train_1015_label_frame_172.png", "ct_train_1015_label_frame_81.png", "ct_train_1018_label_frame_111.png", "ct_train_1006_label_frame_11.png", "ct_train_1001_label_frame_255.png", "ct_train_1011_label_frame_42.png", "ct_train_1016_label_frame_70.png", "ct_train_1003_label_frame_189.png", "ct_train_1016_label_frame_152.png", "ct_train_1003_label_frame_256.png", "ct_train_1001_label_frame_258.png", "ct_train_1017_label_frame_157.png", "ct_train_1003_label_frame_157.png", "ct_train_1016_label_frame_157.png", "ct_train_1020_label_frame_321.png", "ct_train_1009_label_frame_253.png", "ct_train_1020_label_frame_324.png", "ct_train_1016_label_frame_268.png", "ct_train_1006_label_frame_10.png", "ct_train_1011_label_frame_188.png", "ct_train_1011_label_frame_164.png", "ct_train_1006_label_frame_161.png", "ct_train_1003_label_frame_270.png", "ct_train_1001_label_frame_105.png", "ct_train_1016_label_frame_132.png", "ct_train_1016_label_frame_328.png", "ct_train_1003_label_frame_208.png", "ct_train_1015_label_frame_69.png", "ct_train_1003_label_frame_103.png", "ct_train_1017_label_frame_147.png", "ct_train_1020_label_frame_129.png", "ct_train_1006_label_frame_168.png", "ct_train_1016_label_frame_25.png", "ct_train_1015_label_frame_66.png", "ct_train_1018_label_frame_115.png", "ct_train_1008_label_frame_40.png", "ct_train_1020_label_frame_209.png", "ct_train_1003_label_frame_239.png", "ct_train_1001_label_frame_326.png", "ct_train_1020_label_frame_360.png", "ct_train_1009_label_frame_266.png", "ct_train_1001_label_frame_253.png", "ct_train_1001_label_frame_229.png", "ct_train_1008_label_frame_74.png", "ct_train_1003_label_frame_137.png", "ct_train_1003_label_frame_6.png", "ct_train_1018_label_frame_63.png", "ct_train_1014_label_frame_334.png", "ct_train_1011_label_frame_172.png", "ct_train_1006_label_frame_12.png", "ct_train_1001_label_frame_242.png", "ct_train_1014_label_frame_10.png", 
"ct_train_1005_label_frame_32.png", "ct_train_1020_label_frame_167.png", "ct_train_1016_label_frame_193.png", "ct_train_1009_label_frame_176.png", "ct_train_1008_label_frame_219.png", "ct_train_1005_label_frame_67.png", "ct_train_1006_label_frame_238.png", "ct_train_1003_label_frame_235.png", "ct_train_1001_label_frame_125.png", "ct_train_1018_label_frame_144.png", "ct_train_1020_label_frame_267.png", "ct_train_1017_label_frame_174.png", "ct_train_1001_label_frame_128.png", "ct_train_1017_label_frame_180.png", "ct_train_1009_label_frame_175.png", "ct_train_1020_label_frame_100.png", "ct_train_1018_label_frame_87.png", "ct_train_1003_label_frame_122.png", "ct_train_1017_label_frame_152.png", "ct_train_1017_label_frame_114.png", "ct_train_1005_label_frame_138.png", "ct_train_1008_label_frame_209.png", "ct_train_1001_label_frame_76.png", "ct_train_1018_label_frame_169.png", "ct_train_1017_label_frame_279.png", "ct_train_1015_label_frame_73.png", "ct_train_1016_label_frame_48.png", "ct_train_1016_label_frame_39.png", "ct_train_1003_label_frame_115.png", "ct_train_1009_label_frame_62.png", "ct_train_1014_label_frame_320.png", "ct_train_1008_label_frame_140.png", "ct_train_1011_label_frame_88.png", "ct_train_1011_label_frame_72.png", "ct_train_1006_label_frame_232.png", "ct_train_1009_label_frame_274.png", "ct_train_1009_label_frame_118.png", "ct_train_1001_label_frame_12.png", "ct_train_1006_label_frame_87.png", "ct_train_1001_label_frame_104.png", "ct_train_1016_label_frame_180.png", "ct_train_1009_label_frame_238.png", "ct_train_1003_label_frame_283.png", "ct_train_1018_label_frame_182.png", "ct_train_1001_label_frame_134.png", "ct_train_1001_label_frame_315.png", "ct_train_1018_label_frame_102.png", "ct_train_1018_label_frame_84.png", "ct_train_1009_label_frame_193.png", "ct_train_1001_label_frame_294.png", "ct_train_1003_label_frame_110.png", "ct_train_1015_label_frame_210.png", "ct_train_1020_label_frame_6.png", "ct_train_1015_label_frame_126.png", "ct_train_1003_label_frame_250.png", "ct_train_1001_label_frame_223.png", "ct_train_1016_label_frame_60.png", "ct_train_1014_label_frame_278.png", "ct_train_1020_label_frame_85.png", "ct_train_1017_label_frame_13.png", "ct_train_1015_label_frame_23.png", "ct_train_1011_label_frame_101.png", "ct_train_1011_label_frame_203.png", "ct_train_1016_label_frame_237.png", "ct_train_1003_label_frame_213.png", "ct_train_1014_label_frame_275.png", "ct_train_1018_label_frame_136.png", "ct_train_1014_label_frame_231.png", "ct_train_1020_label_frame_137.png", "ct_train_1003_label_frame_217.png", "ct_train_1008_label_frame_32.png", "ct_train_1018_label_frame_156.png", "ct_train_1020_label_frame_166.png", "ct_train_1003_label_frame_279.png", "ct_train_1003_label_frame_251.png", "ct_train_1011_label_frame_22.png", "ct_train_1001_label_frame_58.png", "ct_train_1001_label_frame_304.png", "ct_train_1003_label_frame_33.png", "ct_train_1003_label_frame_191.png", "ct_train_1006_label_frame_225.png", "ct_train_1011_label_frame_163.png", "ct_train_1016_label_frame_262.png", "ct_train_1001_label_frame_214.png", "ct_train_1017_label_frame_173.png", "ct_train_1014_label_frame_252.png", "ct_train_1018_label_frame_13.png", "ct_train_1015_label_frame_256.png", "ct_train_1016_label_frame_33.png", "ct_train_1009_label_frame_148.png", "ct_train_1017_label_frame_10.png", "ct_train_1001_label_frame_221.png", "ct_train_1014_label_frame_313.png", "ct_train_1017_label_frame_244.png", "ct_train_1001_label_frame_90.png", "ct_train_1003_label_frame_62.png", 
"ct_train_1003_label_frame_20.png", "ct_train_1005_label_frame_103.png", "ct_train_1017_label_frame_74.png", "ct_train_1008_label_frame_35.png", "ct_train_1003_label_frame_258.png", "ct_train_1018_label_frame_36.png", "ct_train_1020_label_frame_241.png", "ct_train_1014_label_frame_247.png", "ct_train_1006_label_frame_61.png", "ct_train_1020_label_frame_62.png", "ct_train_1006_label_frame_36.png", "ct_train_1005_label_frame_159.png", "ct_train_1008_label_frame_2.png", "ct_train_1011_label_frame_209.png", "ct_train_1020_label_frame_270.png", "ct_train_1006_label_frame_169.png", "ct_train_1006_label_frame_93.png", "ct_train_1017_label_frame_277.png", "ct_train_1017_label_frame_272.png", "ct_train_1014_label_frame_103.png", "ct_train_1015_label_frame_218.png", "ct_train_1001_label_frame_77.png", "ct_train_1006_label_frame_243.png", "ct_train_1009_label_frame_37.png", "ct_train_1003_label_frame_280.png", "ct_train_1005_label_frame_133.png", "ct_train_1014_label_frame_352.png", "ct_train_1005_label_frame_117.png", "ct_train_1014_label_frame_72.png", "ct_train_1018_label_frame_106.png", "ct_train_1003_label_frame_24.png", "ct_train_1009_label_frame_153.png", "ct_train_1001_label_frame_349.png", "ct_train_1003_label_frame_147.png", "ct_train_1020_label_frame_11.png", "ct_train_1015_label_frame_299.png", "ct_train_1018_label_frame_168.png", "ct_train_1001_label_frame_35.png", "ct_train_1017_label_frame_50.png", "ct_train_1001_label_frame_23.png", "ct_train_1017_label_frame_215.png", "ct_train_1003_label_frame_216.png", "ct_train_1020_label_frame_224.png", "ct_train_1020_label_frame_272.png", "ct_train_1015_label_frame_199.png", "ct_train_1020_label_frame_38.png", "ct_train_1005_label_frame_112.png", "ct_train_1017_label_frame_44.png", "ct_train_1020_label_frame_53.png", "ct_train_1008_label_frame_187.png", "ct_train_1003_label_frame_59.png", "ct_train_1018_label_frame_116.png", "ct_train_1001_label_frame_340.png", "ct_train_1020_label_frame_280.png", "ct_train_1014_label_frame_174.png", "ct_train_1016_label_frame_61.png", "ct_train_1009_label_frame_251.png", "ct_train_1018_label_frame_122.png", "ct_train_1009_label_frame_174.png", "ct_train_1005_label_frame_20.png", "ct_train_1016_label_frame_109.png", "ct_train_1014_label_frame_18.png", "ct_train_1011_label_frame_106.png", "ct_train_1009_label_frame_101.png", "ct_train_1001_label_frame_1.png", "ct_train_1005_label_frame_167.png", "ct_train_1015_label_frame_282.png", "ct_train_1008_label_frame_116.png", "ct_train_1005_label_frame_56.png", "ct_train_1003_label_frame_192.png", "ct_train_1015_label_frame_268.png", "ct_train_1006_label_frame_2.png", "ct_train_1008_label_frame_89.png", "ct_train_1011_label_frame_53.png", "ct_train_1008_label_frame_202.png", "ct_train_1009_label_frame_280.png", "ct_train_1008_label_frame_139.png", "ct_train_1014_label_frame_198.png", "ct_train_1015_label_frame_94.png", "ct_train_1003_label_frame_262.png", "ct_train_1008_label_frame_6.png", "ct_train_1001_label_frame_232.png", "ct_train_1017_label_frame_269.png", "ct_train_1020_label_frame_97.png", "ct_train_1001_label_frame_280.png", "ct_train_1020_label_frame_28.png", "ct_train_1009_label_frame_136.png", "ct_train_1018_label_frame_171.png", "ct_train_1015_label_frame_63.png", "ct_train_1011_label_frame_169.png", "ct_train_1001_label_frame_297.png", "ct_train_1006_label_frame_132.png", "ct_train_1001_label_frame_325.png", "ct_train_1003_label_frame_130.png", "ct_train_1015_label_frame_128.png", "ct_train_1020_label_frame_348.png", "ct_train_1016_label_frame_139.png", 
"ct_train_1009_label_frame_97.png", "ct_train_1009_label_frame_204.png", "ct_train_1003_label_frame_13.png", "ct_train_1008_label_frame_83.png", "ct_train_1016_label_frame_1.png", "ct_train_1001_label_frame_328.png", "ct_train_1001_label_frame_205.png", "ct_train_1005_label_frame_100.png", "ct_train_1003_label_frame_127.png", "ct_train_1015_label_frame_204.png", "ct_train_1009_label_frame_158.png", "ct_train_1009_label_frame_289.png", "ct_train_1014_label_frame_356.png", "ct_train_1014_label_frame_137.png", "ct_train_1003_label_frame_121.png", "ct_train_1018_label_frame_150.png", "ct_train_1011_label_frame_175.png", "ct_train_1016_label_frame_138.png", "ct_train_1011_label_frame_180.png", "ct_train_1003_label_frame_79.png", "ct_train_1008_label_frame_17.png", "ct_train_1001_label_frame_202.png", "ct_train_1020_label_frame_55.png", "ct_train_1003_label_frame_36.png", "ct_train_1018_label_frame_135.png", "ct_train_1011_label_frame_113.png", "ct_train_1006_label_frame_180.png", "ct_train_1016_label_frame_42.png", "ct_train_1005_label_frame_160.png", "ct_train_1001_label_frame_277.png", "ct_train_1014_label_frame_81.png", "ct_train_1020_label_frame_98.png", "ct_train_1008_label_frame_18.png", "ct_train_1016_label_frame_38.png", "ct_train_1020_label_frame_187.png", "ct_train_1006_label_frame_62.png", "ct_train_1018_label_frame_130.png", "ct_train_1001_label_frame_273.png", "ct_train_1011_label_frame_90.png", "ct_train_1011_label_frame_57.png", "ct_train_1005_label_frame_120.png", "ct_train_1003_label_frame_226.png", "ct_train_1016_label_frame_299.png", "ct_train_1018_label_frame_180.png", "ct_train_1016_label_frame_240.png", "ct_train_1009_label_frame_262.png", "ct_train_1006_label_frame_30.png", "ct_train_1005_label_frame_29.png", "ct_train_1017_label_frame_121.png", "ct_train_1014_label_frame_318.png", "ct_train_1001_label_frame_312.png", "ct_train_1009_label_frame_235.png", "ct_train_1001_label_frame_182.png", "ct_train_1020_label_frame_175.png", "ct_train_1015_label_frame_111.png", "ct_train_1016_label_frame_47.png", "ct_train_1011_label_frame_92.png", "ct_train_1009_label_frame_187.png", "ct_train_1005_label_frame_125.png", "ct_train_1006_label_frame_191.png", "ct_train_1014_label_frame_149.png", "ct_train_1006_label_frame_97.png", "ct_train_1009_label_frame_221.png", "ct_train_1017_label_frame_106.png", "ct_train_1015_label_frame_181.png", "ct_train_1003_label_frame_99.png", "ct_train_1016_label_frame_149.png", "ct_train_1014_label_frame_74.png", "ct_train_1003_label_frame_48.png", "ct_train_1017_label_frame_82.png", "ct_train_1014_label_frame_347.png", "ct_train_1008_label_frame_107.png", "ct_train_1006_label_frame_17.png", "ct_train_1020_label_frame_223.png", "ct_train_1018_label_frame_80.png", "ct_train_1003_label_frame_77.png", "ct_train_1020_label_frame_339.png", "ct_train_1014_label_frame_237.png", "ct_train_1009_label_frame_201.png", "ct_train_1018_label_frame_170.png", "ct_train_1003_label_frame_65.png", "ct_train_1014_label_frame_8.png", "ct_train_1006_label_frame_14.png", "ct_train_1020_label_frame_80.png", "ct_train_1020_label_frame_302.png", "ct_train_1009_label_frame_172.png", "ct_train_1014_label_frame_26.png", "ct_train_1016_label_frame_300.png", "ct_train_1018_label_frame_153.png", "ct_train_1001_label_frame_360.png", "ct_train_1014_label_frame_179.png", "ct_train_1020_label_frame_246.png", "ct_train_1011_label_frame_228.png", "ct_train_1009_label_frame_147.png", "ct_train_1014_label_frame_309.png", "ct_train_1003_label_frame_224.png", "ct_train_1020_label_frame_186.png", 
"ct_train_1009_label_frame_64.png", "ct_train_1001_label_frame_42.png", "ct_train_1003_label_frame_53.png", "ct_train_1008_label_frame_158.png", "ct_train_1006_label_frame_140.png", "ct_train_1014_label_frame_80.png", "ct_train_1014_label_frame_40.png", "ct_train_1016_label_frame_100.png", "ct_train_1016_label_frame_118.png", "ct_train_1003_label_frame_231.png", "ct_train_1006_label_frame_141.png", "ct_train_1020_label_frame_144.png", "ct_train_1014_label_frame_205.png", "ct_train_1015_label_frame_46.png", "ct_train_1003_label_frame_287.png", "ct_train_1015_label_frame_176.png", "ct_train_1014_label_frame_135.png", "ct_train_1011_label_frame_140.png", "ct_train_1020_label_frame_304.png", "ct_train_1003_label_frame_18.png", "ct_train_1017_label_frame_130.png", "ct_train_1015_label_frame_114.png", "ct_train_1018_label_frame_40.png", "ct_train_1009_label_frame_2.png", "ct_train_1016_label_frame_120.png", "ct_train_1001_label_frame_259.png", "ct_train_1006_label_frame_28.png", "ct_train_1020_label_frame_275.png", "ct_train_1020_label_frame_331.png", "ct_train_1015_label_frame_110.png", "ct_train_1018_label_frame_151.png", "ct_train_1005_label_frame_45.png", "ct_train_1020_label_frame_318.png", "ct_train_1009_label_frame_111.png", "ct_train_1005_label_frame_126.png", "ct_train_1008_label_frame_43.png", "ct_train_1001_label_frame_283.png", "ct_train_1006_label_frame_126.png", "ct_train_1017_label_frame_135.png", "ct_train_1001_label_frame_289.png", "ct_train_1015_label_frame_223.png", "ct_train_1020_label_frame_34.png", "ct_train_1020_label_frame_361.png", "ct_train_1020_label_frame_307.png", "ct_train_1006_label_frame_233.png", "ct_train_1014_label_frame_203.png", "ct_train_1011_label_frame_111.png", "ct_train_1003_label_frame_254.png", "ct_train_1005_label_frame_30.png", "ct_train_1001_label_frame_361.png", "ct_train_1020_label_frame_362.png", "ct_train_1020_label_frame_243.png", "ct_train_1003_label_frame_105.png", "ct_train_1018_label_frame_159.png", "ct_train_1008_label_frame_160.png", "ct_train_1009_label_frame_22.png", "ct_train_1009_label_frame_283.png", "ct_train_1017_label_frame_91.png", "ct_train_1008_label_frame_25.png", "ct_train_1005_label_frame_148.png", "ct_train_1014_label_frame_71.png", "ct_train_1009_label_frame_212.png", "ct_train_1020_label_frame_141.png", "ct_train_1003_label_frame_84.png", "ct_train_1017_label_frame_250.png", "ct_train_1008_label_frame_196.png", "ct_train_1005_label_frame_59.png", "ct_train_1017_label_frame_111.png", "ct_train_1020_label_frame_30.png", "ct_train_1014_label_frame_88.png", "ct_train_1008_label_frame_15.png", "ct_train_1011_label_frame_116.png", "ct_train_1005_label_frame_94.png", "ct_train_1018_label_frame_167.png", "ct_train_1005_label_frame_22.png", "ct_train_1020_label_frame_115.png", "ct_train_1011_label_frame_64.png", "ct_train_1003_label_frame_195.png", "ct_train_1011_label_frame_104.png", "ct_train_1001_label_frame_329.png", "ct_train_1009_label_frame_292.png", "ct_train_1006_label_frame_213.png", "ct_train_1003_label_frame_204.png", "ct_train_1009_label_frame_269.png", "ct_train_1006_label_frame_183.png", "ct_train_1014_label_frame_39.png", "ct_train_1011_label_frame_183.png", "ct_train_1005_label_frame_12.png", "ct_train_1020_label_frame_352.png", "ct_train_1014_label_frame_142.png", "ct_train_1017_label_frame_225.png", "ct_train_1016_label_frame_239.png", "ct_train_1017_label_frame_81.png", "ct_train_1016_label_frame_190.png", "ct_train_1008_label_frame_45.png", "ct_train_1016_label_frame_58.png", "ct_train_1017_label_frame_5.png", 
"ct_train_1001_label_frame_332.png", "ct_train_1005_label_frame_175.png", "ct_train_1001_label_frame_68.png", "ct_train_1005_label_frame_9.png", "ct_train_1005_label_frame_75.png", "ct_train_1001_label_frame_279.png", "ct_train_1014_label_frame_217.png", "ct_train_1020_label_frame_17.png", "ct_train_1020_label_frame_87.png", "ct_train_1018_label_frame_54.png", "ct_train_1015_label_frame_150.png", "ct_train_1015_label_frame_85.png", "ct_train_1003_label_frame_111.png", "ct_train_1005_label_frame_87.png", "ct_train_1015_label_frame_20.png", "ct_train_1003_label_frame_7.png", "ct_train_1001_label_frame_33.png", "ct_train_1014_label_frame_29.png", "ct_train_1011_label_frame_221.png", "ct_train_1015_label_frame_96.png", "ct_train_1017_label_frame_268.png", "ct_train_1017_label_frame_210.png", "ct_train_1020_label_frame_198.png", "ct_train_1016_label_frame_296.png", "ct_train_1016_label_frame_177.png", "ct_train_1003_label_frame_30.png", "ct_train_1006_label_frame_52.png", "ct_train_1011_label_frame_18.png", "ct_train_1005_label_frame_36.png", "ct_train_1005_label_frame_149.png", "ct_train_1018_label_frame_157.png", "ct_train_1018_label_frame_147.png", "ct_train_1017_label_frame_260.png", "ct_train_1016_label_frame_73.png", "ct_train_1014_label_frame_67.png", "ct_train_1001_label_frame_213.png", "ct_train_1003_label_frame_101.png", "ct_train_1003_label_frame_3.png", "ct_train_1009_label_frame_181.png", "ct_train_1009_label_frame_73.png", "ct_train_1017_label_frame_214.png", "ct_train_1011_label_frame_56.png", "ct_train_1005_label_frame_96.png", "ct_train_1005_label_frame_124.png", "ct_train_1011_label_frame_231.png", "ct_train_1001_label_frame_207.png", "ct_train_1008_label_frame_20.png", "ct_train_1009_label_frame_264.png", "ct_train_1015_label_frame_231.png", "ct_train_1011_label_frame_123.png", "ct_train_1001_label_frame_16.png", "ct_train_1009_label_frame_120.png", "ct_train_1011_label_frame_191.png", "ct_train_1008_label_frame_44.png", "ct_train_1017_label_frame_204.png", "ct_train_1006_label_frame_118.png", "ct_train_1003_label_frame_112.png", "ct_train_1006_label_frame_121.png", "ct_train_1014_label_frame_234.png", "ct_train_1011_label_frame_68.png", "ct_train_1009_label_frame_287.png", "ct_train_1003_label_frame_228.png", "ct_train_1020_label_frame_121.png", "ct_train_1009_label_frame_206.png", "ct_train_1006_label_frame_155.png", "ct_train_1018_label_frame_57.png", "ct_train_1018_label_frame_47.png", "ct_train_1020_label_frame_184.png", "ct_train_1014_label_frame_169.png", "ct_train_1003_label_frame_63.png", "ct_train_1014_label_frame_155.png", "ct_train_1020_label_frame_323.png", "ct_train_1017_label_frame_265.png", "ct_train_1016_label_frame_96.png", "ct_train_1018_label_frame_183.png", "ct_train_1020_label_frame_356.png", "ct_train_1001_label_frame_356.png", "ct_train_1008_label_frame_105.png", "ct_train_1001_label_frame_204.png", "ct_train_1020_label_frame_24.png", "ct_train_1018_label_frame_83.png", "ct_train_1003_label_frame_144.png", "ct_train_1020_label_frame_350.png", "ct_train_1014_label_frame_279.png", "ct_train_1018_label_frame_174.png", "ct_train_1008_label_frame_21.png", "ct_train_1014_label_frame_232.png", "ct_train_1020_label_frame_273.png", "ct_train_1014_label_frame_109.png", "ct_train_1018_label_frame_46.png", "ct_train_1011_label_frame_122.png", "ct_train_1014_label_frame_186.png", "ct_train_1009_label_frame_127.png", "ct_train_1017_label_frame_108.png", "ct_train_1001_label_frame_28.png", "ct_train_1020_label_frame_21.png", "ct_train_1017_label_frame_231.png", 
"ct_train_1005_label_frame_70.png", "ct_train_1003_label_frame_281.png", "ct_train_1017_label_frame_262.png", "ct_train_1016_label_frame_277.png", "ct_train_1014_label_frame_162.png", "ct_train_1020_label_frame_65.png", "ct_train_1001_label_frame_230.png", "ct_train_1006_label_frame_246.png", "ct_train_1020_label_frame_336.png", "ct_train_1003_label_frame_165.png", "ct_train_1001_label_frame_53.png", "ct_train_1015_label_frame_254.png", "ct_train_1009_label_frame_276.png", "ct_train_1017_label_frame_243.png", "ct_train_1009_label_frame_220.png", "ct_train_1003_label_frame_210.png", "ct_train_1001_label_frame_240.png", "ct_train_1015_label_frame_196.png", "ct_train_1001_label_frame_336.png", "ct_train_1016_label_frame_44.png", "ct_train_1008_label_frame_192.png", "ct_train_1006_label_frame_110.png", "ct_train_1009_label_frame_56.png", "ct_train_1005_label_frame_145.png", "ct_train_1009_label_frame_258.png", "ct_train_1003_label_frame_260.png", "ct_train_1001_label_frame_243.png", "ct_train_1006_label_frame_148.png", "ct_train_1003_label_frame_211.png", "ct_train_1017_label_frame_126.png", "ct_train_1006_label_frame_69.png", "ct_train_1015_label_frame_78.png", "ct_train_1014_label_frame_294.png", "ct_train_1003_label_frame_149.png", "ct_train_1018_label_frame_73.png", "ct_train_1005_label_frame_102.png", "ct_train_1003_label_frame_1.png", "ct_train_1009_label_frame_40.png", "ct_train_1001_label_frame_137.png", "ct_train_1003_label_frame_155.png", "ct_train_1018_label_frame_145.png", "ct_train_1003_label_frame_124.png", "ct_train_1006_label_frame_70.png", "ct_train_1011_label_frame_165.png", "ct_train_1015_label_frame_87.png", "ct_train_1020_label_frame_319.png", "ct_train_1008_label_frame_134.png", "ct_train_1009_label_frame_198.png", "ct_train_1005_label_frame_93.png", "ct_train_1017_label_frame_73.png", "ct_train_1001_label_frame_201.png", "ct_train_1016_label_frame_221.png", "ct_train_1016_label_frame_124.png", "ct_train_1003_label_frame_296.png", "ct_train_1020_label_frame_57.png", "ct_train_1001_label_frame_168.png", "ct_train_1011_label_frame_187.png", "ct_train_1003_label_frame_209.png", "ct_train_1001_label_frame_74.png", "ct_train_1008_label_frame_71.png", "ct_train_1014_label_frame_321.png", "ct_train_1001_label_frame_61.png", "ct_train_1017_label_frame_275.png", "ct_train_1016_label_frame_196.png", "ct_train_1015_label_frame_166.png", "ct_train_1020_label_frame_201.png", "ct_train_1018_label_frame_113.png", "ct_train_1015_label_frame_80.png", "ct_train_1003_label_frame_156.png", "ct_train_1014_label_frame_329.png", "ct_train_1003_label_frame_248.png", "ct_train_1011_label_frame_208.png", "ct_train_1009_label_frame_39.png", "ct_train_1014_label_frame_134.png", "ct_train_1017_label_frame_208.png", "ct_train_1014_label_frame_167.png", "ct_train_1009_label_frame_143.png", "ct_train_1018_label_frame_82.png", "ct_train_1016_label_frame_219.png", "ct_train_1016_label_frame_153.png", "ct_train_1001_label_frame_10.png", "ct_train_1009_label_frame_190.png", "ct_train_1016_label_frame_261.png", "ct_train_1020_label_frame_193.png", "ct_train_1017_label_frame_162.png", "ct_train_1017_label_frame_193.png", "ct_train_1008_label_frame_154.png", "ct_train_1015_label_frame_255.png", "ct_train_1008_label_frame_39.png", "ct_train_1008_label_frame_183.png", "ct_train_1016_label_frame_175.png", "ct_train_1001_label_frame_198.png", "ct_train_1017_label_frame_67.png", "ct_train_1018_label_frame_97.png", "ct_train_1005_label_frame_50.png", "ct_train_1018_label_frame_45.png", 
"ct_train_1009_label_frame_45.png", "ct_train_1016_label_frame_20.png", "ct_train_1005_label_frame_80.png", "ct_train_1005_label_frame_54.png", "ct_train_1015_label_frame_35.png", "ct_train_1020_label_frame_156.png", "ct_train_1014_label_frame_185.png", "ct_train_1014_label_frame_227.png", "ct_train_1020_label_frame_181.png", "ct_train_1009_label_frame_286.png", "ct_train_1018_label_frame_75.png", "ct_train_1014_label_frame_350.png", "ct_train_1017_label_frame_41.png", "ct_train_1011_label_frame_6.png", "ct_train_1001_label_frame_302.png", "ct_train_1017_label_frame_62.png", "ct_train_1020_label_frame_104.png", "ct_train_1016_label_frame_88.png", "ct_train_1001_label_frame_219.png", "ct_train_1015_label_frame_183.png", "ct_train_1017_label_frame_25.png", "ct_train_1020_label_frame_303.png", "ct_train_1015_label_frame_106.png", "ct_train_1016_label_frame_30.png", "ct_train_1003_label_frame_117.png", "ct_train_1009_label_frame_13.png", "ct_train_1005_label_frame_157.png", "ct_train_1003_label_frame_225.png", "ct_train_1006_label_frame_204.png", "ct_train_1003_label_frame_221.png", "ct_train_1009_label_frame_223.png", "ct_train_1003_label_frame_238.png", "ct_train_1011_label_frame_179.png", "ct_train_1001_label_frame_131.png", "ct_train_1017_label_frame_70.png", "ct_train_1011_label_frame_118.png", "ct_train_1001_label_frame_18.png", "ct_train_1009_label_frame_205.png", "ct_train_1008_label_frame_147.png", "ct_train_1018_label_frame_66.png", "ct_train_1005_label_frame_3.png", "ct_train_1003_label_frame_17.png", "ct_train_1005_label_frame_69.png", "ct_train_1009_label_frame_241.png", "ct_train_1020_label_frame_178.png", "ct_train_1005_label_frame_33.png", "ct_train_1020_label_frame_43.png", "ct_train_1008_label_frame_212.png", "ct_train_1008_label_frame_70.png", "ct_train_1005_label_frame_143.png", "ct_train_1011_label_frame_216.png", "ct_train_1011_label_frame_138.png", "ct_train_1005_label_frame_82.png", "ct_train_1009_label_frame_227.png", "ct_train_1016_label_frame_182.png", "ct_train_1001_label_frame_142.png", "ct_train_1001_label_frame_191.png", "ct_train_1017_label_frame_220.png", "ct_train_1017_label_frame_194.png", "ct_train_1016_label_frame_45.png", "ct_train_1008_label_frame_56.png", "ct_train_1015_label_frame_19.png", "ct_train_1014_label_frame_106.png", "ct_train_1015_label_frame_123.png", "ct_train_1009_label_frame_3.png", "ct_train_1015_label_frame_67.png", "ct_train_1003_label_frame_42.png", "ct_train_1005_label_frame_62.png", "ct_train_1014_label_frame_116.png", "ct_train_1011_label_frame_193.png", "ct_train_1015_label_frame_49.png", "ct_train_1008_label_frame_195.png", "ct_train_1009_label_frame_239.png", "ct_train_1017_label_frame_113.png", "ct_train_1018_label_frame_96.png", "ct_train_1006_label_frame_120.png", "ct_train_1011_label_frame_230.png", "ct_train_1009_label_frame_15.png", "ct_train_1001_label_frame_231.png", "ct_train_1009_label_frame_160.png", "ct_train_1006_label_frame_94.png", "ct_train_1020_label_frame_77.png", "ct_train_1016_label_frame_284.png", "ct_train_1014_label_frame_101.png", "ct_train_1001_label_frame_116.png", "ct_train_1014_label_frame_17.png", "ct_train_1003_label_frame_81.png", "ct_train_1001_label_frame_313.png", "ct_train_1014_label_frame_146.png", "ct_train_1006_label_frame_185.png", "ct_train_1005_label_frame_28.png", "ct_train_1016_label_frame_331.png", "ct_train_1005_label_frame_114.png", "ct_train_1016_label_frame_249.png", "ct_train_1020_label_frame_355.png", "ct_train_1014_label_frame_195.png", "ct_train_1006_label_frame_35.png", 
"ct_train_1001_label_frame_239.png", "ct_train_1011_label_frame_211.png", "ct_train_1017_label_frame_185.png", "ct_train_1009_label_frame_191.png", "ct_train_1015_label_frame_175.png", "ct_train_1014_label_frame_24.png", "ct_train_1001_label_frame_208.png", "ct_train_1009_label_frame_275.png", "ct_train_1006_label_frame_165.png", "ct_train_1006_label_frame_182.png", "ct_train_1018_label_frame_166.png", "ct_train_1008_label_frame_203.png", "ct_train_1009_label_frame_208.png", "ct_train_1001_label_frame_44.png", "ct_train_1011_label_frame_76.png", "ct_train_1017_label_frame_154.png", "ct_train_1011_label_frame_127.png", "ct_train_1006_label_frame_112.png", "ct_train_1001_label_frame_103.png", "ct_train_1009_label_frame_72.png", "ct_train_1020_label_frame_249.png", "ct_train_1001_label_frame_5.png", "ct_train_1011_label_frame_139.png", "ct_train_1017_label_frame_76.png", "ct_train_1018_label_frame_152.png", "ct_train_1016_label_frame_285.png", "ct_train_1016_label_frame_36.png", "ct_train_1009_label_frame_141.png", "ct_train_1016_label_frame_77.png", "ct_train_1006_label_frame_42.png", "ct_train_1009_label_frame_115.png", "ct_train_1006_label_frame_129.png", "ct_train_1008_label_frame_120.png", "ct_train_1006_label_frame_102.png", "ct_train_1020_label_frame_357.png", "ct_train_1003_label_frame_272.png", "ct_train_1020_label_frame_257.png", "ct_train_1006_label_frame_104.png", "ct_train_1001_label_frame_184.png", "ct_train_1020_label_frame_202.png", "ct_train_1017_label_frame_93.png", "ct_train_1011_label_frame_98.png", "ct_train_1014_label_frame_212.png", "ct_train_1015_label_frame_29.png", "ct_train_1011_label_frame_185.png", "ct_train_1009_label_frame_192.png", "ct_train_1009_label_frame_92.png", "ct_train_1001_label_frame_173.png", "ct_train_1014_label_frame_96.png", "ct_train_1009_label_frame_140.png", "ct_train_1001_label_frame_130.png", "ct_train_1003_label_frame_242.png", "ct_train_1018_label_frame_178.png", "ct_train_1005_label_frame_27.png", "ct_train_1011_label_frame_83.png", "ct_train_1009_label_frame_10.png", "ct_train_1016_label_frame_87.png", "ct_train_1015_label_frame_180.png", "ct_train_1011_label_frame_73.png", "ct_train_1011_label_frame_119.png", "ct_train_1011_label_frame_194.png", "ct_train_1003_label_frame_285.png", "ct_train_1009_label_frame_242.png", "ct_train_1003_label_frame_70.png", "ct_train_1003_label_frame_94.png", "ct_train_1006_label_frame_67.png", "ct_train_1009_label_frame_157.png", "ct_train_1018_label_frame_129.png", "ct_train_1020_label_frame_340.png", "ct_train_1014_label_frame_355.png", "ct_train_1014_label_frame_73.png", "ct_train_1017_label_frame_60.png", "ct_train_1017_label_frame_3.png", "ct_train_1014_label_frame_262.png", "ct_train_1014_label_frame_236.png", "ct_train_1017_label_frame_246.png", "ct_train_1015_label_frame_28.png", "ct_train_1017_label_frame_172.png", "ct_train_1018_label_frame_32.png", "ct_train_1015_label_frame_48.png", "ct_train_1003_label_frame_21.png", "ct_train_1016_label_frame_251.png", "ct_train_1003_label_frame_37.png", "ct_train_1014_label_frame_287.png", "ct_train_1008_label_frame_51.png", "ct_train_1015_label_frame_215.png", "ct_train_1005_label_frame_72.png", "ct_train_1011_label_frame_67.png", "ct_train_1016_label_frame_208.png", "ct_train_1011_label_frame_157.png", "ct_train_1014_label_frame_263.png", "ct_train_1016_label_frame_49.png", "ct_train_1006_label_frame_226.png", "ct_train_1016_label_frame_290.png", "ct_train_1009_label_frame_36.png", "ct_train_1020_label_frame_228.png", "ct_train_1008_label_frame_152.png", 
"ct_train_1015_label_frame_270.png", "ct_train_1006_label_frame_22.png", "ct_train_1001_label_frame_327.png", "ct_train_1003_label_frame_98.png", "ct_train_1011_label_frame_162.png", "ct_train_1005_label_frame_174.png", "ct_train_1014_label_frame_44.png", "ct_train_1020_label_frame_182.png", "ct_train_1014_label_frame_268.png", "ct_train_1016_label_frame_126.png", "ct_train_1020_label_frame_147.png", "ct_train_1016_label_frame_209.png", "ct_train_1017_label_frame_29.png", "ct_train_1009_label_frame_151.png", "ct_train_1016_label_frame_140.png", "ct_train_1020_label_frame_220.png", "ct_train_1016_label_frame_173.png", "ct_train_1001_label_frame_75.png", "ct_train_1017_label_frame_0.png", "ct_train_1015_label_frame_134.png", "ct_train_1009_label_frame_6.png", "ct_train_1017_label_frame_71.png", "ct_train_1008_label_frame_137.png", "ct_train_1005_label_frame_48.png", "ct_train_1006_label_frame_79.png", "ct_train_1018_label_frame_30.png", "ct_train_1008_label_frame_30.png", "ct_train_1001_label_frame_72.png", "ct_train_1003_label_frame_86.png", "ct_train_1015_label_frame_197.png", "ct_train_1005_label_frame_52.png", "ct_train_1018_label_frame_77.png", "ct_train_1001_label_frame_342.png", "ct_train_1014_label_frame_322.png", "ct_train_1015_label_frame_249.png", "ct_train_1016_label_frame_254.png", "ct_train_1018_label_frame_10.png", "ct_train_1014_label_frame_65.png", "ct_train_1015_label_frame_289.png", "ct_train_1001_label_frame_169.png", "ct_train_1020_label_frame_135.png", "ct_train_1018_label_frame_34.png", "ct_train_1008_label_frame_143.png", "ct_train_1014_label_frame_267.png", "ct_train_1017_label_frame_253.png", "ct_train_1008_label_frame_124.png", "ct_train_1020_label_frame_5.png", "ct_train_1020_label_frame_92.png", "ct_train_1020_label_frame_142.png", "ct_train_1017_label_frame_169.png", "ct_train_1003_label_frame_140.png", "ct_train_1020_label_frame_86.png", "ct_train_1020_label_frame_149.png", "ct_train_1001_label_frame_177.png", "ct_train_1017_label_frame_101.png", "ct_train_1009_label_frame_188.png", "ct_train_1003_label_frame_64.png", "ct_train_1020_label_frame_23.png", "ct_train_1005_label_frame_113.png", "ct_train_1011_label_frame_192.png", "ct_train_1003_label_frame_145.png", "ct_train_1006_label_frame_58.png", "ct_train_1001_label_frame_274.png", "ct_train_1009_label_frame_87.png", "ct_train_1006_label_frame_150.png", "ct_train_1016_label_frame_137.png", "ct_train_1005_label_frame_53.png", "ct_train_1009_label_frame_231.png", "ct_train_1014_label_frame_170.png", "ct_train_1008_label_frame_128.png", "ct_train_1006_label_frame_31.png", "ct_train_1020_label_frame_36.png", "ct_train_1020_label_frame_299.png", "ct_train_1017_label_frame_102.png", "ct_train_1008_label_frame_205.png", "ct_train_1015_label_frame_130.png", "ct_train_1001_label_frame_117.png", "ct_train_1003_label_frame_102.png", "ct_train_1001_label_frame_358.png", "ct_train_1009_label_frame_124.png", "ct_train_1001_label_frame_227.png", "ct_train_1014_label_frame_173.png", "ct_train_1011_label_frame_19.png", "ct_train_1016_label_frame_135.png", "ct_train_1008_label_frame_132.png", "ct_train_1016_label_frame_123.png", "ct_train_1001_label_frame_84.png", "ct_train_1017_label_frame_276.png", "ct_train_1011_label_frame_130.png", "ct_train_1015_label_frame_59.png", "ct_train_1005_label_frame_140.png", "ct_train_1015_label_frame_209.png", "ct_train_1015_label_frame_71.png", "ct_train_1018_label_frame_59.png", "ct_train_1011_label_frame_219.png", "ct_train_1017_label_frame_21.png", "ct_train_1016_label_frame_295.png", 
"ct_train_1008_label_frame_138.png", "ct_train_1011_label_frame_143.png", "ct_train_1009_label_frame_26.png", "ct_train_1016_label_frame_298.png", "ct_train_1020_label_frame_45.png", "ct_train_1011_label_frame_10.png", "ct_train_1014_label_frame_163.png", "ct_train_1003_label_frame_199.png", "ct_train_1017_label_frame_270.png", "ct_train_1003_label_frame_134.png", "ct_train_1018_label_frame_53.png", "ct_train_1014_label_frame_305.png", "ct_train_1009_label_frame_217.png", "ct_train_1020_label_frame_66.png", "ct_train_1015_label_frame_250.png", "ct_train_1016_label_frame_169.png", "ct_train_1011_label_frame_15.png", "ct_train_1011_label_frame_32.png", "ct_train_1011_label_frame_200.png", "ct_train_1009_label_frame_129.png", "ct_train_1016_label_frame_67.png", "ct_train_1009_label_frame_20.png", "ct_train_1003_label_frame_167.png", "ct_train_1003_label_frame_182.png", "ct_train_1001_label_frame_343.png", "ct_train_1014_label_frame_297.png", "ct_train_1014_label_frame_126.png", "ct_train_1009_label_frame_27.png", "ct_train_1020_label_frame_74.png", "ct_train_1014_label_frame_87.png", "ct_train_1014_label_frame_117.png", "ct_train_1016_label_frame_99.png", "ct_train_1001_label_frame_247.png", "ct_train_1018_label_frame_117.png", "ct_train_1018_label_frame_67.png", "ct_train_1020_label_frame_13.png", "ct_train_1015_label_frame_15.png", "ct_train_1001_label_frame_228.png", "ct_train_1017_label_frame_116.png", "ct_train_1016_label_frame_53.png", "ct_train_1020_label_frame_15.png", "ct_train_1009_label_frame_17.png", "ct_train_1006_label_frame_63.png", "ct_train_1015_label_frame_201.png", "ct_train_1015_label_frame_125.png", "ct_train_1001_label_frame_351.png", "ct_train_1001_label_frame_190.png", "ct_train_1014_label_frame_323.png", "ct_train_1017_label_frame_213.png", "ct_train_1006_label_frame_178.png", "ct_train_1008_label_frame_122.png", "ct_train_1006_label_frame_101.png", "ct_train_1008_label_frame_24.png", "ct_train_1016_label_frame_231.png", "ct_train_1005_label_frame_13.png", "ct_train_1009_label_frame_11.png", "ct_train_1001_label_frame_319.png", "ct_train_1014_label_frame_281.png", "ct_train_1003_label_frame_294.png", "ct_train_1018_label_frame_141.png", "ct_train_1015_label_frame_119.png", "ct_train_1014_label_frame_299.png", "ct_train_1008_label_frame_118.png", "ct_train_1015_label_frame_298.png", "ct_train_1003_label_frame_244.png", "ct_train_1016_label_frame_260.png", "ct_train_1005_label_frame_74.png", "ct_train_1015_label_frame_6.png", "ct_train_1014_label_frame_192.png", "ct_train_1011_label_frame_86.png", "ct_train_1020_label_frame_347.png", "ct_train_1008_label_frame_78.png", "ct_train_1001_label_frame_354.png", "ct_train_1008_label_frame_46.png", "ct_train_1003_label_frame_100.png", "ct_train_1006_label_frame_130.png", "ct_train_1016_label_frame_37.png", "ct_train_1014_label_frame_214.png", "ct_train_1011_label_frame_33.png", "ct_train_1011_label_frame_78.png", "ct_train_1011_label_frame_34.png", "ct_train_1011_label_frame_108.png", "ct_train_1017_label_frame_239.png", "ct_train_1003_label_frame_220.png", "ct_train_1014_label_frame_245.png", "ct_train_1001_label_frame_88.png", "ct_train_1014_label_frame_139.png", "ct_train_1009_label_frame_60.png", "ct_train_1014_label_frame_131.png", "ct_train_1014_label_frame_45.png", "ct_train_1001_label_frame_52.png", "ct_train_1020_label_frame_294.png", "ct_train_1001_label_frame_235.png", "ct_train_1001_label_frame_357.png", "ct_train_1001_label_frame_91.png", "ct_train_1001_label_frame_295.png", "ct_train_1008_label_frame_218.png", 
"ct_train_1001_label_frame_46.png", "ct_train_1006_label_frame_187.png", "ct_train_1014_label_frame_224.png", "ct_train_1001_label_frame_287.png", "ct_train_1015_label_frame_16.png", "ct_train_1020_label_frame_256.png", "ct_train_1001_label_frame_87.png", "ct_train_1015_label_frame_159.png", "ct_train_1001_label_frame_163.png", "ct_train_1020_label_frame_154.png", "ct_train_1015_label_frame_149.png", "ct_train_1008_label_frame_49.png", "ct_train_1001_label_frame_211.png", "ct_train_1014_label_frame_1.png", "ct_train_1014_label_frame_188.png", "ct_train_1014_label_frame_190.png", "ct_train_1018_label_frame_74.png", "ct_train_1016_label_frame_183.png", "ct_train_1015_label_frame_89.png", "ct_train_1017_label_frame_11.png", "ct_train_1015_label_frame_24.png", "ct_train_1003_label_frame_12.png", "ct_train_1015_label_frame_259.png", "ct_train_1016_label_frame_202.png", "ct_train_1015_label_frame_7.png", "ct_train_1006_label_frame_207.png", "ct_train_1014_label_frame_222.png", "ct_train_1018_label_frame_99.png", "ct_train_1016_label_frame_316.png", "ct_train_1017_label_frame_221.png", "ct_train_1005_label_frame_49.png", "ct_train_1006_label_frame_92.png", "ct_train_1017_label_frame_222.png", "ct_train_1017_label_frame_184.png", "ct_train_1006_label_frame_247.png", "ct_train_1014_label_frame_238.png", "ct_train_1018_label_frame_177.png", "ct_train_1001_label_frame_63.png", "ct_train_1014_label_frame_158.png", "ct_train_1016_label_frame_225.png", "ct_train_1001_label_frame_281.png", "ct_train_1016_label_frame_301.png", "ct_train_1011_label_frame_166.png", "ct_train_1014_label_frame_79.png", "ct_train_1009_label_frame_146.png", "ct_train_1001_label_frame_185.png", "ct_train_1009_label_frame_233.png", "ct_train_1008_label_frame_80.png", "ct_train_1017_label_frame_141.png", "ct_train_1003_label_frame_215.png", "ct_train_1011_label_frame_52.png", "ct_train_1003_label_frame_247.png", "ct_train_1016_label_frame_235.png", "ct_train_1020_label_frame_265.png", "ct_train_1001_label_frame_9.png", "ct_train_1015_label_frame_294.png", "ct_train_1018_label_frame_58.png", "ct_train_1009_label_frame_19.png", "ct_train_1014_label_frame_46.png", "ct_train_1011_label_frame_184.png", "ct_train_1011_label_frame_79.png", "ct_train_1003_label_frame_180.png", "ct_train_1003_label_frame_266.png", "ct_train_1001_label_frame_43.png", "ct_train_1001_label_frame_254.png", "ct_train_1020_label_frame_79.png", "ct_train_1020_label_frame_189.png", "ct_train_1011_label_frame_218.png", "ct_train_1009_label_frame_179.png", "ct_train_1016_label_frame_272.png", "ct_train_1017_label_frame_143.png", "ct_train_1014_label_frame_218.png", "ct_train_1020_label_frame_134.png", "ct_train_1016_label_frame_178.png", "ct_train_1016_label_frame_69.png", "ct_train_1006_label_frame_166.png", "ct_train_1020_label_frame_31.png", "ct_train_1018_label_frame_155.png", "ct_train_1014_label_frame_16.png", "ct_train_1015_label_frame_53.png", "ct_train_1017_label_frame_171.png", "ct_train_1015_label_frame_168.png", "ct_train_1016_label_frame_246.png", "ct_train_1006_label_frame_236.png", "ct_train_1016_label_frame_115.png", "ct_train_1014_label_frame_271.png", "ct_train_1014_label_frame_114.png", "ct_train_1003_label_frame_125.png", "ct_train_1005_label_frame_166.png", "ct_train_1015_label_frame_232.png", "ct_train_1014_label_frame_215.png", "ct_train_1015_label_frame_221.png", "ct_train_1009_label_frame_98.png", "ct_train_1005_label_frame_173.png", "ct_train_1009_label_frame_61.png", "ct_train_1008_label_frame_188.png", "ct_train_1020_label_frame_37.png", 
"ct_train_1008_label_frame_153.png", "ct_train_1014_label_frame_133.png", "ct_train_1001_label_frame_194.png", "ct_train_1009_label_frame_43.png", "ct_train_1014_label_frame_120.png", "ct_train_1011_label_frame_204.png", "ct_train_1014_label_frame_233.png", "ct_train_1008_label_frame_14.png", "ct_train_1003_label_frame_22.png", "ct_train_1020_label_frame_195.png", "ct_train_1003_label_frame_11.png", "ct_train_1005_label_frame_79.png", "ct_train_1020_label_frame_278.png", "ct_train_1006_label_frame_212.png", "ct_train_1006_label_frame_147.png", "ct_train_1006_label_frame_164.png", "ct_train_1009_label_frame_228.png", "ct_train_1018_label_frame_172.png", "ct_train_1017_label_frame_52.png", "ct_train_1006_label_frame_209.png", "ct_train_1020_label_frame_10.png", "ct_train_1011_label_frame_96.png", "ct_train_1014_label_frame_273.png", "ct_train_1015_label_frame_143.png", "ct_train_1015_label_frame_0.png", "ct_train_1005_label_frame_104.png", "ct_train_1006_label_frame_15.png", "ct_train_1014_label_frame_264.png", "ct_train_1006_label_frame_136.png", "ct_train_1014_label_frame_21.png", "ct_train_1011_label_frame_168.png", "ct_train_1008_label_frame_94.png", "ct_train_1020_label_frame_128.png", "ct_train_1016_label_frame_293.png", "ct_train_1015_label_frame_120.png", "ct_train_1020_label_frame_236.png", "ct_train_1020_label_frame_82.png", "ct_train_1016_label_frame_90.png", "ct_train_1005_label_frame_46.png", "ct_train_1011_label_frame_69.png", "ct_train_1009_label_frame_53.png", "ct_train_1006_label_frame_125.png", "ct_train_1016_label_frame_18.png", "ct_train_1016_label_frame_287.png", "ct_train_1015_label_frame_230.png", "ct_train_1006_label_frame_16.png", "ct_train_1015_label_frame_93.png", "ct_train_1003_label_frame_75.png", "ct_train_1011_label_frame_181.png", "ct_train_1003_label_frame_264.png", "ct_train_1003_label_frame_123.png", "ct_train_1015_label_frame_132.png", "ct_train_1009_label_frame_243.png", "ct_train_1005_label_frame_156.png", "ct_train_1020_label_frame_358.png", "ct_train_1003_label_frame_54.png", "ct_train_1008_label_frame_68.png", "ct_train_1017_label_frame_164.png", "ct_train_1006_label_frame_133.png", "ct_train_1001_label_frame_262.png", "ct_train_1014_label_frame_4.png", "ct_train_1011_label_frame_43.png", "ct_train_1017_label_frame_16.png", "ct_train_1020_label_frame_70.png", "ct_train_1006_label_frame_152.png", "ct_train_1011_label_frame_160.png", "ct_train_1016_label_frame_105.png", "ct_train_1017_label_frame_124.png", "ct_train_1009_label_frame_194.png", "ct_train_1009_label_frame_9.png", "ct_train_1001_label_frame_78.png", "ct_train_1017_label_frame_77.png", "ct_train_1008_label_frame_85.png", "ct_train_1020_label_frame_316.png", "ct_train_1016_label_frame_147.png", "ct_train_1016_label_frame_62.png", "ct_train_1018_label_frame_18.png", "ct_train_1001_label_frame_135.png", "ct_train_1015_label_frame_131.png", "ct_train_1017_label_frame_133.png", "ct_train_1020_label_frame_260.png", "ct_train_1008_label_frame_189.png", "ct_train_1014_label_frame_151.png", "ct_train_1016_label_frame_22.png", "ct_train_1016_label_frame_220.png", "ct_train_1014_label_frame_85.png", "ct_train_1020_label_frame_279.png", "ct_train_1006_label_frame_176.png", "ct_train_1003_label_frame_158.png", "ct_train_1014_label_frame_113.png", "ct_train_1017_label_frame_26.png", "ct_train_1005_label_frame_168.png", "ct_train_1008_label_frame_13.png", "ct_train_1015_label_frame_295.png", "ct_train_1018_label_frame_160.png", "ct_train_1014_label_frame_193.png", "ct_train_1008_label_frame_184.png", 
"ct_train_1008_label_frame_155.png", "ct_train_1009_label_frame_76.png", "ct_train_1005_label_frame_77.png", "ct_train_1003_label_frame_120.png", "ct_train_1001_label_frame_144.png", "ct_train_1011_label_frame_234.png", "ct_train_1014_label_frame_132.png", "ct_train_1009_label_frame_67.png", "ct_train_1001_label_frame_275.png", "ct_train_1017_label_frame_224.png", "ct_train_1011_label_frame_30.png", "ct_train_1016_label_frame_302.png", "ct_train_1008_label_frame_0.png", "ct_train_1016_label_frame_121.png", "ct_train_1005_label_frame_34.png", "ct_train_1014_label_frame_353.png", "ct_train_1020_label_frame_183.png", "ct_train_1016_label_frame_245.png", "ct_train_1001_label_frame_215.png", "ct_train_1017_label_frame_200.png", "ct_train_1001_label_frame_359.png", "ct_train_1003_label_frame_274.png", "ct_train_1006_label_frame_151.png", "ct_train_1001_label_frame_164.png", "ct_train_1011_label_frame_220.png", "ct_train_1018_label_frame_71.png", "ct_train_1015_label_frame_39.png", "ct_train_1018_label_frame_181.png", "ct_train_1009_label_frame_99.png", "ct_train_1020_label_frame_283.png", "ct_train_1020_label_frame_96.png", "ct_train_1008_label_frame_52.png", "ct_train_1015_label_frame_8.png", "ct_train_1001_label_frame_139.png", "ct_train_1003_label_frame_172.png", "ct_train_1006_label_frame_195.png", "ct_train_1009_label_frame_271.png", "ct_train_1008_label_frame_62.png", "ct_train_1009_label_frame_207.png", "ct_train_1011_label_frame_91.png", "ct_train_1003_label_frame_207.png", "ct_train_1009_label_frame_94.png", "ct_train_1011_label_frame_196.png", "ct_train_1001_label_frame_333.png", "ct_train_1001_label_frame_233.png", "ct_train_1015_label_frame_41.png", "ct_train_1018_label_frame_43.png", "ct_train_1020_label_frame_16.png", "ct_train_1016_label_frame_11.png", "ct_train_1014_label_frame_261.png", "ct_train_1017_label_frame_142.png", "ct_train_1008_label_frame_109.png", "ct_train_1005_label_frame_171.png", "ct_train_1003_label_frame_181.png", "ct_train_1008_label_frame_145.png", "ct_train_1009_label_frame_152.png", "ct_train_1001_label_frame_39.png", "ct_train_1001_label_frame_132.png", "ct_train_1017_label_frame_191.png", "ct_train_1001_label_frame_45.png", "ct_train_1016_label_frame_78.png", "ct_train_1014_label_frame_316.png", "ct_train_1005_label_frame_97.png", "ct_train_1017_label_frame_182.png", "ct_train_1011_label_frame_148.png", "ct_train_1016_label_frame_64.png", "ct_train_1018_label_frame_35.png", "ct_train_1005_label_frame_98.png", "ct_train_1003_label_frame_139.png", "ct_train_1005_label_frame_31.png", "ct_train_1003_label_frame_50.png", "ct_train_1014_label_frame_11.png", "ct_train_1003_label_frame_10.png", "ct_train_1001_label_frame_21.png", "ct_train_1003_label_frame_196.png", "ct_train_1016_label_frame_244.png", "ct_train_1014_label_frame_307.png", "ct_train_1011_label_frame_87.png", "ct_train_1003_label_frame_106.png", "ct_train_1016_label_frame_52.png", "ct_train_1014_label_frame_345.png", "ct_train_1001_label_frame_99.png", "ct_train_1020_label_frame_215.png", "ct_train_1003_label_frame_113.png", "ct_train_1015_label_frame_188.png", "ct_train_1009_label_frame_155.png", "ct_train_1015_label_frame_145.png", "ct_train_1008_label_frame_125.png", "ct_train_1009_label_frame_48.png", "ct_train_1017_label_frame_139.png", "ct_train_1006_label_frame_13.png", "ct_train_1011_label_frame_37.png", "ct_train_1001_label_frame_92.png", "ct_train_1001_label_frame_257.png", "ct_train_1003_label_frame_249.png", "ct_train_1016_label_frame_283.png", "ct_train_1009_label_frame_229.png", 
"ct_train_1016_label_frame_257.png", "ct_train_1017_label_frame_280.png", "ct_train_1017_label_frame_20.png", "ct_train_1017_label_frame_242.png", "ct_train_1018_label_frame_26.png", "ct_train_1006_label_frame_144.png", "ct_train_1014_label_frame_150.png", "ct_train_1016_label_frame_29.png", "ct_train_1003_label_frame_56.png", "ct_train_1020_label_frame_248.png", "ct_train_1016_label_frame_227.png", "ct_train_1009_label_frame_81.png", "ct_train_1009_label_frame_291.png", "ct_train_1018_label_frame_19.png", "ct_train_1014_label_frame_315.png", "ct_train_1003_label_frame_197.png", "ct_train_1015_label_frame_192.png", "ct_train_1016_label_frame_303.png", "ct_train_1014_label_frame_342.png", "ct_train_1018_label_frame_146.png", "ct_train_1016_label_frame_172.png", "ct_train_1018_label_frame_149.png", "ct_train_1016_label_frame_56.png", "ct_train_1020_label_frame_222.png", "ct_train_1001_label_frame_50.png", "ct_train_1016_label_frame_250.png", "ct_train_1001_label_frame_353.png", "ct_train_1015_label_frame_191.png", "ct_train_1008_label_frame_88.png", "ct_train_1011_label_frame_215.png", "ct_train_1001_label_frame_305.png", "ct_train_1018_label_frame_121.png", "ct_train_1005_label_frame_25.png", "ct_train_1016_label_frame_160.png", "ct_train_1020_label_frame_335.png", "ct_train_1018_label_frame_21.png", "ct_train_1008_label_frame_67.png", "ct_train_1001_label_frame_238.png", "ct_train_1006_label_frame_85.png", "ct_train_1008_label_frame_168.png", "ct_train_1015_label_frame_146.png", "ct_train_1006_label_frame_56.png", "ct_train_1014_label_frame_112.png", "ct_train_1011_label_frame_70.png", "ct_train_1006_label_frame_88.png", "ct_train_1015_label_frame_95.png", "ct_train_1001_label_frame_284.png", "ct_train_1003_label_frame_2.png", "ct_train_1003_label_frame_159.png", "ct_train_1015_label_frame_9.png", "ct_train_1017_label_frame_201.png", "ct_train_1008_label_frame_130.png", "ct_train_1015_label_frame_200.png", "ct_train_1020_label_frame_161.png", "ct_train_1008_label_frame_112.png", "ct_train_1015_label_frame_222.png", "ct_train_1001_label_frame_307.png", "ct_train_1020_label_frame_205.png", "ct_train_1015_label_frame_70.png", "ct_train_1016_label_frame_203.png", "ct_train_1017_label_frame_15.png", "ct_train_1017_label_frame_46.png", "ct_train_1020_label_frame_290.png", "ct_train_1006_label_frame_186.png", "ct_train_1009_label_frame_58.png", "ct_train_1020_label_frame_204.png", "ct_train_1020_label_frame_71.png", "ct_train_1009_label_frame_28.png", "ct_train_1008_label_frame_166.png", "ct_train_1001_label_frame_102.png", "ct_train_1020_label_frame_158.png", "ct_train_1014_label_frame_102.png", "ct_train_1014_label_frame_303.png", "ct_train_1001_label_frame_200.png", "ct_train_1020_label_frame_325.png", "ct_train_1008_label_frame_162.png", "ct_train_1016_label_frame_15.png", "ct_train_1009_label_frame_200.png", "ct_train_1001_label_frame_224.png", "ct_train_1015_label_frame_117.png", "ct_train_1016_label_frame_317.png", "ct_train_1015_label_frame_135.png", "ct_train_1017_label_frame_35.png", "ct_train_1018_label_frame_64.png", "ct_train_1020_label_frame_229.png", "ct_train_1009_label_frame_168.png", "ct_train_1020_label_frame_259.png", "ct_train_1016_label_frame_269.png", "ct_train_1017_label_frame_120.png", "ct_train_1020_label_frame_289.png", "ct_train_1020_label_frame_112.png", "ct_train_1008_label_frame_101.png", "ct_train_1020_label_frame_22.png", "ct_train_1014_label_frame_50.png", "ct_train_1020_label_frame_94.png", "ct_train_1020_label_frame_235.png", "ct_train_1001_label_frame_24.png", 
"ct_train_1015_label_frame_182.png", "ct_train_1003_label_frame_45.png", "ct_train_1003_label_frame_212.png", "ct_train_1020_label_frame_48.png", "ct_train_1009_label_frame_24.png", "ct_train_1015_label_frame_208.png", "ct_train_1003_label_frame_51.png", "ct_train_1011_label_frame_120.png", "ct_train_1020_label_frame_151.png", "ct_train_1006_label_frame_33.png", "ct_train_1011_label_frame_95.png", "ct_train_1001_label_frame_31.png", "ct_train_1011_label_frame_156.png", "ct_train_1020_label_frame_123.png", "ct_train_1001_label_frame_7.png", "ct_train_1008_label_frame_53.png", "ct_train_1003_label_frame_129.png", "ct_train_1014_label_frame_317.png", "ct_train_1017_label_frame_217.png", "ct_train_1006_label_frame_216.png", "ct_train_1003_label_frame_163.png", "ct_train_1020_label_frame_61.png", "ct_train_1020_label_frame_212.png", "ct_train_1005_label_frame_137.png", "ct_train_1001_label_frame_222.png", "ct_train_1006_label_frame_29.png", "ct_train_1006_label_frame_211.png", "ct_train_1003_label_frame_83.png", "ct_train_1009_label_frame_263.png", "ct_train_1003_label_frame_52.png", "ct_train_1017_label_frame_103.png", "ct_train_1009_label_frame_84.png", "ct_train_1006_label_frame_25.png", "ct_train_1003_label_frame_14.png", "ct_train_1003_label_frame_168.png", "ct_train_1017_label_frame_17.png", "ct_train_1014_label_frame_51.png", "ct_train_1014_label_frame_340.png", "ct_train_1016_label_frame_165.png", "ct_train_1020_label_frame_295.png", "ct_train_1020_label_frame_51.png", "ct_train_1001_label_frame_162.png", "ct_train_1017_label_frame_140.png", "ct_train_1015_label_frame_271.png", "ct_train_1008_label_frame_48.png", "ct_train_1015_label_frame_50.png", "ct_train_1017_label_frame_257.png", "ct_train_1005_label_frame_118.png", "ct_train_1018_label_frame_134.png", "ct_train_1020_label_frame_284.png", "ct_train_1008_label_frame_169.png", "ct_train_1009_label_frame_216.png", "ct_train_1014_label_frame_332.png", "ct_train_1020_label_frame_210.png", "ct_train_1001_label_frame_129.png", "ct_train_1014_label_frame_107.png", "ct_train_1018_label_frame_6.png", "ct_train_1015_label_frame_171.png", "ct_train_1006_label_frame_171.png", "ct_train_1011_label_frame_132.png", "ct_train_1015_label_frame_101.png", "ct_train_1011_label_frame_170.png", "ct_train_1011_label_frame_135.png", "ct_train_1015_label_frame_52.png", "ct_train_1017_label_frame_241.png", "ct_train_1018_label_frame_95.png", "ct_train_1008_label_frame_131.png", "ct_train_1016_label_frame_50.png", "ct_train_1015_label_frame_248.png", "ct_train_1003_label_frame_185.png", "ct_train_1003_label_frame_85.png", "ct_train_1008_label_frame_5.png", "ct_train_1001_label_frame_113.png", "ct_train_1001_label_frame_37.png", "ct_train_1018_label_frame_61.png", "ct_train_1001_label_frame_79.png", "ct_train_1020_label_frame_231.png", "ct_train_1014_label_frame_28.png", "ct_train_1009_label_frame_267.png", "ct_train_1003_label_frame_76.png", "ct_train_1001_label_frame_278.png", "ct_train_1014_label_frame_92.png", "ct_train_1003_label_frame_188.png", "ct_train_1014_label_frame_52.png", "ct_train_1015_label_frame_153.png", "ct_train_1016_label_frame_74.png", "ct_train_1008_label_frame_119.png", "ct_train_1011_label_frame_16.png", "ct_train_1017_label_frame_30.png", "ct_train_1016_label_frame_103.png", "ct_train_1006_label_frame_47.png", "ct_train_1016_label_frame_217.png", "ct_train_1018_label_frame_22.png", "ct_train_1020_label_frame_124.png", "ct_train_1020_label_frame_150.png", "ct_train_1020_label_frame_25.png", "ct_train_1018_label_frame_4.png", 
"ct_train_1003_label_frame_203.png", "ct_train_1009_label_frame_248.png", "ct_train_1014_label_frame_94.png", "ct_train_1003_label_frame_190.png", "ct_train_1014_label_frame_99.png", "ct_train_1014_label_frame_54.png", "ct_train_1009_label_frame_103.png", "ct_train_1020_label_frame_226.png", "ct_train_1017_label_frame_278.png", "ct_train_1003_label_frame_28.png", "ct_train_1011_label_frame_24.png", "ct_train_1015_label_frame_156.png", "ct_train_1008_label_frame_66.png", "ct_train_1016_label_frame_238.png", "ct_train_1017_label_frame_47.png", "ct_train_1018_label_frame_23.png", "ct_train_1001_label_frame_143.png", "ct_train_1003_label_frame_219.png", "ct_train_1016_label_frame_204.png", "ct_train_1011_label_frame_81.png", "ct_train_1020_label_frame_33.png", "ct_train_1015_label_frame_142.png", "ct_train_1011_label_frame_31.png", "ct_train_1003_label_frame_92.png", "ct_train_1001_label_frame_244.png", "ct_train_1020_label_frame_179.png", "ct_train_1020_label_frame_27.png", "ct_train_1015_label_frame_251.png", "ct_train_1005_label_frame_23.png", "ct_train_1016_label_frame_294.png", "ct_train_1006_label_frame_27.png", "ct_train_1001_label_frame_80.png", "ct_train_1017_label_frame_181.png", "ct_train_1015_label_frame_278.png", "ct_train_1018_label_frame_142.png", "ct_train_1005_label_frame_6.png", "ct_train_1016_label_frame_313.png", "ct_train_1016_label_frame_199.png", "ct_train_1014_label_frame_280.png", "ct_train_1006_label_frame_59.png", "ct_train_1018_label_frame_62.png", "ct_train_1016_label_frame_319.png", "ct_train_1020_label_frame_73.png", "ct_train_1008_label_frame_185.png", "ct_train_1009_label_frame_134.png", "ct_train_1001_label_frame_199.png", "ct_train_1001_label_frame_101.png", "ct_train_1017_label_frame_245.png", "ct_train_1008_label_frame_95.png", "ct_train_1001_label_frame_121.png", "ct_train_1009_label_frame_259.png", "ct_train_1001_label_frame_140.png", "ct_train_1018_label_frame_60.png", "ct_train_1008_label_frame_171.png", "ct_train_1017_label_frame_56.png", "ct_train_1017_label_frame_249.png", "ct_train_1016_label_frame_80.png", "ct_train_1003_label_frame_273.png", "ct_train_1001_label_frame_11.png", "ct_train_1005_label_frame_154.png", "ct_train_1001_label_frame_322.png", "ct_train_1020_label_frame_20.png", "ct_train_1009_label_frame_222.png", "ct_train_1003_label_frame_245.png", "ct_train_1005_label_frame_85.png", "ct_train_1015_label_frame_56.png", "ct_train_1006_label_frame_26.png", "ct_train_1003_label_frame_141.png", "ct_train_1014_label_frame_333.png", "ct_train_1015_label_frame_1.png", "ct_train_1020_label_frame_197.png", "ct_train_1015_label_frame_136.png", "ct_train_1005_label_frame_172.png", "ct_train_1016_label_frame_122.png", "ct_train_1020_label_frame_298.png", "ct_train_1014_label_frame_209.png", "ct_train_1005_label_frame_2.png", "ct_train_1020_label_frame_238.png", "ct_train_1005_label_frame_116.png", "ct_train_1016_label_frame_164.png", "ct_train_1020_label_frame_106.png", "ct_train_1017_label_frame_227.png", "ct_train_1016_label_frame_289.png", "ct_train_1017_label_frame_267.png", "ct_train_1005_label_frame_134.png", "ct_train_1018_label_frame_132.png", "ct_train_1011_label_frame_232.png", "ct_train_1017_label_frame_69.png", "ct_train_1003_label_frame_205.png", "ct_train_1011_label_frame_58.png", "ct_train_1015_label_frame_202.png", "ct_train_1016_label_frame_86.png", "ct_train_1020_label_frame_240.png", "ct_train_1016_label_frame_311.png", "ct_train_1006_label_frame_137.png", "ct_train_1011_label_frame_26.png", "ct_train_1011_label_frame_199.png", 
"ct_train_1014_label_frame_144.png", "ct_train_1003_label_frame_288.png", "ct_train_1009_label_frame_107.png", "ct_train_1017_label_frame_2.png", "ct_train_1011_label_frame_177.png", "ct_train_1001_label_frame_285.png", "ct_train_1008_label_frame_91.png", "ct_train_1020_label_frame_309.png", "ct_train_1008_label_frame_34.png", "ct_train_1001_label_frame_331.png", "ct_train_1016_label_frame_127.png", "ct_train_1017_label_frame_36.png", "ct_train_1009_label_frame_260.png", "ct_train_1016_label_frame_57.png", "ct_train_1006_label_frame_119.png", "ct_train_1014_label_frame_327.png", "ct_train_1015_label_frame_133.png", "ct_train_1005_label_frame_78.png", "ct_train_1015_label_frame_121.png", "ct_train_1005_label_frame_43.png", "ct_train_1016_label_frame_201.png", "ct_train_1001_label_frame_301.png", "ct_train_1015_label_frame_291.png", "ct_train_1009_label_frame_121.png", "ct_train_1016_label_frame_6.png", "ct_train_1020_label_frame_143.png", "ct_train_1009_label_frame_154.png", "ct_train_1006_label_frame_108.png", "ct_train_1009_label_frame_246.png", "ct_train_1018_label_frame_148.png", "ct_train_1020_label_frame_132.png", "ct_train_1016_label_frame_291.png", "ct_train_1015_label_frame_165.png", "ct_train_1014_label_frame_285.png", "ct_train_1014_label_frame_154.png", "ct_train_1016_label_frame_281.png", "ct_train_1001_label_frame_323.png", "ct_train_1009_label_frame_78.png", "ct_train_1011_label_frame_110.png", "ct_train_1017_label_frame_136.png", "ct_train_1011_label_frame_54.png", "ct_train_1005_label_frame_95.png", "ct_train_1006_label_frame_188.png", "ct_train_1016_label_frame_151.png", "ct_train_1015_label_frame_2.png", "ct_train_1016_label_frame_59.png", "ct_train_1008_label_frame_146.png", "ct_train_1006_label_frame_123.png", "ct_train_1009_label_frame_166.png", "ct_train_1009_label_frame_35.png", "ct_train_1005_label_frame_86.png", "ct_train_1017_label_frame_131.png", "ct_train_1005_label_frame_108.png", "ct_train_1003_label_frame_82.png", "ct_train_1006_label_frame_107.png", "ct_train_1018_label_frame_127.png", "ct_train_1001_label_frame_339.png", "ct_train_1003_label_frame_4.png", "ct_train_1001_label_frame_334.png", "ct_train_1020_label_frame_217.png", "ct_train_1006_label_frame_124.png", "ct_train_1020_label_frame_194.png", "ct_train_1018_label_frame_50.png", "ct_train_1008_label_frame_186.png", "ct_train_1017_label_frame_18.png", "ct_train_1015_label_frame_37.png", "ct_train_1018_label_frame_88.png", "ct_train_1001_label_frame_167.png", "ct_train_1011_label_frame_11.png", "ct_train_1015_label_frame_109.png", "ct_train_1017_label_frame_167.png", "ct_train_1017_label_frame_58.png", "ct_train_1017_label_frame_87.png", "ct_train_1005_label_frame_88.png", "ct_train_1005_label_frame_1.png", "ct_train_1016_label_frame_279.png", "ct_train_1018_label_frame_28.png", "ct_train_1020_label_frame_185.png", "ct_train_1001_label_frame_108.png", "ct_train_1011_label_frame_36.png", "ct_train_1006_label_frame_111.png", "ct_train_1014_label_frame_130.png", "ct_train_1016_label_frame_280.png", "ct_train_1003_label_frame_26.png", "ct_train_1001_label_frame_93.png", "ct_train_1009_label_frame_138.png", "ct_train_1015_label_frame_187.png"], "valid_labels": ["ct_train_1004_label_frame_55.png", "ct_train_1013_label_frame_191.png", "ct_train_1007_label_frame_45.png", "ct_train_1013_label_frame_12.png", "ct_train_1007_label_frame_110.png", "ct_train_1004_label_frame_164.png", "ct_train_1013_label_frame_19.png", "ct_train_1007_label_frame_169.png", "ct_train_1004_label_frame_171.png", 
"ct_train_1004_label_frame_6.png", "ct_train_1007_label_frame_187.png", "ct_train_1004_label_frame_29.png", "ct_train_1013_label_frame_187.png", "ct_train_1007_label_frame_14.png", "ct_train_1013_label_frame_186.png", "ct_train_1004_label_frame_4.png", "ct_train_1004_label_frame_16.png", "ct_train_1004_label_frame_175.png", "ct_train_1013_label_frame_143.png", "ct_train_1013_label_frame_42.png", "ct_train_1004_label_frame_2.png", "ct_train_1013_label_frame_0.png", "ct_train_1013_label_frame_38.png", "ct_train_1004_label_frame_107.png", "ct_train_1004_label_frame_31.png", "ct_train_1013_label_frame_173.png", "ct_train_1013_label_frame_144.png", "ct_train_1004_label_frame_120.png", "ct_train_1007_label_frame_193.png", "ct_train_1007_label_frame_123.png", "ct_train_1013_label_frame_193.png", "ct_train_1013_label_frame_101.png", "ct_train_1004_label_frame_176.png", "ct_train_1007_label_frame_120.png", "ct_train_1013_label_frame_70.png", "ct_train_1007_label_frame_21.png", "ct_train_1013_label_frame_120.png", "ct_train_1013_label_frame_209.png", "ct_train_1013_label_frame_84.png", "ct_train_1004_label_frame_49.png", "ct_train_1004_label_frame_117.png", "ct_train_1007_label_frame_52.png", "ct_train_1007_label_frame_139.png", "ct_train_1004_label_frame_134.png", "ct_train_1004_label_frame_183.png", "ct_train_1013_label_frame_99.png", "ct_train_1004_label_frame_194.png", "ct_train_1013_label_frame_69.png", "ct_train_1007_label_frame_91.png", "ct_train_1004_label_frame_154.png", "ct_train_1013_label_frame_45.png", "ct_train_1013_label_frame_157.png", "ct_train_1004_label_frame_79.png", "ct_train_1007_label_frame_208.png", "ct_train_1007_label_frame_50.png", "ct_train_1007_label_frame_180.png", "ct_train_1004_label_frame_43.png", "ct_train_1013_label_frame_50.png", "ct_train_1004_label_frame_129.png", "ct_train_1004_label_frame_63.png", "ct_train_1004_label_frame_21.png", "ct_train_1013_label_frame_172.png", "ct_train_1007_label_frame_233.png", "ct_train_1007_label_frame_172.png", "ct_train_1007_label_frame_134.png", "ct_train_1007_label_frame_79.png", "ct_train_1013_label_frame_14.png", "ct_train_1007_label_frame_17.png", "ct_train_1004_label_frame_198.png", "ct_train_1007_label_frame_144.png", "ct_train_1007_label_frame_129.png", "ct_train_1013_label_frame_162.png", "ct_train_1007_label_frame_20.png", "ct_train_1013_label_frame_37.png", "ct_train_1004_label_frame_191.png", "ct_train_1007_label_frame_48.png", "ct_train_1004_label_frame_11.png", "ct_train_1007_label_frame_216.png", "ct_train_1007_label_frame_102.png", "ct_train_1007_label_frame_73.png", "ct_train_1013_label_frame_102.png", "ct_train_1013_label_frame_182.png", "ct_train_1007_label_frame_125.png", "ct_train_1007_label_frame_130.png", "ct_train_1007_label_frame_186.png", "ct_train_1004_label_frame_28.png", "ct_train_1007_label_frame_84.png", "ct_train_1007_label_frame_178.png", "ct_train_1007_label_frame_157.png", "ct_train_1004_label_frame_39.png", "ct_train_1007_label_frame_214.png", "ct_train_1004_label_frame_114.png", "ct_train_1007_label_frame_106.png", "ct_train_1007_label_frame_58.png", "ct_train_1013_label_frame_34.png", "ct_train_1007_label_frame_190.png", "ct_train_1007_label_frame_89.png", "ct_train_1007_label_frame_175.png", "ct_train_1013_label_frame_126.png", "ct_train_1007_label_frame_194.png", "ct_train_1007_label_frame_154.png", "ct_train_1007_label_frame_217.png", "ct_train_1013_label_frame_54.png", "ct_train_1007_label_frame_72.png", "ct_train_1013_label_frame_13.png", "ct_train_1007_label_frame_87.png", 
"ct_train_1007_label_frame_219.png", "ct_train_1004_label_frame_133.png", "ct_train_1007_label_frame_104.png", "ct_train_1007_label_frame_116.png", "ct_train_1013_label_frame_154.png", "ct_train_1007_label_frame_2.png", "ct_train_1013_label_frame_149.png", "ct_train_1013_label_frame_107.png", "ct_train_1004_label_frame_150.png", "ct_train_1007_label_frame_204.png", "ct_train_1004_label_frame_66.png", "ct_train_1007_label_frame_121.png", "ct_train_1004_label_frame_10.png", "ct_train_1013_label_frame_127.png", "ct_train_1013_label_frame_158.png", "ct_train_1004_label_frame_73.png", "ct_train_1013_label_frame_15.png", "ct_train_1013_label_frame_9.png", "ct_train_1004_label_frame_32.png", "ct_train_1007_label_frame_60.png", "ct_train_1007_label_frame_109.png", "ct_train_1004_label_frame_95.png", "ct_train_1013_label_frame_7.png", "ct_train_1004_label_frame_54.png", "ct_train_1013_label_frame_160.png", "ct_train_1004_label_frame_169.png", "ct_train_1004_label_frame_189.png", "ct_train_1013_label_frame_30.png", "ct_train_1013_label_frame_62.png", "ct_train_1013_label_frame_161.png", "ct_train_1004_label_frame_8.png", "ct_train_1013_label_frame_53.png", "ct_train_1013_label_frame_178.png", "ct_train_1004_label_frame_27.png", "ct_train_1004_label_frame_65.png", "ct_train_1013_label_frame_57.png", "ct_train_1004_label_frame_174.png", "ct_train_1007_label_frame_95.png", "ct_train_1007_label_frame_148.png", "ct_train_1013_label_frame_51.png", "ct_train_1013_label_frame_195.png", "ct_train_1013_label_frame_74.png", "ct_train_1013_label_frame_125.png", "ct_train_1004_label_frame_96.png", "ct_train_1007_label_frame_161.png", "ct_train_1007_label_frame_185.png", "ct_train_1004_label_frame_56.png", "ct_train_1007_label_frame_114.png", "ct_train_1004_label_frame_101.png", "ct_train_1004_label_frame_173.png", "ct_train_1007_label_frame_41.png", "ct_train_1013_label_frame_202.png", "ct_train_1013_label_frame_192.png", "ct_train_1004_label_frame_186.png", "ct_train_1007_label_frame_156.png", "ct_train_1004_label_frame_98.png", "ct_train_1013_label_frame_116.png", "ct_train_1004_label_frame_126.png", "ct_train_1004_label_frame_146.png", "ct_train_1013_label_frame_177.png", "ct_train_1004_label_frame_18.png", "ct_train_1013_label_frame_78.png", "ct_train_1013_label_frame_27.png", "ct_train_1007_label_frame_33.png", "ct_train_1004_label_frame_139.png", "ct_train_1013_label_frame_49.png", "ct_train_1013_label_frame_93.png", "ct_train_1007_label_frame_241.png", "ct_train_1004_label_frame_119.png", "ct_train_1007_label_frame_4.png", "ct_train_1007_label_frame_152.png", "ct_train_1007_label_frame_173.png", "ct_train_1013_label_frame_106.png", "ct_train_1007_label_frame_240.png", "ct_train_1013_label_frame_111.png", "ct_train_1004_label_frame_82.png", "ct_train_1004_label_frame_102.png", "ct_train_1013_label_frame_18.png", "ct_train_1013_label_frame_167.png", "ct_train_1004_label_frame_40.png", "ct_train_1013_label_frame_23.png", "ct_train_1007_label_frame_165.png", "ct_train_1007_label_frame_166.png", "ct_train_1007_label_frame_31.png", "ct_train_1007_label_frame_36.png", "ct_train_1013_label_frame_16.png", "ct_train_1013_label_frame_26.png", "ct_train_1007_label_frame_135.png", "ct_train_1013_label_frame_65.png", "ct_train_1007_label_frame_153.png", "ct_train_1004_label_frame_130.png", "ct_train_1013_label_frame_168.png", "ct_train_1007_label_frame_111.png", "ct_train_1013_label_frame_33.png", "ct_train_1007_label_frame_10.png", "ct_train_1007_label_frame_160.png", "ct_train_1004_label_frame_25.png", 
"ct_train_1004_label_frame_111.png", "ct_train_1013_label_frame_35.png", "ct_train_1007_label_frame_29.png", "ct_train_1013_label_frame_1.png", "ct_train_1004_label_frame_103.png", "ct_train_1007_label_frame_184.png", "ct_train_1004_label_frame_75.png", "ct_train_1007_label_frame_66.png", "ct_train_1013_label_frame_97.png", "ct_train_1007_label_frame_181.png", "ct_train_1004_label_frame_185.png", "ct_train_1013_label_frame_77.png", "ct_train_1004_label_frame_64.png", "ct_train_1007_label_frame_74.png", "ct_train_1004_label_frame_153.png", "ct_train_1004_label_frame_93.png", "ct_train_1004_label_frame_81.png", "ct_train_1013_label_frame_123.png", "ct_train_1013_label_frame_61.png", "ct_train_1004_label_frame_36.png", "ct_train_1004_label_frame_196.png", "ct_train_1013_label_frame_129.png", "ct_train_1013_label_frame_117.png", "ct_train_1007_label_frame_218.png", "ct_train_1013_label_frame_72.png", "ct_train_1007_label_frame_150.png", "ct_train_1004_label_frame_53.png", "ct_train_1007_label_frame_27.png", "ct_train_1013_label_frame_76.png", "ct_train_1013_label_frame_66.png", "ct_train_1013_label_frame_140.png", "ct_train_1004_label_frame_131.png", "ct_train_1004_label_frame_156.png", "ct_train_1007_label_frame_174.png", "ct_train_1004_label_frame_151.png", "ct_train_1007_label_frame_42.png", "ct_train_1004_label_frame_5.png", "ct_train_1007_label_frame_138.png", "ct_train_1007_label_frame_227.png", "ct_train_1004_label_frame_158.png", "ct_train_1004_label_frame_199.png", "ct_train_1004_label_frame_193.png", "ct_train_1007_label_frame_222.png", "ct_train_1004_label_frame_144.png", "ct_train_1007_label_frame_112.png", "ct_train_1004_label_frame_195.png", "ct_train_1013_label_frame_79.png", "ct_train_1007_label_frame_231.png", "ct_train_1004_label_frame_89.png", "ct_train_1013_label_frame_194.png", "ct_train_1007_label_frame_239.png", "ct_train_1004_label_frame_72.png", "ct_train_1013_label_frame_8.png", "ct_train_1013_label_frame_43.png", "ct_train_1007_label_frame_213.png", "ct_train_1013_label_frame_94.png", "ct_train_1007_label_frame_137.png", "ct_train_1007_label_frame_82.png", "ct_train_1007_label_frame_164.png", "ct_train_1004_label_frame_92.png", "ct_train_1007_label_frame_155.png", "ct_train_1004_label_frame_52.png", "ct_train_1013_label_frame_110.png", "ct_train_1013_label_frame_183.png", "ct_train_1013_label_frame_171.png", "ct_train_1007_label_frame_235.png", "ct_train_1007_label_frame_224.png", "ct_train_1007_label_frame_12.png", "ct_train_1004_label_frame_125.png", "ct_train_1007_label_frame_189.png", "ct_train_1007_label_frame_44.png", "ct_train_1007_label_frame_115.png", "ct_train_1007_label_frame_64.png", "ct_train_1007_label_frame_81.png", "ct_train_1013_label_frame_205.png", "ct_train_1007_label_frame_149.png", "ct_train_1013_label_frame_184.png", "ct_train_1007_label_frame_0.png", "ct_train_1007_label_frame_177.png", "ct_train_1004_label_frame_149.png", "ct_train_1004_label_frame_58.png", "ct_train_1004_label_frame_123.png", "ct_train_1004_label_frame_159.png", "ct_train_1007_label_frame_1.png", "ct_train_1007_label_frame_131.png", "ct_train_1007_label_frame_76.png", "ct_train_1013_label_frame_98.png", "ct_train_1007_label_frame_75.png", "ct_train_1013_label_frame_3.png", "ct_train_1013_label_frame_89.png", "ct_train_1007_label_frame_107.png", "ct_train_1007_label_frame_168.png", "ct_train_1007_label_frame_85.png", "ct_train_1004_label_frame_46.png", "ct_train_1004_label_frame_97.png", "ct_train_1007_label_frame_225.png", "ct_train_1013_label_frame_133.png", 
"ct_train_1013_label_frame_114.png", "ct_train_1007_label_frame_232.png", "ct_train_1007_label_frame_19.png", "ct_train_1007_label_frame_195.png", "ct_train_1007_label_frame_99.png", "ct_train_1004_label_frame_112.png", "ct_train_1013_label_frame_164.png", "ct_train_1007_label_frame_143.png", "ct_train_1007_label_frame_126.png", "ct_train_1013_label_frame_201.png", "ct_train_1004_label_frame_132.png", "ct_train_1004_label_frame_84.png", "ct_train_1013_label_frame_67.png", "ct_train_1007_label_frame_124.png", "ct_train_1013_label_frame_121.png", "ct_train_1013_label_frame_25.png", "ct_train_1013_label_frame_86.png", "ct_train_1007_label_frame_236.png", "ct_train_1013_label_frame_105.png", "ct_train_1013_label_frame_20.png", "ct_train_1013_label_frame_176.png", "ct_train_1007_label_frame_159.png", "ct_train_1013_label_frame_181.png", "ct_train_1013_label_frame_48.png", "ct_train_1007_label_frame_35.png", "ct_train_1004_label_frame_168.png", "ct_train_1007_label_frame_101.png", "ct_train_1007_label_frame_220.png", "ct_train_1004_label_frame_15.png", "ct_train_1013_label_frame_109.png", "ct_train_1004_label_frame_26.png", "ct_train_1004_label_frame_69.png", "ct_train_1004_label_frame_80.png", "ct_train_1013_label_frame_139.png", "ct_train_1013_label_frame_199.png", "ct_train_1007_label_frame_98.png", "ct_train_1013_label_frame_197.png", "ct_train_1013_label_frame_137.png", "ct_train_1007_label_frame_7.png", "ct_train_1007_label_frame_198.png", "ct_train_1004_label_frame_94.png", "ct_train_1004_label_frame_197.png", "ct_train_1004_label_frame_179.png", "ct_train_1013_label_frame_118.png", "ct_train_1013_label_frame_11.png", "ct_train_1013_label_frame_207.png", "ct_train_1004_label_frame_83.png", "ct_train_1013_label_frame_115.png", "ct_train_1007_label_frame_43.png", "ct_train_1013_label_frame_152.png", "ct_train_1013_label_frame_85.png", "ct_train_1007_label_frame_179.png", "ct_train_1007_label_frame_24.png", "ct_train_1007_label_frame_86.png", "ct_train_1007_label_frame_92.png", "ct_train_1007_label_frame_140.png", "ct_train_1007_label_frame_71.png", "ct_train_1007_label_frame_56.png", "ct_train_1007_label_frame_55.png", "ct_train_1004_label_frame_76.png", "ct_train_1013_label_frame_122.png", "ct_train_1004_label_frame_188.png", "ct_train_1007_label_frame_78.png", "ct_train_1007_label_frame_68.png", "ct_train_1013_label_frame_58.png", "ct_train_1004_label_frame_20.png", "ct_train_1004_label_frame_70.png", "ct_train_1004_label_frame_152.png", "ct_train_1007_label_frame_237.png", "ct_train_1004_label_frame_57.png", "ct_train_1013_label_frame_204.png", "ct_train_1004_label_frame_9.png", "ct_train_1004_label_frame_118.png", "ct_train_1013_label_frame_119.png", "ct_train_1013_label_frame_32.png", "ct_train_1007_label_frame_119.png", "ct_train_1004_label_frame_115.png", "ct_train_1007_label_frame_207.png", "ct_train_1007_label_frame_147.png", "ct_train_1007_label_frame_199.png", "ct_train_1007_label_frame_210.png", "ct_train_1013_label_frame_136.png", "ct_train_1007_label_frame_54.png", "ct_train_1007_label_frame_5.png", "ct_train_1007_label_frame_61.png", "ct_train_1007_label_frame_191.png", "ct_train_1013_label_frame_71.png", "ct_train_1013_label_frame_17.png", "ct_train_1007_label_frame_13.png", "ct_train_1013_label_frame_68.png", "ct_train_1004_label_frame_41.png", "ct_train_1007_label_frame_26.png", "ct_train_1004_label_frame_7.png", "ct_train_1004_label_frame_61.png", "ct_train_1007_label_frame_16.png", "ct_train_1004_label_frame_88.png", "ct_train_1007_label_frame_196.png", 
"ct_train_1004_label_frame_136.png", "ct_train_1007_label_frame_77.png", "ct_train_1013_label_frame_59.png", "ct_train_1004_label_frame_0.png", "ct_train_1004_label_frame_135.png", "ct_train_1007_label_frame_62.png", "ct_train_1007_label_frame_63.png", "ct_train_1013_label_frame_208.png", "ct_train_1013_label_frame_55.png", "ct_train_1007_label_frame_105.png", "ct_train_1013_label_frame_87.png", "ct_train_1013_label_frame_39.png", "ct_train_1004_label_frame_19.png", "ct_train_1007_label_frame_242.png", "ct_train_1004_label_frame_34.png", "ct_train_1013_label_frame_145.png", "ct_train_1013_label_frame_41.png", "ct_train_1007_label_frame_200.png", "ct_train_1007_label_frame_25.png", "ct_train_1004_label_frame_140.png", "ct_train_1007_label_frame_118.png", "ct_train_1004_label_frame_137.png", "ct_train_1007_label_frame_57.png", "ct_train_1007_label_frame_15.png", "ct_train_1004_label_frame_51.png", "ct_train_1007_label_frame_65.png", "ct_train_1007_label_frame_142.png", "ct_train_1007_label_frame_88.png", "ct_train_1004_label_frame_165.png", "ct_train_1007_label_frame_108.png", "ct_train_1004_label_frame_172.png", "ct_train_1013_label_frame_150.png", "ct_train_1004_label_frame_17.png", "ct_train_1013_label_frame_174.png", "ct_train_1007_label_frame_90.png", "ct_train_1013_label_frame_60.png", "ct_train_1007_label_frame_9.png", "ct_train_1013_label_frame_131.png", "ct_train_1007_label_frame_215.png", "ct_train_1004_label_frame_3.png", "ct_train_1013_label_frame_185.png", "ct_train_1004_label_frame_109.png", "ct_train_1007_label_frame_28.png", "ct_train_1004_label_frame_100.png", "ct_train_1013_label_frame_175.png", "ct_train_1013_label_frame_170.png", "ct_train_1007_label_frame_6.png", "ct_train_1007_label_frame_202.png", "ct_train_1004_label_frame_60.png", "ct_train_1013_label_frame_210.png", "ct_train_1007_label_frame_188.png", "ct_train_1013_label_frame_112.png", "ct_train_1007_label_frame_162.png", "ct_train_1007_label_frame_49.png", "ct_train_1013_label_frame_22.png", "ct_train_1004_label_frame_127.png", "ct_train_1004_label_frame_180.png", "ct_train_1013_label_frame_151.png", "ct_train_1007_label_frame_209.png", "ct_train_1004_label_frame_67.png", "ct_train_1004_label_frame_45.png", "ct_train_1013_label_frame_165.png", "ct_train_1007_label_frame_141.png", "ct_train_1013_label_frame_104.png", "ct_train_1007_label_frame_113.png", "ct_train_1007_label_frame_238.png", "ct_train_1004_label_frame_44.png", "ct_train_1013_label_frame_5.png", "ct_train_1013_label_frame_4.png", "ct_train_1007_label_frame_234.png", "ct_train_1013_label_frame_166.png", "ct_train_1004_label_frame_74.png", "ct_train_1013_label_frame_124.png", "ct_train_1013_label_frame_29.png", "ct_train_1004_label_frame_161.png", "ct_train_1013_label_frame_56.png", "ct_train_1013_label_frame_148.png", "ct_train_1004_label_frame_166.png", "ct_train_1007_label_frame_226.png", "ct_train_1004_label_frame_110.png", "ct_train_1004_label_frame_184.png", "ct_train_1013_label_frame_132.png", "ct_train_1007_label_frame_183.png", "ct_train_1013_label_frame_83.png", "ct_train_1013_label_frame_163.png", "ct_train_1004_label_frame_59.png", "ct_train_1007_label_frame_18.png", "ct_train_1007_label_frame_46.png", "ct_train_1013_label_frame_44.png", "ct_train_1007_label_frame_22.png", "ct_train_1013_label_frame_180.png", "ct_train_1004_label_frame_90.png", "ct_train_1013_label_frame_28.png", "ct_train_1004_label_frame_113.png", "ct_train_1004_label_frame_141.png", "ct_train_1004_label_frame_121.png", "ct_train_1007_label_frame_170.png", 
"ct_train_1007_label_frame_127.png", "ct_train_1004_label_frame_85.png", "ct_train_1007_label_frame_197.png", "ct_train_1007_label_frame_171.png", "ct_train_1013_label_frame_188.png", "ct_train_1007_label_frame_205.png", "ct_train_1004_label_frame_116.png", "ct_train_1007_label_frame_203.png", "ct_train_1004_label_frame_170.png", "ct_train_1004_label_frame_13.png", "ct_train_1013_label_frame_90.png", "ct_train_1004_label_frame_187.png", "ct_train_1013_label_frame_179.png", "ct_train_1013_label_frame_2.png", "ct_train_1013_label_frame_47.png", "ct_train_1007_label_frame_151.png", "ct_train_1007_label_frame_201.png", "ct_train_1013_label_frame_80.png", "ct_train_1004_label_frame_91.png", "ct_train_1007_label_frame_38.png", "ct_train_1007_label_frame_230.png", "ct_train_1007_label_frame_206.png", "ct_train_1007_label_frame_51.png", "ct_train_1007_label_frame_192.png", "ct_train_1007_label_frame_96.png", "ct_train_1007_label_frame_70.png", "ct_train_1013_label_frame_206.png", "ct_train_1004_label_frame_177.png", "ct_train_1013_label_frame_147.png", "ct_train_1013_label_frame_91.png", "ct_train_1007_label_frame_145.png", "ct_train_1004_label_frame_163.png", "ct_train_1004_label_frame_38.png", "ct_train_1004_label_frame_42.png", "ct_train_1004_label_frame_106.png", "ct_train_1004_label_frame_181.png", "ct_train_1007_label_frame_228.png", "ct_train_1007_label_frame_34.png", "ct_train_1007_label_frame_103.png", "ct_train_1004_label_frame_157.png", "ct_train_1004_label_frame_105.png", "ct_train_1004_label_frame_142.png", "ct_train_1004_label_frame_78.png", "ct_train_1007_label_frame_128.png", "ct_train_1004_label_frame_104.png", "ct_train_1007_label_frame_59.png", "ct_train_1004_label_frame_155.png", "ct_train_1007_label_frame_80.png", "ct_train_1013_label_frame_155.png", "ct_train_1004_label_frame_48.png", "ct_train_1004_label_frame_87.png", "ct_train_1013_label_frame_196.png", "ct_train_1007_label_frame_158.png", "ct_train_1013_label_frame_113.png", "ct_train_1007_label_frame_146.png", "ct_train_1007_label_frame_93.png", "ct_train_1013_label_frame_128.png", "ct_train_1004_label_frame_138.png", "ct_train_1013_label_frame_82.png", "ct_train_1013_label_frame_146.png", "ct_train_1013_label_frame_31.png", "ct_train_1007_label_frame_11.png", "ct_train_1007_label_frame_212.png", "ct_train_1007_label_frame_83.png", "ct_train_1007_label_frame_167.png", "ct_train_1013_label_frame_159.png", "ct_train_1013_label_frame_156.png", "ct_train_1004_label_frame_182.png", "ct_train_1004_label_frame_35.png", "ct_train_1013_label_frame_141.png", "ct_train_1013_label_frame_200.png", "ct_train_1013_label_frame_75.png", "ct_train_1007_label_frame_53.png", "ct_train_1004_label_frame_33.png", "ct_train_1007_label_frame_132.png", "ct_train_1004_label_frame_122.png", "ct_train_1004_label_frame_47.png", "ct_train_1007_label_frame_221.png", "ct_train_1007_label_frame_23.png", "ct_train_1013_label_frame_153.png", "ct_train_1007_label_frame_229.png", "ct_train_1013_label_frame_130.png", "ct_train_1013_label_frame_92.png", "ct_train_1013_label_frame_108.png", "ct_train_1013_label_frame_46.png", "ct_train_1004_label_frame_23.png", "ct_train_1013_label_frame_95.png", "ct_train_1013_label_frame_138.png", "ct_train_1013_label_frame_52.png", "ct_train_1013_label_frame_36.png", "ct_train_1013_label_frame_10.png", "ct_train_1004_label_frame_22.png", "ct_train_1007_label_frame_67.png", "ct_train_1004_label_frame_143.png", "ct_train_1013_label_frame_6.png", "ct_train_1013_label_frame_24.png", "ct_train_1013_label_frame_189.png", 
"ct_train_1013_label_frame_64.png", "ct_train_1004_label_frame_12.png", "ct_train_1004_label_frame_190.png", "ct_train_1004_label_frame_50.png", "ct_train_1007_label_frame_47.png", "ct_train_1007_label_frame_211.png", "ct_train_1004_label_frame_128.png", "ct_train_1007_label_frame_37.png", "ct_train_1013_label_frame_190.png", "ct_train_1013_label_frame_134.png", "ct_train_1007_label_frame_122.png", "ct_train_1007_label_frame_8.png", "ct_train_1013_label_frame_203.png", "ct_train_1013_label_frame_63.png", "ct_train_1004_label_frame_162.png", "ct_train_1013_label_frame_103.png", "ct_train_1007_label_frame_32.png", "ct_train_1004_label_frame_71.png", "ct_train_1007_label_frame_136.png", "ct_train_1004_label_frame_147.png", "ct_train_1013_label_frame_88.png", "ct_train_1013_label_frame_142.png", "ct_train_1007_label_frame_117.png", "ct_train_1013_label_frame_40.png", "ct_train_1007_label_frame_39.png", "ct_train_1007_label_frame_94.png", "ct_train_1004_label_frame_37.png", "ct_train_1004_label_frame_86.png", "ct_train_1004_label_frame_24.png", "ct_train_1007_label_frame_223.png", "ct_train_1004_label_frame_62.png", "ct_train_1004_label_frame_148.png", "ct_train_1007_label_frame_97.png", "ct_train_1007_label_frame_100.png", "ct_train_1004_label_frame_99.png", "ct_train_1004_label_frame_14.png", "ct_train_1004_label_frame_68.png", "ct_train_1004_label_frame_77.png", "ct_train_1013_label_frame_135.png", "ct_train_1007_label_frame_69.png", "ct_train_1013_label_frame_73.png", "ct_train_1013_label_frame_96.png", "ct_train_1004_label_frame_145.png", "ct_train_1004_label_frame_124.png", "ct_train_1007_label_frame_163.png", "ct_train_1007_label_frame_182.png", "ct_train_1013_label_frame_21.png", "ct_train_1004_label_frame_178.png", "ct_train_1004_label_frame_167.png", "ct_train_1013_label_frame_81.png", "ct_train_1013_label_frame_100.png", "ct_train_1004_label_frame_30.png", "ct_train_1013_label_frame_169.png", "ct_train_1004_label_frame_108.png", "ct_train_1004_label_frame_1.png", "ct_train_1007_label_frame_133.png", "ct_train_1007_label_frame_3.png", "ct_train_1007_label_frame_30.png", "ct_train_1004_label_frame_192.png", "ct_train_1007_label_frame_40.png", "ct_train_1004_label_frame_160.png", "ct_train_1007_label_frame_176.png", "ct_train_1013_label_frame_198.png"], "test_labels": ["ct_train_1019_label_frame_226.png", "ct_train_1002_label_frame_207.png", "ct_train_1010_label_frame_3.png", "ct_train_1012_label_frame_39.png", "ct_train_1010_label_frame_76.png", "ct_train_1010_label_frame_90.png", "ct_train_1002_label_frame_8.png", "ct_train_1010_label_frame_200.png", "ct_train_1019_label_frame_287.png", "ct_train_1010_label_frame_253.png", "ct_train_1010_label_frame_126.png", "ct_train_1019_label_frame_217.png", "ct_train_1002_label_frame_209.png", "ct_train_1012_label_frame_135.png", "ct_train_1002_label_frame_5.png", "ct_train_1010_label_frame_23.png", "ct_train_1010_label_frame_86.png", "ct_train_1012_label_frame_5.png", "ct_train_1012_label_frame_72.png", "ct_train_1010_label_frame_209.png", "ct_train_1012_label_frame_6.png", "ct_train_1019_label_frame_218.png", "ct_train_1002_label_frame_235.png", "ct_train_1012_label_frame_170.png", "ct_train_1019_label_frame_232.png", "ct_train_1002_label_frame_194.png", "ct_train_1019_label_frame_201.png", "ct_train_1010_label_frame_63.png", "ct_train_1010_label_frame_56.png", "ct_train_1010_label_frame_104.png", "ct_train_1010_label_frame_167.png", "ct_train_1019_label_frame_199.png", "ct_train_1012_label_frame_77.png", "ct_train_1010_label_frame_99.png", 
"ct_train_1010_label_frame_249.png", "ct_train_1012_label_frame_68.png", "ct_train_1002_label_frame_70.png", "ct_train_1012_label_frame_146.png", "ct_train_1012_label_frame_78.png", "ct_train_1019_label_frame_61.png", "ct_train_1019_label_frame_177.png", "ct_train_1010_label_frame_51.png", "ct_train_1019_label_frame_59.png", "ct_train_1002_label_frame_66.png", "ct_train_1019_label_frame_162.png", "ct_train_1002_label_frame_152.png", "ct_train_1019_label_frame_195.png", "ct_train_1010_label_frame_165.png", "ct_train_1002_label_frame_105.png", "ct_train_1019_label_frame_91.png", "ct_train_1002_label_frame_116.png", "ct_train_1010_label_frame_210.png", "ct_train_1002_label_frame_103.png", "ct_train_1002_label_frame_187.png", "ct_train_1010_label_frame_173.png", "ct_train_1002_label_frame_85.png", "ct_train_1019_label_frame_200.png", "ct_train_1010_label_frame_113.png", "ct_train_1010_label_frame_163.png", "ct_train_1019_label_frame_164.png", "ct_train_1019_label_frame_274.png", "ct_train_1010_label_frame_142.png", "ct_train_1002_label_frame_189.png", "ct_train_1012_label_frame_150.png", "ct_train_1012_label_frame_165.png", "ct_train_1002_label_frame_216.png", "ct_train_1019_label_frame_168.png", "ct_train_1010_label_frame_164.png", "ct_train_1019_label_frame_99.png", "ct_train_1012_label_frame_55.png", "ct_train_1010_label_frame_196.png", "ct_train_1019_label_frame_124.png", "ct_train_1019_label_frame_173.png", "ct_train_1002_label_frame_2.png", "ct_train_1019_label_frame_25.png", "ct_train_1019_label_frame_14.png", "ct_train_1010_label_frame_94.png", "ct_train_1019_label_frame_80.png", "ct_train_1010_label_frame_92.png", "ct_train_1012_label_frame_84.png", "ct_train_1019_label_frame_181.png", "ct_train_1010_label_frame_11.png", "ct_train_1012_label_frame_99.png", "ct_train_1019_label_frame_172.png", "ct_train_1012_label_frame_109.png", "ct_train_1002_label_frame_81.png", "ct_train_1002_label_frame_107.png", "ct_train_1012_label_frame_30.png", "ct_train_1010_label_frame_97.png", "ct_train_1002_label_frame_48.png", "ct_train_1002_label_frame_19.png", "ct_train_1002_label_frame_170.png", "ct_train_1019_label_frame_26.png", "ct_train_1002_label_frame_149.png", "ct_train_1002_label_frame_77.png", "ct_train_1010_label_frame_16.png", "ct_train_1002_label_frame_145.png", "ct_train_1010_label_frame_125.png", "ct_train_1002_label_frame_232.png", "ct_train_1010_label_frame_222.png", "ct_train_1012_label_frame_44.png", "ct_train_1019_label_frame_191.png", "ct_train_1002_label_frame_87.png", "ct_train_1012_label_frame_175.png", "ct_train_1010_label_frame_84.png", "ct_train_1002_label_frame_222.png", "ct_train_1002_label_frame_109.png", "ct_train_1010_label_frame_263.png", "ct_train_1012_label_frame_33.png", "ct_train_1019_label_frame_265.png", "ct_train_1019_label_frame_187.png", "ct_train_1010_label_frame_228.png", "ct_train_1019_label_frame_15.png", "ct_train_1012_label_frame_40.png", "ct_train_1010_label_frame_123.png", "ct_train_1010_label_frame_178.png", "ct_train_1019_label_frame_51.png", "ct_train_1019_label_frame_127.png", "ct_train_1002_label_frame_134.png", "ct_train_1019_label_frame_108.png", "ct_train_1010_label_frame_122.png", "ct_train_1012_label_frame_157.png", "ct_train_1019_label_frame_273.png", "ct_train_1010_label_frame_232.png", "ct_train_1002_label_frame_225.png", "ct_train_1002_label_frame_4.png", "ct_train_1019_label_frame_110.png", "ct_train_1012_label_frame_122.png", "ct_train_1019_label_frame_49.png", "ct_train_1010_label_frame_88.png", "ct_train_1010_label_frame_124.png", 
"ct_train_1019_label_frame_288.png", "ct_train_1010_label_frame_177.png", "ct_train_1019_label_frame_246.png", "ct_train_1002_label_frame_227.png", "ct_train_1019_label_frame_105.png", "ct_train_1019_label_frame_144.png", "ct_train_1012_label_frame_134.png", "ct_train_1019_label_frame_239.png", "ct_train_1019_label_frame_39.png", "ct_train_1019_label_frame_224.png", "ct_train_1019_label_frame_209.png", "ct_train_1002_label_frame_100.png", "ct_train_1002_label_frame_40.png", "ct_train_1002_label_frame_238.png", "ct_train_1012_label_frame_125.png", "ct_train_1019_label_frame_100.png", "ct_train_1010_label_frame_159.png", "ct_train_1019_label_frame_292.png", "ct_train_1002_label_frame_16.png", "ct_train_1019_label_frame_212.png", "ct_train_1002_label_frame_178.png", "ct_train_1010_label_frame_201.png", "ct_train_1019_label_frame_84.png", "ct_train_1019_label_frame_221.png", "ct_train_1010_label_frame_61.png", "ct_train_1010_label_frame_121.png", "ct_train_1012_label_frame_22.png", "ct_train_1019_label_frame_231.png", "ct_train_1019_label_frame_151.png", "ct_train_1002_label_frame_47.png", "ct_train_1019_label_frame_222.png", "ct_train_1002_label_frame_160.png", "ct_train_1019_label_frame_23.png", "ct_train_1010_label_frame_170.png", "ct_train_1012_label_frame_1.png", "ct_train_1012_label_frame_64.png", "ct_train_1002_label_frame_135.png", "ct_train_1002_label_frame_144.png", "ct_train_1019_label_frame_70.png", "ct_train_1002_label_frame_131.png", "ct_train_1010_label_frame_133.png", "ct_train_1010_label_frame_42.png", "ct_train_1010_label_frame_132.png", "ct_train_1019_label_frame_123.png", "ct_train_1002_label_frame_75.png", "ct_train_1010_label_frame_160.png", "ct_train_1012_label_frame_4.png", "ct_train_1019_label_frame_167.png", "ct_train_1010_label_frame_69.png", "ct_train_1012_label_frame_101.png", "ct_train_1010_label_frame_215.png", "ct_train_1010_label_frame_93.png", "ct_train_1012_label_frame_54.png", "ct_train_1002_label_frame_171.png", "ct_train_1002_label_frame_101.png", "ct_train_1010_label_frame_117.png", "ct_train_1002_label_frame_35.png", "ct_train_1010_label_frame_190.png", "ct_train_1002_label_frame_91.png", "ct_train_1012_label_frame_16.png", "ct_train_1010_label_frame_194.png", "ct_train_1002_label_frame_15.png", "ct_train_1002_label_frame_41.png", "ct_train_1002_label_frame_179.png", "ct_train_1010_label_frame_72.png", "ct_train_1010_label_frame_38.png", "ct_train_1019_label_frame_109.png", "ct_train_1010_label_frame_52.png", "ct_train_1019_label_frame_290.png", "ct_train_1019_label_frame_24.png", "ct_train_1019_label_frame_163.png", "ct_train_1010_label_frame_224.png", "ct_train_1019_label_frame_227.png", "ct_train_1002_label_frame_168.png", "ct_train_1012_label_frame_24.png", "ct_train_1019_label_frame_44.png", "ct_train_1002_label_frame_214.png", "ct_train_1019_label_frame_189.png", "ct_train_1010_label_frame_187.png", "ct_train_1012_label_frame_50.png", "ct_train_1002_label_frame_80.png", "ct_train_1019_label_frame_88.png", "ct_train_1010_label_frame_257.png", "ct_train_1002_label_frame_53.png", "ct_train_1002_label_frame_49.png", "ct_train_1019_label_frame_159.png", "ct_train_1019_label_frame_248.png", "ct_train_1002_label_frame_176.png", "ct_train_1002_label_frame_96.png", "ct_train_1019_label_frame_57.png", "ct_train_1019_label_frame_233.png", "ct_train_1019_label_frame_291.png", "ct_train_1012_label_frame_67.png", "ct_train_1010_label_frame_21.png", "ct_train_1002_label_frame_94.png", "ct_train_1010_label_frame_17.png", "ct_train_1012_label_frame_0.png", 
"ct_train_1019_label_frame_280.png", "ct_train_1019_label_frame_10.png", "ct_train_1010_label_frame_109.png", "ct_train_1010_label_frame_206.png", "ct_train_1012_label_frame_14.png", "ct_train_1012_label_frame_131.png", "ct_train_1002_label_frame_26.png", "ct_train_1012_label_frame_12.png", "ct_train_1012_label_frame_66.png", "ct_train_1019_label_frame_282.png", "ct_train_1012_label_frame_85.png", "ct_train_1010_label_frame_81.png", "ct_train_1019_label_frame_83.png", "ct_train_1002_label_frame_184.png", "ct_train_1002_label_frame_147.png", "ct_train_1019_label_frame_98.png", "ct_train_1010_label_frame_261.png", "ct_train_1019_label_frame_169.png", "ct_train_1010_label_frame_144.png", "ct_train_1019_label_frame_192.png", "ct_train_1010_label_frame_128.png", "ct_train_1002_label_frame_71.png", "ct_train_1019_label_frame_67.png", "ct_train_1010_label_frame_44.png", "ct_train_1002_label_frame_180.png", "ct_train_1002_label_frame_165.png", "ct_train_1002_label_frame_82.png", "ct_train_1002_label_frame_110.png", "ct_train_1019_label_frame_140.png", "ct_train_1019_label_frame_97.png", "ct_train_1002_label_frame_126.png", "ct_train_1012_label_frame_31.png", "ct_train_1019_label_frame_138.png", "ct_train_1002_label_frame_36.png", "ct_train_1019_label_frame_184.png", "ct_train_1019_label_frame_277.png", "ct_train_1010_label_frame_112.png", "ct_train_1019_label_frame_75.png", "ct_train_1012_label_frame_113.png", "ct_train_1019_label_frame_178.png", "ct_train_1019_label_frame_154.png", "ct_train_1002_label_frame_108.png", "ct_train_1010_label_frame_241.png", "ct_train_1019_label_frame_182.png", "ct_train_1002_label_frame_164.png", "ct_train_1012_label_frame_95.png", "ct_train_1019_label_frame_48.png", "ct_train_1012_label_frame_89.png", "ct_train_1019_label_frame_74.png", "ct_train_1010_label_frame_13.png", "ct_train_1019_label_frame_29.png", "ct_train_1010_label_frame_35.png", "ct_train_1019_label_frame_28.png", "ct_train_1010_label_frame_161.png", "ct_train_1010_label_frame_204.png", "ct_train_1019_label_frame_281.png", "ct_train_1012_label_frame_79.png", "ct_train_1019_label_frame_56.png", "ct_train_1002_label_frame_18.png", "ct_train_1019_label_frame_180.png", "ct_train_1012_label_frame_20.png", "ct_train_1012_label_frame_19.png", "ct_train_1010_label_frame_1.png", "ct_train_1019_label_frame_205.png", "ct_train_1010_label_frame_248.png", "ct_train_1012_label_frame_158.png", "ct_train_1019_label_frame_175.png", "ct_train_1002_label_frame_177.png", "ct_train_1002_label_frame_65.png", "ct_train_1019_label_frame_5.png", "ct_train_1012_label_frame_47.png", "ct_train_1010_label_frame_181.png", "ct_train_1002_label_frame_111.png", "ct_train_1002_label_frame_191.png", "ct_train_1012_label_frame_96.png", "ct_train_1002_label_frame_231.png", "ct_train_1002_label_frame_28.png", "ct_train_1002_label_frame_20.png", "ct_train_1002_label_frame_182.png", "ct_train_1019_label_frame_230.png", "ct_train_1019_label_frame_236.png", "ct_train_1002_label_frame_95.png", "ct_train_1002_label_frame_157.png", "ct_train_1002_label_frame_32.png", "ct_train_1002_label_frame_38.png", "ct_train_1002_label_frame_162.png", "ct_train_1002_label_frame_56.png", "ct_train_1012_label_frame_82.png", "ct_train_1010_label_frame_107.png", "ct_train_1019_label_frame_34.png", "ct_train_1010_label_frame_100.png", "ct_train_1019_label_frame_111.png", "ct_train_1010_label_frame_208.png", "ct_train_1010_label_frame_251.png", "ct_train_1019_label_frame_125.png", "ct_train_1019_label_frame_106.png", "ct_train_1002_label_frame_7.png", 
"ct_train_1010_label_frame_68.png", "ct_train_1012_label_frame_15.png", "ct_train_1010_label_frame_148.png", "ct_train_1019_label_frame_73.png", "ct_train_1019_label_frame_161.png", "ct_train_1012_label_frame_94.png", "ct_train_1010_label_frame_116.png", "ct_train_1010_label_frame_141.png", "ct_train_1019_label_frame_20.png", "ct_train_1019_label_frame_114.png", "ct_train_1010_label_frame_271.png", "ct_train_1002_label_frame_104.png", "ct_train_1012_label_frame_83.png", "ct_train_1010_label_frame_183.png", "ct_train_1019_label_frame_107.png", "ct_train_1002_label_frame_6.png", "ct_train_1010_label_frame_243.png", "ct_train_1002_label_frame_93.png", "ct_train_1012_label_frame_148.png", "ct_train_1019_label_frame_296.png", "ct_train_1010_label_frame_203.png", "ct_train_1019_label_frame_244.png", "ct_train_1010_label_frame_225.png", "ct_train_1002_label_frame_153.png", "ct_train_1010_label_frame_262.png", "ct_train_1012_label_frame_43.png", "ct_train_1010_label_frame_24.png", "ct_train_1019_label_frame_270.png", "ct_train_1019_label_frame_176.png", "ct_train_1012_label_frame_65.png", "ct_train_1010_label_frame_130.png", "ct_train_1019_label_frame_259.png", "ct_train_1012_label_frame_110.png", "ct_train_1019_label_frame_72.png", "ct_train_1019_label_frame_37.png", "ct_train_1012_label_frame_174.png", "ct_train_1010_label_frame_41.png", "ct_train_1002_label_frame_158.png", "ct_train_1012_label_frame_156.png", "ct_train_1019_label_frame_54.png", "ct_train_1010_label_frame_10.png", "ct_train_1010_label_frame_237.png", "ct_train_1019_label_frame_13.png", "ct_train_1012_label_frame_51.png", "ct_train_1019_label_frame_243.png", "ct_train_1010_label_frame_31.png", "ct_train_1019_label_frame_122.png", "ct_train_1012_label_frame_103.png", "ct_train_1010_label_frame_213.png", "ct_train_1002_label_frame_197.png", "ct_train_1019_label_frame_235.png", "ct_train_1002_label_frame_229.png", "ct_train_1019_label_frame_22.png", "ct_train_1019_label_frame_132.png", "ct_train_1002_label_frame_181.png", "ct_train_1002_label_frame_201.png", "ct_train_1019_label_frame_284.png", "ct_train_1012_label_frame_36.png", "ct_train_1002_label_frame_203.png", "ct_train_1019_label_frame_46.png", "ct_train_1019_label_frame_19.png", "ct_train_1019_label_frame_155.png", "ct_train_1019_label_frame_3.png", "ct_train_1012_label_frame_106.png", "ct_train_1012_label_frame_102.png", "ct_train_1010_label_frame_223.png", "ct_train_1012_label_frame_53.png", "ct_train_1002_label_frame_78.png", "ct_train_1019_label_frame_89.png", "ct_train_1019_label_frame_45.png", "ct_train_1019_label_frame_267.png", "ct_train_1019_label_frame_134.png", "ct_train_1010_label_frame_199.png", "ct_train_1002_label_frame_23.png", "ct_train_1012_label_frame_76.png", "ct_train_1019_label_frame_197.png", "ct_train_1002_label_frame_186.png", "ct_train_1002_label_frame_62.png", "ct_train_1002_label_frame_193.png", "ct_train_1010_label_frame_272.png", "ct_train_1010_label_frame_15.png", "ct_train_1019_label_frame_79.png", "ct_train_1010_label_frame_47.png", "ct_train_1010_label_frame_175.png", "ct_train_1002_label_frame_188.png", "ct_train_1012_label_frame_161.png", "ct_train_1002_label_frame_51.png", "ct_train_1012_label_frame_97.png", "ct_train_1010_label_frame_6.png", "ct_train_1002_label_frame_50.png", "ct_train_1012_label_frame_163.png", "ct_train_1010_label_frame_234.png", "ct_train_1010_label_frame_119.png", "ct_train_1019_label_frame_219.png", "ct_train_1019_label_frame_252.png", "ct_train_1012_label_frame_173.png", "ct_train_1002_label_frame_9.png", 
"ct_train_1012_label_frame_88.png", "ct_train_1002_label_frame_30.png", "ct_train_1012_label_frame_118.png", "ct_train_1012_label_frame_155.png", "ct_train_1010_label_frame_246.png", "ct_train_1019_label_frame_85.png", "ct_train_1019_label_frame_103.png", "ct_train_1010_label_frame_270.png", "ct_train_1002_label_frame_148.png", "ct_train_1019_label_frame_65.png", "ct_train_1012_label_frame_60.png", "ct_train_1012_label_frame_111.png", "ct_train_1012_label_frame_23.png", "ct_train_1002_label_frame_33.png", "ct_train_1010_label_frame_108.png", "ct_train_1019_label_frame_32.png", "ct_train_1010_label_frame_120.png", "ct_train_1002_label_frame_112.png", "ct_train_1010_label_frame_134.png", "ct_train_1002_label_frame_34.png", "ct_train_1012_label_frame_105.png", "ct_train_1019_label_frame_293.png", "ct_train_1010_label_frame_12.png", "ct_train_1019_label_frame_104.png", "ct_train_1019_label_frame_33.png", "ct_train_1010_label_frame_252.png", "ct_train_1002_label_frame_161.png", "ct_train_1010_label_frame_75.png", "ct_train_1002_label_frame_172.png", "ct_train_1012_label_frame_124.png", "ct_train_1019_label_frame_69.png", "ct_train_1019_label_frame_131.png", "ct_train_1019_label_frame_4.png", "ct_train_1019_label_frame_116.png", "ct_train_1002_label_frame_140.png", "ct_train_1002_label_frame_106.png", "ct_train_1010_label_frame_269.png", "ct_train_1002_label_frame_129.png", "ct_train_1019_label_frame_262.png", "ct_train_1002_label_frame_117.png", "ct_train_1019_label_frame_202.png", "ct_train_1010_label_frame_66.png", "ct_train_1002_label_frame_234.png", "ct_train_1019_label_frame_147.png", "ct_train_1010_label_frame_129.png", "ct_train_1002_label_frame_92.png", "ct_train_1002_label_frame_136.png", "ct_train_1002_label_frame_224.png", "ct_train_1010_label_frame_162.png", "ct_train_1002_label_frame_175.png", "ct_train_1010_label_frame_40.png", "ct_train_1010_label_frame_193.png", "ct_train_1010_label_frame_179.png", "ct_train_1019_label_frame_2.png", "ct_train_1010_label_frame_255.png", "ct_train_1010_label_frame_207.png", "ct_train_1019_label_frame_210.png", "ct_train_1019_label_frame_255.png", "ct_train_1012_label_frame_48.png", "ct_train_1019_label_frame_152.png", "ct_train_1012_label_frame_121.png", "ct_train_1019_label_frame_170.png", "ct_train_1002_label_frame_102.png", "ct_train_1002_label_frame_42.png", "ct_train_1019_label_frame_211.png", "ct_train_1010_label_frame_235.png", "ct_train_1019_label_frame_174.png", "ct_train_1019_label_frame_194.png", "ct_train_1012_label_frame_120.png", "ct_train_1010_label_frame_95.png", "ct_train_1012_label_frame_92.png", "ct_train_1010_label_frame_266.png", "ct_train_1010_label_frame_87.png", "ct_train_1012_label_frame_11.png", "ct_train_1019_label_frame_87.png", "ct_train_1002_label_frame_233.png", "ct_train_1002_label_frame_173.png", "ct_train_1010_label_frame_19.png", "ct_train_1010_label_frame_110.png", "ct_train_1010_label_frame_8.png", "ct_train_1002_label_frame_130.png", "ct_train_1010_label_frame_247.png", "ct_train_1002_label_frame_190.png", "ct_train_1002_label_frame_202.png", "ct_train_1019_label_frame_196.png", "ct_train_1019_label_frame_81.png", "ct_train_1019_label_frame_16.png", "ct_train_1002_label_frame_120.png", "ct_train_1010_label_frame_80.png", "ct_train_1012_label_frame_153.png", "ct_train_1010_label_frame_39.png", "ct_train_1012_label_frame_147.png", "ct_train_1010_label_frame_0.png", "ct_train_1019_label_frame_190.png", "ct_train_1002_label_frame_123.png", "ct_train_1019_label_frame_41.png", "ct_train_1019_label_frame_58.png", 
"ct_train_1012_label_frame_52.png", "ct_train_1010_label_frame_26.png", "ct_train_1010_label_frame_195.png", "ct_train_1012_label_frame_45.png", "ct_train_1010_label_frame_172.png", "ct_train_1012_label_frame_87.png", "ct_train_1002_label_frame_31.png", "ct_train_1010_label_frame_30.png", "ct_train_1002_label_frame_37.png", "ct_train_1010_label_frame_82.png", "ct_train_1019_label_frame_126.png", "ct_train_1010_label_frame_186.png", "ct_train_1002_label_frame_155.png", "ct_train_1002_label_frame_25.png", "ct_train_1012_label_frame_132.png", "ct_train_1012_label_frame_69.png", "ct_train_1002_label_frame_205.png", "ct_train_1010_label_frame_229.png", "ct_train_1012_label_frame_127.png", "ct_train_1019_label_frame_11.png", "ct_train_1012_label_frame_8.png", "ct_train_1012_label_frame_104.png", "ct_train_1019_label_frame_117.png", "ct_train_1002_label_frame_212.png", "ct_train_1012_label_frame_98.png", "ct_train_1002_label_frame_199.png", "ct_train_1012_label_frame_140.png", "ct_train_1019_label_frame_216.png", "ct_train_1012_label_frame_159.png", "ct_train_1010_label_frame_106.png", "ct_train_1002_label_frame_0.png", "ct_train_1002_label_frame_146.png", "ct_train_1019_label_frame_149.png", "ct_train_1002_label_frame_79.png", "ct_train_1010_label_frame_138.png", "ct_train_1019_label_frame_130.png", "ct_train_1012_label_frame_116.png", "ct_train_1012_label_frame_21.png", "ct_train_1012_label_frame_18.png", "ct_train_1012_label_frame_41.png", "ct_train_1002_label_frame_196.png", "ct_train_1010_label_frame_218.png", "ct_train_1010_label_frame_192.png", "ct_train_1019_label_frame_86.png", "ct_train_1010_label_frame_58.png", "ct_train_1010_label_frame_147.png", "ct_train_1019_label_frame_179.png", "ct_train_1002_label_frame_29.png", "ct_train_1002_label_frame_125.png", "ct_train_1019_label_frame_141.png", "ct_train_1019_label_frame_215.png", "ct_train_1019_label_frame_213.png", "ct_train_1012_label_frame_115.png", "ct_train_1012_label_frame_145.png", "ct_train_1019_label_frame_42.png", "ct_train_1002_label_frame_219.png", "ct_train_1012_label_frame_137.png", "ct_train_1002_label_frame_221.png", "ct_train_1010_label_frame_2.png", "ct_train_1002_label_frame_10.png", "ct_train_1010_label_frame_149.png", "ct_train_1002_label_frame_59.png", "ct_train_1010_label_frame_155.png", "ct_train_1010_label_frame_57.png", "ct_train_1019_label_frame_279.png", "ct_train_1002_label_frame_228.png", "ct_train_1012_label_frame_29.png", "ct_train_1012_label_frame_57.png", "ct_train_1019_label_frame_77.png", "ct_train_1012_label_frame_38.png", "ct_train_1010_label_frame_216.png", "ct_train_1019_label_frame_260.png", "ct_train_1019_label_frame_1.png", "ct_train_1019_label_frame_17.png", "ct_train_1002_label_frame_13.png", "ct_train_1010_label_frame_22.png", "ct_train_1012_label_frame_112.png", "ct_train_1012_label_frame_73.png", "ct_train_1012_label_frame_139.png", "ct_train_1019_label_frame_133.png", "ct_train_1010_label_frame_111.png", "ct_train_1012_label_frame_107.png", "ct_train_1012_label_frame_86.png", "ct_train_1019_label_frame_242.png", "ct_train_1019_label_frame_78.png", "ct_train_1002_label_frame_192.png", "ct_train_1012_label_frame_142.png", "ct_train_1019_label_frame_50.png", "ct_train_1010_label_frame_176.png", "ct_train_1010_label_frame_53.png", "ct_train_1010_label_frame_18.png", "ct_train_1012_label_frame_13.png", "ct_train_1002_label_frame_163.png", "ct_train_1019_label_frame_135.png", "ct_train_1002_label_frame_156.png", "ct_train_1002_label_frame_74.png", "ct_train_1010_label_frame_55.png", 
"ct_train_1019_label_frame_278.png", "ct_train_1010_label_frame_136.png", "ct_train_1010_label_frame_71.png", "ct_train_1012_label_frame_169.png", "ct_train_1019_label_frame_272.png", "ct_train_1012_label_frame_123.png", "ct_train_1010_label_frame_264.png", "ct_train_1019_label_frame_43.png", "ct_train_1010_label_frame_153.png", "ct_train_1019_label_frame_198.png", "ct_train_1012_label_frame_114.png", "ct_train_1002_label_frame_69.png", "ct_train_1019_label_frame_289.png", "ct_train_1012_label_frame_138.png", "ct_train_1010_label_frame_14.png", "ct_train_1010_label_frame_158.png", "ct_train_1019_label_frame_143.png", "ct_train_1019_label_frame_225.png", "ct_train_1019_label_frame_165.png", "ct_train_1010_label_frame_48.png", "ct_train_1002_label_frame_114.png", "ct_train_1019_label_frame_90.png", "ct_train_1012_label_frame_3.png", "ct_train_1010_label_frame_45.png", "ct_train_1002_label_frame_218.png", "ct_train_1010_label_frame_184.png", "ct_train_1010_label_frame_230.png", "ct_train_1002_label_frame_63.png", "ct_train_1002_label_frame_169.png", "ct_train_1002_label_frame_27.png", "ct_train_1012_label_frame_26.png", "ct_train_1019_label_frame_6.png", "ct_train_1019_label_frame_186.png", "ct_train_1010_label_frame_260.png", "ct_train_1010_label_frame_214.png", "ct_train_1010_label_frame_250.png", "ct_train_1010_label_frame_43.png", "ct_train_1010_label_frame_239.png", "ct_train_1010_label_frame_143.png", "ct_train_1002_label_frame_52.png", "ct_train_1019_label_frame_47.png", "ct_train_1010_label_frame_83.png", "ct_train_1010_label_frame_33.png", "ct_train_1019_label_frame_102.png", "ct_train_1002_label_frame_133.png", "ct_train_1019_label_frame_206.png", "ct_train_1010_label_frame_4.png", "ct_train_1002_label_frame_61.png", "ct_train_1010_label_frame_189.png", "ct_train_1019_label_frame_139.png", "ct_train_1012_label_frame_10.png", "ct_train_1010_label_frame_20.png", "ct_train_1019_label_frame_63.png", "ct_train_1012_label_frame_2.png", "ct_train_1019_label_frame_249.png", "ct_train_1002_label_frame_137.png", "ct_train_1012_label_frame_162.png", "ct_train_1019_label_frame_36.png", "ct_train_1019_label_frame_261.png", "ct_train_1010_label_frame_9.png", "ct_train_1019_label_frame_263.png", "ct_train_1012_label_frame_70.png", "ct_train_1010_label_frame_139.png", "ct_train_1010_label_frame_205.png", "ct_train_1010_label_frame_154.png", "ct_train_1019_label_frame_247.png", "ct_train_1019_label_frame_52.png", "ct_train_1010_label_frame_267.png", "ct_train_1019_label_frame_18.png", "ct_train_1012_label_frame_149.png", "ct_train_1010_label_frame_46.png", "ct_train_1010_label_frame_185.png", "ct_train_1002_label_frame_124.png", "ct_train_1002_label_frame_167.png", "ct_train_1002_label_frame_226.png", "ct_train_1012_label_frame_154.png", "ct_train_1010_label_frame_91.png", "ct_train_1002_label_frame_73.png", "ct_train_1002_label_frame_67.png", "ct_train_1010_label_frame_236.png", "ct_train_1019_label_frame_121.png", "ct_train_1002_label_frame_150.png", "ct_train_1019_label_frame_0.png", "ct_train_1019_label_frame_8.png", "ct_train_1002_label_frame_204.png", "ct_train_1010_label_frame_182.png", "ct_train_1019_label_frame_171.png", "ct_train_1010_label_frame_102.png", "ct_train_1002_label_frame_213.png", "ct_train_1002_label_frame_236.png", "ct_train_1010_label_frame_226.png", "ct_train_1010_label_frame_244.png", "ct_train_1019_label_frame_60.png", "ct_train_1019_label_frame_250.png", "ct_train_1019_label_frame_62.png", "ct_train_1012_label_frame_63.png", "ct_train_1019_label_frame_256.png", 
"ct_train_1012_label_frame_28.png", "ct_train_1010_label_frame_49.png", "ct_train_1010_label_frame_156.png", "ct_train_1019_label_frame_35.png", "ct_train_1019_label_frame_30.png", "ct_train_1012_label_frame_59.png", "ct_train_1002_label_frame_3.png", "ct_train_1010_label_frame_78.png", "ct_train_1012_label_frame_74.png", "ct_train_1002_label_frame_183.png", "ct_train_1010_label_frame_191.png", "ct_train_1002_label_frame_198.png", "ct_train_1010_label_frame_37.png", "ct_train_1010_label_frame_233.png", "ct_train_1002_label_frame_76.png", "ct_train_1019_label_frame_214.png", "ct_train_1002_label_frame_200.png", "ct_train_1002_label_frame_45.png", "ct_train_1019_label_frame_240.png", "ct_train_1010_label_frame_32.png", "ct_train_1012_label_frame_61.png", "ct_train_1002_label_frame_44.png", "ct_train_1019_label_frame_283.png", "ct_train_1019_label_frame_275.png", "ct_train_1002_label_frame_195.png", "ct_train_1010_label_frame_127.png", "ct_train_1010_label_frame_65.png", "ct_train_1019_label_frame_258.png", "ct_train_1010_label_frame_60.png", "ct_train_1019_label_frame_253.png", "ct_train_1002_label_frame_99.png", "ct_train_1019_label_frame_285.png", "ct_train_1019_label_frame_266.png", "ct_train_1019_label_frame_96.png", "ct_train_1010_label_frame_67.png", "ct_train_1002_label_frame_43.png", "ct_train_1019_label_frame_12.png", "ct_train_1010_label_frame_166.png", "ct_train_1019_label_frame_245.png", "ct_train_1012_label_frame_117.png", "ct_train_1002_label_frame_211.png", "ct_train_1010_label_frame_188.png", "ct_train_1012_label_frame_167.png", "ct_train_1010_label_frame_180.png", "ct_train_1010_label_frame_256.png", "ct_train_1010_label_frame_59.png", "ct_train_1019_label_frame_82.png", "ct_train_1019_label_frame_94.png", "ct_train_1019_label_frame_223.png", "ct_train_1010_label_frame_73.png", "ct_train_1012_label_frame_62.png", "ct_train_1019_label_frame_21.png", "ct_train_1010_label_frame_36.png", "ct_train_1002_label_frame_223.png", "ct_train_1002_label_frame_88.png", "ct_train_1002_label_frame_60.png", "ct_train_1019_label_frame_92.png", "ct_train_1019_label_frame_276.png", "ct_train_1012_label_frame_90.png", "ct_train_1002_label_frame_208.png", "ct_train_1019_label_frame_257.png", "ct_train_1012_label_frame_34.png", "ct_train_1019_label_frame_294.png", "ct_train_1012_label_frame_27.png", "ct_train_1002_label_frame_141.png", "ct_train_1019_label_frame_237.png", "ct_train_1002_label_frame_185.png", "ct_train_1012_label_frame_58.png", "ct_train_1002_label_frame_1.png", "ct_train_1002_label_frame_97.png", "ct_train_1010_label_frame_114.png", "ct_train_1002_label_frame_119.png", "ct_train_1010_label_frame_273.png", "ct_train_1019_label_frame_264.png", "ct_train_1012_label_frame_91.png", "ct_train_1010_label_frame_217.png", "ct_train_1010_label_frame_70.png", "ct_train_1019_label_frame_27.png", "ct_train_1019_label_frame_38.png", "ct_train_1019_label_frame_203.png", "ct_train_1012_label_frame_129.png", "ct_train_1012_label_frame_128.png", "ct_train_1002_label_frame_57.png", "ct_train_1002_label_frame_24.png", "ct_train_1010_label_frame_28.png", "ct_train_1019_label_frame_101.png", "ct_train_1019_label_frame_228.png", "ct_train_1019_label_frame_157.png", "ct_train_1010_label_frame_89.png", "ct_train_1019_label_frame_95.png", "ct_train_1002_label_frame_98.png", "ct_train_1002_label_frame_113.png", "ct_train_1010_label_frame_50.png", "ct_train_1012_label_frame_17.png", "ct_train_1002_label_frame_64.png", "ct_train_1010_label_frame_268.png", "ct_train_1019_label_frame_129.png", 
"ct_train_1002_label_frame_230.png", "ct_train_1019_label_frame_53.png", "ct_train_1019_label_frame_183.png", "ct_train_1019_label_frame_113.png", "ct_train_1012_label_frame_166.png", "ct_train_1019_label_frame_128.png", "ct_train_1010_label_frame_118.png", "ct_train_1010_label_frame_79.png", "ct_train_1019_label_frame_145.png", "ct_train_1019_label_frame_7.png", "ct_train_1010_label_frame_137.png", "ct_train_1002_label_frame_17.png", "ct_train_1002_label_frame_127.png", "ct_train_1012_label_frame_108.png", "ct_train_1012_label_frame_152.png", "ct_train_1019_label_frame_156.png", "ct_train_1019_label_frame_207.png", "ct_train_1010_label_frame_157.png", "ct_train_1012_label_frame_130.png", "ct_train_1010_label_frame_145.png", "ct_train_1010_label_frame_27.png", "ct_train_1002_label_frame_154.png", "ct_train_1019_label_frame_71.png", "ct_train_1002_label_frame_138.png", "ct_train_1010_label_frame_258.png", "ct_train_1019_label_frame_241.png", "ct_train_1019_label_frame_112.png", "ct_train_1019_label_frame_188.png", "ct_train_1002_label_frame_12.png", "ct_train_1002_label_frame_46.png", "ct_train_1019_label_frame_166.png", "ct_train_1010_label_frame_105.png", "ct_train_1019_label_frame_115.png", "ct_train_1002_label_frame_14.png", "ct_train_1010_label_frame_131.png", "ct_train_1019_label_frame_64.png", "ct_train_1012_label_frame_168.png", "ct_train_1019_label_frame_269.png", "ct_train_1010_label_frame_197.png", "ct_train_1002_label_frame_174.png", "ct_train_1002_label_frame_68.png", "ct_train_1010_label_frame_168.png", "ct_train_1019_label_frame_185.png", "ct_train_1010_label_frame_64.png", "ct_train_1002_label_frame_21.png", "ct_train_1010_label_frame_245.png", "ct_train_1012_label_frame_35.png", "ct_train_1002_label_frame_220.png", "ct_train_1019_label_frame_251.png", "ct_train_1002_label_frame_217.png", "ct_train_1010_label_frame_146.png", "ct_train_1012_label_frame_9.png", "ct_train_1019_label_frame_229.png", "ct_train_1012_label_frame_37.png", "ct_train_1019_label_frame_160.png", "ct_train_1002_label_frame_121.png", "ct_train_1019_label_frame_120.png", "ct_train_1019_label_frame_193.png", "ct_train_1010_label_frame_103.png", "ct_train_1002_label_frame_72.png", "ct_train_1002_label_frame_86.png", "ct_train_1010_label_frame_150.png", "ct_train_1010_label_frame_265.png", "ct_train_1012_label_frame_160.png", "ct_train_1012_label_frame_93.png", "ct_train_1002_label_frame_115.png", "ct_train_1012_label_frame_7.png", "ct_train_1002_label_frame_122.png", "ct_train_1019_label_frame_119.png", "ct_train_1012_label_frame_119.png", "ct_train_1002_label_frame_118.png", "ct_train_1012_label_frame_32.png", "ct_train_1019_label_frame_136.png", "ct_train_1002_label_frame_55.png", "ct_train_1010_label_frame_29.png", "ct_train_1002_label_frame_151.png", "ct_train_1002_label_frame_237.png", "ct_train_1019_label_frame_254.png", "ct_train_1010_label_frame_25.png", "ct_train_1019_label_frame_220.png", "ct_train_1012_label_frame_80.png", "ct_train_1010_label_frame_169.png", "ct_train_1019_label_frame_55.png", "ct_train_1012_label_frame_133.png", "ct_train_1012_label_frame_176.png", "ct_train_1019_label_frame_40.png", "ct_train_1002_label_frame_22.png", "ct_train_1002_label_frame_39.png", "ct_train_1002_label_frame_128.png", "ct_train_1012_label_frame_49.png", "ct_train_1019_label_frame_148.png", "ct_train_1012_label_frame_42.png", "ct_train_1019_label_frame_146.png", "ct_train_1010_label_frame_198.png", "ct_train_1019_label_frame_234.png", "ct_train_1010_label_frame_221.png", "ct_train_1012_label_frame_136.png", 
"ct_train_1010_label_frame_238.png", "ct_train_1019_label_frame_295.png", "ct_train_1012_label_frame_143.png", "ct_train_1010_label_frame_231.png", "ct_train_1019_label_frame_137.png", "ct_train_1019_label_frame_208.png", "ct_train_1010_label_frame_220.png", "ct_train_1019_label_frame_153.png", "ct_train_1012_label_frame_25.png", "ct_train_1010_label_frame_140.png", "ct_train_1019_label_frame_238.png", "ct_train_1012_label_frame_56.png", "ct_train_1002_label_frame_132.png", "ct_train_1019_label_frame_142.png", "ct_train_1012_label_frame_141.png", "ct_train_1019_label_frame_150.png", "ct_train_1002_label_frame_210.png", "ct_train_1012_label_frame_144.png", "ct_train_1002_label_frame_89.png", "ct_train_1019_label_frame_158.png", "ct_train_1019_label_frame_9.png", "ct_train_1010_label_frame_7.png", "ct_train_1010_label_frame_5.png", "ct_train_1002_label_frame_83.png", "ct_train_1019_label_frame_204.png", "ct_train_1019_label_frame_93.png", "ct_train_1002_label_frame_166.png", "ct_train_1019_label_frame_76.png", "ct_train_1002_label_frame_90.png", "ct_train_1012_label_frame_151.png", "ct_train_1010_label_frame_171.png", "ct_train_1010_label_frame_254.png", "ct_train_1012_label_frame_172.png", "ct_train_1012_label_frame_126.png", "ct_train_1002_label_frame_206.png", "ct_train_1010_label_frame_151.png", "ct_train_1002_label_frame_139.png", "ct_train_1010_label_frame_242.png", "ct_train_1010_label_frame_115.png", "ct_train_1002_label_frame_159.png", "ct_train_1019_label_frame_68.png", "ct_train_1010_label_frame_202.png", "ct_train_1010_label_frame_135.png", "ct_train_1010_label_frame_85.png", "ct_train_1012_label_frame_171.png", "ct_train_1010_label_frame_211.png", "ct_train_1002_label_frame_143.png", "ct_train_1002_label_frame_58.png", "ct_train_1010_label_frame_34.png", "ct_train_1019_label_frame_286.png", "ct_train_1002_label_frame_142.png", "ct_train_1010_label_frame_101.png", "ct_train_1010_label_frame_227.png", "ct_train_1010_label_frame_96.png", "ct_train_1010_label_frame_74.png", "ct_train_1010_label_frame_174.png", "ct_train_1019_label_frame_271.png", "ct_train_1012_label_frame_71.png", "ct_train_1012_label_frame_81.png", "ct_train_1002_label_frame_215.png", "ct_train_1012_label_frame_164.png", "ct_train_1019_label_frame_31.png", "ct_train_1010_label_frame_62.png", "ct_train_1010_label_frame_77.png", "ct_train_1010_label_frame_98.png", "ct_train_1012_label_frame_75.png", "ct_train_1002_label_frame_11.png", "ct_train_1012_label_frame_46.png", "ct_train_1010_label_frame_152.png", "ct_train_1010_label_frame_259.png", "ct_train_1012_label_frame_100.png", "ct_train_1002_label_frame_84.png", "ct_train_1010_label_frame_219.png", "ct_train_1010_label_frame_212.png", "ct_train_1010_label_frame_54.png", "ct_train_1010_label_frame_240.png", "ct_train_1019_label_frame_118.png", "ct_train_1019_label_frame_66.png", "ct_train_1002_label_frame_54.png", "ct_train_1019_label_frame_268.png"]} \ No newline at end of file diff --git a/files_split/MMWHS_MR_Heart.json b/files_split/MMWHS_MR_Heart.json new file mode 100644 index 0000000000000000000000000000000000000000..1a9ab5384fe85be65b0dfd9107a20f14ff852546 --- /dev/null +++ b/files_split/MMWHS_MR_Heart.json @@ -0,0 +1 @@ +{"train": ["mr_train_1017_image_frame_89.png", "mr_train_1016_image_frame_39.png", "mr_train_1011_image_frame_114.png", "mr_train_1001_image_frame_106.png", "mr_train_1018_image_frame_143.png", "mr_train_1001_image_frame_51.png", "mr_train_1008_image_frame_94.png", "mr_train_1008_image_frame_128.png", "mr_train_1018_image_frame_100.png", 
"mr_train_1006_image_frame_24.png", "mr_train_1014_image_frame_107.png", "mr_train_1020_image_frame_119.png", "mr_train_1006_image_frame_52.png", "mr_train_1001_image_frame_50.png", "mr_train_1009_image_frame_18.png", "mr_train_1001_image_frame_141.png", "mr_train_1006_image_frame_3.png", "mr_train_1017_image_frame_80.png", "mr_train_1014_image_frame_34.png", "mr_train_1014_image_frame_157.png", "mr_train_1006_image_frame_120.png", "mr_train_1003_image_frame_159.png", "mr_train_1001_image_frame_24.png", "mr_train_1006_image_frame_92.png", "mr_train_1001_image_frame_109.png", "mr_train_1011_image_frame_8.png", "mr_train_1008_image_frame_82.png", "mr_train_1017_image_frame_70.png", "mr_train_1001_image_frame_44.png", "mr_train_1015_image_frame_73.png", "mr_train_1015_image_frame_135.png", "mr_train_1005_image_frame_72.png", "mr_train_1011_image_frame_12.png", "mr_train_1015_image_frame_88.png", "mr_train_1003_image_frame_144.png", "mr_train_1017_image_frame_110.png", "mr_train_1011_image_frame_43.png", "mr_train_1003_image_frame_110.png", "mr_train_1005_image_frame_39.png", "mr_train_1006_image_frame_57.png", "mr_train_1017_image_frame_12.png", "mr_train_1011_image_frame_141.png", "mr_train_1003_image_frame_0.png", "mr_train_1018_image_frame_91.png", "mr_train_1020_image_frame_105.png", "mr_train_1020_image_frame_24.png", "mr_train_1008_image_frame_50.png", "mr_train_1011_image_frame_15.png", "mr_train_1003_image_frame_133.png", "mr_train_1020_image_frame_93.png", "mr_train_1017_image_frame_31.png", "mr_train_1020_image_frame_7.png", "mr_train_1014_image_frame_93.png", "mr_train_1015_image_frame_80.png", "mr_train_1001_image_frame_113.png", "mr_train_1003_image_frame_76.png", "mr_train_1006_image_frame_7.png", "mr_train_1011_image_frame_119.png", "mr_train_1020_image_frame_92.png", "mr_train_1006_image_frame_114.png", "mr_train_1016_image_frame_125.png", "mr_train_1001_image_frame_96.png", "mr_train_1008_image_frame_35.png", "mr_train_1006_image_frame_159.png", "mr_train_1011_image_frame_120.png", "mr_train_1006_image_frame_76.png", "mr_train_1020_image_frame_118.png", "mr_train_1016_image_frame_58.png", "mr_train_1008_image_frame_1.png", "mr_train_1016_image_frame_96.png", "mr_train_1005_image_frame_99.png", "mr_train_1015_image_frame_193.png", "mr_train_1003_image_frame_107.png", "mr_train_1014_image_frame_59.png", "mr_train_1006_image_frame_90.png", "mr_train_1003_image_frame_145.png", "mr_train_1005_image_frame_15.png", "mr_train_1018_image_frame_115.png", "mr_train_1014_image_frame_136.png", "mr_train_1001_image_frame_86.png", "mr_train_1018_image_frame_105.png", "mr_train_1011_image_frame_150.png", "mr_train_1005_image_frame_6.png", "mr_train_1015_image_frame_17.png", "mr_train_1016_image_frame_94.png", "mr_train_1003_image_frame_90.png", "mr_train_1001_image_frame_143.png", "mr_train_1009_image_frame_60.png", "mr_train_1018_image_frame_102.png", "mr_train_1005_image_frame_42.png", "mr_train_1001_image_frame_99.png", "mr_train_1001_image_frame_64.png", "mr_train_1011_image_frame_77.png", "mr_train_1011_image_frame_35.png", "mr_train_1005_image_frame_92.png", "mr_train_1011_image_frame_1.png", "mr_train_1015_image_frame_107.png", "mr_train_1020_image_frame_8.png", "mr_train_1003_image_frame_8.png", "mr_train_1017_image_frame_27.png", "mr_train_1011_image_frame_73.png", "mr_train_1016_image_frame_75.png", "mr_train_1014_image_frame_33.png", "mr_train_1015_image_frame_116.png", "mr_train_1011_image_frame_151.png", "mr_train_1003_image_frame_150.png", "mr_train_1006_image_frame_38.png", 
"mr_train_1009_image_frame_114.png", "mr_train_1011_image_frame_25.png", "mr_train_1018_image_frame_47.png", "mr_train_1006_image_frame_33.png", "mr_train_1018_image_frame_41.png", "mr_train_1011_image_frame_86.png", "mr_train_1015_image_frame_3.png", "mr_train_1011_image_frame_72.png", "mr_train_1006_image_frame_34.png", "mr_train_1020_image_frame_26.png", "mr_train_1003_image_frame_156.png", "mr_train_1008_image_frame_103.png", "mr_train_1001_image_frame_42.png", "mr_train_1018_image_frame_126.png", "mr_train_1015_image_frame_120.png", "mr_train_1015_image_frame_161.png", "mr_train_1008_image_frame_79.png", "mr_train_1005_image_frame_49.png", "mr_train_1001_image_frame_158.png", "mr_train_1011_image_frame_130.png", "mr_train_1015_image_frame_124.png", "mr_train_1008_image_frame_21.png", "mr_train_1016_image_frame_10.png", "mr_train_1018_image_frame_123.png", "mr_train_1009_image_frame_42.png", "mr_train_1014_image_frame_79.png", "mr_train_1003_image_frame_28.png", "mr_train_1020_image_frame_27.png", "mr_train_1011_image_frame_123.png", "mr_train_1005_image_frame_122.png", "mr_train_1008_image_frame_31.png", "mr_train_1018_image_frame_44.png", "mr_train_1014_image_frame_63.png", "mr_train_1001_image_frame_21.png", "mr_train_1016_image_frame_70.png", "mr_train_1016_image_frame_86.png", "mr_train_1018_image_frame_7.png", "mr_train_1006_image_frame_133.png", "mr_train_1008_image_frame_14.png", "mr_train_1014_image_frame_141.png", "mr_train_1016_image_frame_35.png", "mr_train_1020_image_frame_117.png", "mr_train_1005_image_frame_112.png", "mr_train_1018_image_frame_93.png", "mr_train_1018_image_frame_13.png", "mr_train_1006_image_frame_82.png", "mr_train_1008_image_frame_26.png", "mr_train_1009_image_frame_81.png", "mr_train_1003_image_frame_99.png", "mr_train_1006_image_frame_149.png", "mr_train_1009_image_frame_85.png", "mr_train_1018_image_frame_71.png", "mr_train_1014_image_frame_111.png", "mr_train_1014_image_frame_15.png", "mr_train_1016_image_frame_13.png", "mr_train_1005_image_frame_26.png", "mr_train_1018_image_frame_103.png", "mr_train_1018_image_frame_67.png", "mr_train_1003_image_frame_135.png", "mr_train_1017_image_frame_82.png", "mr_train_1001_image_frame_67.png", "mr_train_1014_image_frame_116.png", "mr_train_1006_image_frame_22.png", "mr_train_1016_image_frame_79.png", "mr_train_1011_image_frame_21.png", "mr_train_1011_image_frame_41.png", "mr_train_1016_image_frame_3.png", "mr_train_1001_image_frame_104.png", "mr_train_1018_image_frame_0.png", "mr_train_1017_image_frame_117.png", "mr_train_1008_image_frame_61.png", "mr_train_1006_image_frame_119.png", "mr_train_1017_image_frame_0.png", "mr_train_1008_image_frame_46.png", "mr_train_1011_image_frame_51.png", "mr_train_1001_image_frame_33.png", "mr_train_1015_image_frame_165.png", "mr_train_1017_image_frame_129.png", "mr_train_1017_image_frame_32.png", "mr_train_1011_image_frame_26.png", "mr_train_1020_image_frame_38.png", "mr_train_1003_image_frame_34.png", "mr_train_1006_image_frame_56.png", "mr_train_1005_image_frame_24.png", "mr_train_1006_image_frame_2.png", "mr_train_1015_image_frame_176.png", "mr_train_1018_image_frame_52.png", "mr_train_1020_image_frame_60.png", "mr_train_1006_image_frame_1.png", "mr_train_1017_image_frame_99.png", "mr_train_1009_image_frame_103.png", "mr_train_1001_image_frame_5.png", "mr_train_1001_image_frame_37.png", "mr_train_1014_image_frame_137.png", "mr_train_1011_image_frame_2.png", "mr_train_1020_image_frame_6.png", "mr_train_1003_image_frame_154.png", "mr_train_1001_image_frame_69.png", 
"mr_train_1003_image_frame_55.png", "mr_train_1014_image_frame_20.png", "mr_train_1015_image_frame_187.png", "mr_train_1018_image_frame_117.png", "mr_train_1006_image_frame_135.png", "mr_train_1003_image_frame_27.png", "mr_train_1008_image_frame_69.png", "mr_train_1009_image_frame_21.png", "mr_train_1014_image_frame_38.png", "mr_train_1017_image_frame_59.png", "mr_train_1020_image_frame_58.png", "mr_train_1001_image_frame_126.png", "mr_train_1015_image_frame_54.png", "mr_train_1008_image_frame_84.png", "mr_train_1020_image_frame_59.png", "mr_train_1017_image_frame_133.png", "mr_train_1015_image_frame_160.png", "mr_train_1016_image_frame_44.png", "mr_train_1001_image_frame_65.png", "mr_train_1009_image_frame_59.png", "mr_train_1014_image_frame_55.png", "mr_train_1001_image_frame_70.png", "mr_train_1014_image_frame_26.png", "mr_train_1005_image_frame_50.png", "mr_train_1020_image_frame_11.png", "mr_train_1003_image_frame_6.png", "mr_train_1001_image_frame_132.png", "mr_train_1018_image_frame_9.png", "mr_train_1006_image_frame_126.png", "mr_train_1006_image_frame_77.png", "mr_train_1001_image_frame_16.png", "mr_train_1017_image_frame_20.png", "mr_train_1006_image_frame_39.png", "mr_train_1014_image_frame_1.png", "mr_train_1006_image_frame_8.png", "mr_train_1017_image_frame_33.png", "mr_train_1015_image_frame_27.png", "mr_train_1001_image_frame_85.png", "mr_train_1005_image_frame_58.png", "mr_train_1018_image_frame_4.png", "mr_train_1001_image_frame_8.png", "mr_train_1003_image_frame_46.png", "mr_train_1016_image_frame_12.png", "mr_train_1014_image_frame_10.png", "mr_train_1005_image_frame_56.png", "mr_train_1003_image_frame_49.png", "mr_train_1015_image_frame_11.png", "mr_train_1006_image_frame_12.png", "mr_train_1001_image_frame_11.png", "mr_train_1011_image_frame_107.png", "mr_train_1018_image_frame_59.png", "mr_train_1011_image_frame_22.png", "mr_train_1005_image_frame_45.png", "mr_train_1014_image_frame_155.png", "mr_train_1008_image_frame_37.png", "mr_train_1008_image_frame_49.png", "mr_train_1016_image_frame_124.png", "mr_train_1008_image_frame_96.png", "mr_train_1017_image_frame_91.png", "mr_train_1020_image_frame_103.png", "mr_train_1016_image_frame_36.png", "mr_train_1015_image_frame_104.png", "mr_train_1009_image_frame_119.png", "mr_train_1006_image_frame_78.png", "mr_train_1008_image_frame_126.png", "mr_train_1020_image_frame_52.png", "mr_train_1015_image_frame_23.png", "mr_train_1011_image_frame_63.png", "mr_train_1017_image_frame_101.png", "mr_train_1009_image_frame_14.png", "mr_train_1009_image_frame_94.png", "mr_train_1015_image_frame_26.png", "mr_train_1003_image_frame_48.png", "mr_train_1016_image_frame_104.png", "mr_train_1018_image_frame_26.png", "mr_train_1011_image_frame_108.png", "mr_train_1006_image_frame_4.png", "mr_train_1020_image_frame_101.png", "mr_train_1014_image_frame_92.png", "mr_train_1014_image_frame_76.png", "mr_train_1015_image_frame_41.png", "mr_train_1001_image_frame_123.png", "mr_train_1001_image_frame_155.png", "mr_train_1014_image_frame_54.png", "mr_train_1015_image_frame_68.png", "mr_train_1014_image_frame_117.png", "mr_train_1003_image_frame_81.png", "mr_train_1020_image_frame_10.png", "mr_train_1011_image_frame_79.png", "mr_train_1014_image_frame_159.png", "mr_train_1001_image_frame_22.png", "mr_train_1001_image_frame_119.png", "mr_train_1018_image_frame_51.png", "mr_train_1017_image_frame_24.png", "mr_train_1009_image_frame_16.png", "mr_train_1018_image_frame_130.png", "mr_train_1020_image_frame_5.png", "mr_train_1014_image_frame_133.png", 
"mr_train_1005_image_frame_13.png", "mr_train_1018_image_frame_25.png", "mr_train_1015_image_frame_42.png", "mr_train_1006_image_frame_104.png", "mr_train_1011_image_frame_83.png", "mr_train_1014_image_frame_153.png", "mr_train_1014_image_frame_101.png", "mr_train_1017_image_frame_123.png", "mr_train_1020_image_frame_22.png", "mr_train_1016_image_frame_59.png", "mr_train_1015_image_frame_126.png", "mr_train_1018_image_frame_31.png", "mr_train_1003_image_frame_142.png", "mr_train_1018_image_frame_65.png", "mr_train_1016_image_frame_29.png", "mr_train_1005_image_frame_97.png", "mr_train_1006_image_frame_18.png", "mr_train_1011_image_frame_5.png", "mr_train_1014_image_frame_6.png", "mr_train_1009_image_frame_92.png", "mr_train_1011_image_frame_16.png", "mr_train_1006_image_frame_0.png", "mr_train_1005_image_frame_80.png", "mr_train_1011_image_frame_6.png", "mr_train_1016_image_frame_115.png", "mr_train_1014_image_frame_73.png", "mr_train_1005_image_frame_123.png", "mr_train_1009_image_frame_27.png", "mr_train_1011_image_frame_121.png", "mr_train_1008_image_frame_56.png", "mr_train_1017_image_frame_9.png", "mr_train_1005_image_frame_75.png", "mr_train_1005_image_frame_38.png", "mr_train_1016_image_frame_17.png", "mr_train_1020_image_frame_14.png", "mr_train_1017_image_frame_51.png", "mr_train_1018_image_frame_46.png", "mr_train_1009_image_frame_48.png", "mr_train_1008_image_frame_65.png", "mr_train_1015_image_frame_100.png", "mr_train_1014_image_frame_13.png", "mr_train_1005_image_frame_128.png", "mr_train_1011_image_frame_13.png", "mr_train_1014_image_frame_151.png", "mr_train_1001_image_frame_89.png", "mr_train_1005_image_frame_82.png", "mr_train_1008_image_frame_80.png", "mr_train_1014_image_frame_41.png", "mr_train_1006_image_frame_85.png", "mr_train_1016_image_frame_37.png", "mr_train_1001_image_frame_20.png", "mr_train_1015_image_frame_39.png", "mr_train_1001_image_frame_121.png", "mr_train_1011_image_frame_84.png", "mr_train_1011_image_frame_94.png", "mr_train_1006_image_frame_94.png", "mr_train_1009_image_frame_45.png", "mr_train_1020_image_frame_134.png", "mr_train_1003_image_frame_1.png", "mr_train_1005_image_frame_79.png", "mr_train_1008_image_frame_105.png", "mr_train_1003_image_frame_4.png", "mr_train_1017_image_frame_118.png", "mr_train_1003_image_frame_152.png", "mr_train_1014_image_frame_4.png", "mr_train_1014_image_frame_130.png", "mr_train_1003_image_frame_62.png", "mr_train_1014_image_frame_48.png", "mr_train_1009_image_frame_115.png", "mr_train_1006_image_frame_153.png", "mr_train_1020_image_frame_130.png", "mr_train_1008_image_frame_85.png", "mr_train_1003_image_frame_85.png", "mr_train_1006_image_frame_145.png", "mr_train_1003_image_frame_30.png", "mr_train_1018_image_frame_136.png", "mr_train_1005_image_frame_19.png", "mr_train_1003_image_frame_98.png", "mr_train_1008_image_frame_70.png", "mr_train_1017_image_frame_132.png", "mr_train_1003_image_frame_7.png", "mr_train_1015_image_frame_55.png", "mr_train_1003_image_frame_155.png", "mr_train_1018_image_frame_34.png", "mr_train_1014_image_frame_49.png", "mr_train_1018_image_frame_10.png", "mr_train_1014_image_frame_72.png", "mr_train_1017_image_frame_38.png", "mr_train_1001_image_frame_32.png", "mr_train_1018_image_frame_120.png", "mr_train_1005_image_frame_65.png", "mr_train_1016_image_frame_97.png", "mr_train_1018_image_frame_57.png", "mr_train_1020_image_frame_63.png", "mr_train_1011_image_frame_62.png", "mr_train_1017_image_frame_136.png", "mr_train_1008_image_frame_36.png", "mr_train_1006_image_frame_36.png", 
"mr_train_1015_image_frame_40.png", "mr_train_1005_image_frame_129.png", "mr_train_1015_image_frame_174.png", "mr_train_1018_image_frame_82.png", "mr_train_1008_image_frame_74.png", "mr_train_1006_image_frame_74.png", "mr_train_1011_image_frame_27.png", "mr_train_1003_image_frame_29.png", "mr_train_1005_image_frame_77.png", "mr_train_1011_image_frame_87.png", "mr_train_1009_image_frame_75.png", "mr_train_1005_image_frame_33.png", "mr_train_1020_image_frame_17.png", "mr_train_1020_image_frame_54.png", "mr_train_1018_image_frame_36.png", "mr_train_1008_image_frame_114.png", "mr_train_1016_image_frame_15.png", "mr_train_1014_image_frame_53.png", "mr_train_1011_image_frame_154.png", "mr_train_1001_image_frame_147.png", "mr_train_1016_image_frame_18.png", "mr_train_1009_image_frame_58.png", "mr_train_1018_image_frame_22.png", "mr_train_1008_image_frame_90.png", "mr_train_1014_image_frame_7.png", "mr_train_1015_image_frame_142.png", "mr_train_1015_image_frame_52.png", "mr_train_1016_image_frame_23.png", "mr_train_1003_image_frame_74.png", "mr_train_1011_image_frame_78.png", "mr_train_1006_image_frame_129.png", "mr_train_1009_image_frame_24.png", "mr_train_1017_image_frame_3.png", "mr_train_1016_image_frame_62.png", "mr_train_1016_image_frame_19.png", "mr_train_1020_image_frame_65.png", "mr_train_1017_image_frame_29.png", "mr_train_1018_image_frame_8.png", "mr_train_1003_image_frame_83.png", "mr_train_1018_image_frame_81.png", "mr_train_1003_image_frame_86.png", "mr_train_1016_image_frame_77.png", "mr_train_1016_image_frame_80.png", "mr_train_1014_image_frame_152.png", "mr_train_1008_image_frame_118.png", "mr_train_1020_image_frame_121.png", "mr_train_1005_image_frame_107.png", "mr_train_1008_image_frame_88.png", "mr_train_1003_image_frame_64.png", "mr_train_1016_image_frame_48.png", "mr_train_1001_image_frame_60.png", "mr_train_1003_image_frame_73.png", "mr_train_1011_image_frame_54.png", "mr_train_1016_image_frame_67.png", "mr_train_1008_image_frame_121.png", "mr_train_1006_image_frame_28.png", "mr_train_1006_image_frame_102.png", "mr_train_1011_image_frame_66.png", "mr_train_1015_image_frame_78.png", "mr_train_1014_image_frame_114.png", "mr_train_1003_image_frame_126.png", "mr_train_1014_image_frame_31.png", "mr_train_1015_image_frame_16.png", "mr_train_1011_image_frame_57.png", "mr_train_1009_image_frame_19.png", "mr_train_1016_image_frame_46.png", "mr_train_1001_image_frame_82.png", "mr_train_1017_image_frame_134.png", "mr_train_1003_image_frame_37.png", "mr_train_1017_image_frame_42.png", "mr_train_1003_image_frame_106.png", "mr_train_1020_image_frame_111.png", "mr_train_1006_image_frame_116.png", "mr_train_1005_image_frame_20.png", "mr_train_1020_image_frame_73.png", "mr_train_1014_image_frame_37.png", "mr_train_1014_image_frame_62.png", "mr_train_1017_image_frame_50.png", "mr_train_1016_image_frame_78.png", "mr_train_1009_image_frame_30.png", "mr_train_1001_image_frame_145.png", "mr_train_1020_image_frame_67.png", "mr_train_1001_image_frame_14.png", "mr_train_1006_image_frame_88.png", "mr_train_1014_image_frame_44.png", "mr_train_1017_image_frame_71.png", "mr_train_1015_image_frame_46.png", "mr_train_1014_image_frame_148.png", "mr_train_1005_image_frame_11.png", "mr_train_1008_image_frame_22.png", "mr_train_1011_image_frame_10.png", "mr_train_1015_image_frame_133.png", "mr_train_1014_image_frame_36.png", "mr_train_1018_image_frame_109.png", "mr_train_1006_image_frame_31.png", "mr_train_1018_image_frame_53.png", "mr_train_1008_image_frame_7.png", "mr_train_1017_image_frame_74.png", 
"mr_train_1006_image_frame_71.png", "mr_train_1018_image_frame_101.png", "mr_train_1017_image_frame_13.png", "mr_train_1016_image_frame_4.png", "mr_train_1005_image_frame_110.png", "mr_train_1020_image_frame_68.png", "mr_train_1003_image_frame_61.png", "mr_train_1016_image_frame_123.png", "mr_train_1003_image_frame_121.png", "mr_train_1009_image_frame_63.png", "mr_train_1003_image_frame_91.png", "mr_train_1014_image_frame_104.png", "mr_train_1015_image_frame_105.png", "mr_train_1014_image_frame_100.png", "mr_train_1014_image_frame_35.png", "mr_train_1005_image_frame_76.png", "mr_train_1003_image_frame_52.png", "mr_train_1014_image_frame_144.png", "mr_train_1006_image_frame_70.png", "mr_train_1006_image_frame_84.png", "mr_train_1017_image_frame_25.png", "mr_train_1014_image_frame_134.png", "mr_train_1008_image_frame_98.png", "mr_train_1009_image_frame_25.png", "mr_train_1006_image_frame_68.png", "mr_train_1008_image_frame_64.png", "mr_train_1006_image_frame_49.png", "mr_train_1018_image_frame_133.png", "mr_train_1016_image_frame_126.png", "mr_train_1015_image_frame_74.png", "mr_train_1011_image_frame_147.png", "mr_train_1006_image_frame_148.png", "mr_train_1018_image_frame_2.png", "mr_train_1001_image_frame_150.png", "mr_train_1008_image_frame_124.png", "mr_train_1016_image_frame_7.png", "mr_train_1001_image_frame_108.png", "mr_train_1018_image_frame_110.png", "mr_train_1011_image_frame_88.png", "mr_train_1001_image_frame_41.png", "mr_train_1008_image_frame_78.png", "mr_train_1011_image_frame_138.png", "mr_train_1014_image_frame_58.png", "mr_train_1020_image_frame_47.png", "mr_train_1006_image_frame_122.png", "mr_train_1018_image_frame_104.png", "mr_train_1001_image_frame_107.png", "mr_train_1005_image_frame_57.png", "mr_train_1018_image_frame_140.png", "mr_train_1011_image_frame_102.png", "mr_train_1011_image_frame_9.png", "mr_train_1008_image_frame_11.png", "mr_train_1018_image_frame_39.png", "mr_train_1016_image_frame_99.png", "mr_train_1008_image_frame_106.png", "mr_train_1011_image_frame_48.png", "mr_train_1016_image_frame_82.png", "mr_train_1006_image_frame_51.png", "mr_train_1016_image_frame_85.png", "mr_train_1011_image_frame_143.png", "mr_train_1006_image_frame_146.png", "mr_train_1001_image_frame_30.png", "mr_train_1009_image_frame_11.png", "mr_train_1001_image_frame_128.png", "mr_train_1009_image_frame_44.png", "mr_train_1015_image_frame_179.png", "mr_train_1006_image_frame_134.png", "mr_train_1009_image_frame_35.png", "mr_train_1001_image_frame_3.png", "mr_train_1017_image_frame_56.png", "mr_train_1001_image_frame_102.png", "mr_train_1016_image_frame_110.png", "mr_train_1008_image_frame_117.png", "mr_train_1017_image_frame_6.png", "mr_train_1014_image_frame_119.png", "mr_train_1003_image_frame_92.png", "mr_train_1001_image_frame_59.png", "mr_train_1020_image_frame_33.png", "mr_train_1005_image_frame_86.png", "mr_train_1015_image_frame_82.png", "mr_train_1011_image_frame_149.png", "mr_train_1011_image_frame_37.png", "mr_train_1018_image_frame_21.png", "mr_train_1003_image_frame_115.png", "mr_train_1008_image_frame_20.png", "mr_train_1020_image_frame_126.png", "mr_train_1017_image_frame_26.png", "mr_train_1008_image_frame_8.png", "mr_train_1001_image_frame_19.png", "mr_train_1014_image_frame_27.png", "mr_train_1005_image_frame_32.png", "mr_train_1009_image_frame_77.png", "mr_train_1008_image_frame_107.png", "mr_train_1018_image_frame_62.png", "mr_train_1014_image_frame_88.png", "mr_train_1003_image_frame_87.png", "mr_train_1015_image_frame_103.png", 
"mr_train_1017_image_frame_61.png", "mr_train_1015_image_frame_131.png", "mr_train_1020_image_frame_28.png", "mr_train_1017_image_frame_76.png", "mr_train_1001_image_frame_79.png", "mr_train_1016_image_frame_105.png", "mr_train_1014_image_frame_39.png", "mr_train_1001_image_frame_156.png", "mr_train_1001_image_frame_23.png", "mr_train_1016_image_frame_60.png", "mr_train_1017_image_frame_2.png", "mr_train_1009_image_frame_79.png", "mr_train_1015_image_frame_92.png", "mr_train_1009_image_frame_112.png", "mr_train_1020_image_frame_76.png", "mr_train_1014_image_frame_128.png", "mr_train_1006_image_frame_142.png", "mr_train_1016_image_frame_106.png", "mr_train_1001_image_frame_146.png", "mr_train_1018_image_frame_70.png", "mr_train_1003_image_frame_78.png", "mr_train_1006_image_frame_100.png", "mr_train_1018_image_frame_129.png", "mr_train_1011_image_frame_98.png", "mr_train_1015_image_frame_12.png", "mr_train_1017_image_frame_41.png", "mr_train_1017_image_frame_45.png", "mr_train_1009_image_frame_76.png", "mr_train_1018_image_frame_122.png", "mr_train_1017_image_frame_93.png", "mr_train_1015_image_frame_147.png", "mr_train_1020_image_frame_57.png", "mr_train_1009_image_frame_15.png", "mr_train_1003_image_frame_32.png", "mr_train_1020_image_frame_49.png", "mr_train_1017_image_frame_100.png", "mr_train_1017_image_frame_66.png", "mr_train_1001_image_frame_6.png", "mr_train_1005_image_frame_48.png", "mr_train_1003_image_frame_113.png", "mr_train_1017_image_frame_47.png", "mr_train_1003_image_frame_122.png", "mr_train_1018_image_frame_108.png", "mr_train_1017_image_frame_130.png", "mr_train_1006_image_frame_131.png", "mr_train_1020_image_frame_128.png", "mr_train_1011_image_frame_153.png", "mr_train_1001_image_frame_25.png", "mr_train_1006_image_frame_150.png", "mr_train_1014_image_frame_109.png", "mr_train_1018_image_frame_50.png", "mr_train_1005_image_frame_96.png", "mr_train_1008_image_frame_57.png", "mr_train_1011_image_frame_112.png", "mr_train_1005_image_frame_12.png", "mr_train_1011_image_frame_91.png", "mr_train_1017_image_frame_19.png", "mr_train_1018_image_frame_38.png", "mr_train_1001_image_frame_140.png", "mr_train_1006_image_frame_72.png", "mr_train_1009_image_frame_10.png", "mr_train_1016_image_frame_92.png", "mr_train_1011_image_frame_56.png", "mr_train_1015_image_frame_134.png", "mr_train_1017_image_frame_105.png", "mr_train_1014_image_frame_85.png", "mr_train_1005_image_frame_62.png", "mr_train_1008_image_frame_9.png", "mr_train_1005_image_frame_5.png", "mr_train_1003_image_frame_75.png", "mr_train_1020_image_frame_30.png", "mr_train_1015_image_frame_87.png", "mr_train_1008_image_frame_59.png", "mr_train_1014_image_frame_19.png", "mr_train_1015_image_frame_69.png", "mr_train_1016_image_frame_54.png", "mr_train_1015_image_frame_117.png", "mr_train_1003_image_frame_147.png", "mr_train_1015_image_frame_83.png", "mr_train_1005_image_frame_63.png", "mr_train_1017_image_frame_85.png", "mr_train_1008_image_frame_116.png", "mr_train_1017_image_frame_124.png", "mr_train_1006_image_frame_83.png", "mr_train_1016_image_frame_120.png", "mr_train_1009_image_frame_17.png", "mr_train_1020_image_frame_127.png", "mr_train_1011_image_frame_146.png", "mr_train_1003_image_frame_16.png", "mr_train_1003_image_frame_130.png", "mr_train_1001_image_frame_157.png", "mr_train_1003_image_frame_123.png", "mr_train_1018_image_frame_131.png", "mr_train_1020_image_frame_81.png", "mr_train_1008_image_frame_48.png", "mr_train_1018_image_frame_118.png", "mr_train_1020_image_frame_42.png", 
"mr_train_1017_image_frame_112.png", "mr_train_1017_image_frame_131.png", "mr_train_1018_image_frame_18.png", "mr_train_1014_image_frame_156.png", "mr_train_1020_image_frame_23.png", "mr_train_1020_image_frame_18.png", "mr_train_1015_image_frame_185.png", "mr_train_1011_image_frame_52.png", "mr_train_1008_image_frame_75.png", "mr_train_1015_image_frame_91.png", "mr_train_1005_image_frame_115.png", "mr_train_1014_image_frame_139.png", "mr_train_1009_image_frame_57.png", "mr_train_1009_image_frame_9.png", "mr_train_1003_image_frame_35.png", "mr_train_1011_image_frame_67.png", "mr_train_1014_image_frame_89.png", "mr_train_1006_image_frame_136.png", "mr_train_1016_image_frame_119.png", "mr_train_1018_image_frame_106.png", "mr_train_1020_image_frame_113.png", "mr_train_1008_image_frame_110.png", "mr_train_1006_image_frame_42.png", "mr_train_1015_image_frame_150.png", "mr_train_1001_image_frame_68.png", "mr_train_1006_image_frame_144.png", "mr_train_1009_image_frame_104.png", "mr_train_1014_image_frame_50.png", "mr_train_1015_image_frame_76.png", "mr_train_1020_image_frame_98.png", "mr_train_1011_image_frame_46.png", "mr_train_1001_image_frame_144.png", "mr_train_1016_image_frame_28.png", "mr_train_1020_image_frame_112.png", "mr_train_1009_image_frame_40.png", "mr_train_1008_image_frame_25.png", "mr_train_1011_image_frame_44.png", "mr_train_1009_image_frame_31.png", "mr_train_1015_image_frame_163.png", "mr_train_1011_image_frame_132.png", "mr_train_1020_image_frame_89.png", "mr_train_1011_image_frame_11.png", "mr_train_1020_image_frame_61.png", "mr_train_1011_image_frame_156.png", "mr_train_1015_image_frame_118.png", "mr_train_1011_image_frame_0.png", "mr_train_1020_image_frame_85.png", "mr_train_1009_image_frame_80.png", "mr_train_1018_image_frame_141.png", "mr_train_1020_image_frame_45.png", "mr_train_1005_image_frame_113.png", "mr_train_1011_image_frame_42.png", "mr_train_1001_image_frame_7.png", "mr_train_1008_image_frame_125.png", "mr_train_1009_image_frame_65.png", "mr_train_1001_image_frame_151.png", "mr_train_1008_image_frame_120.png", "mr_train_1011_image_frame_85.png", "mr_train_1014_image_frame_16.png", "mr_train_1020_image_frame_56.png", "mr_train_1008_image_frame_102.png", "mr_train_1009_image_frame_67.png", "mr_train_1016_image_frame_26.png", "mr_train_1020_image_frame_132.png", "mr_train_1016_image_frame_31.png", "mr_train_1006_image_frame_48.png", "mr_train_1018_image_frame_84.png", "mr_train_1006_image_frame_17.png", "mr_train_1020_image_frame_1.png", "mr_train_1005_image_frame_91.png", "mr_train_1011_image_frame_28.png", "mr_train_1017_image_frame_96.png", "mr_train_1009_image_frame_98.png", "mr_train_1005_image_frame_68.png", "mr_train_1014_image_frame_68.png", "mr_train_1001_image_frame_127.png", "mr_train_1015_image_frame_113.png", "mr_train_1015_image_frame_101.png", "mr_train_1015_image_frame_164.png", "mr_train_1015_image_frame_141.png", "mr_train_1009_image_frame_53.png", "mr_train_1009_image_frame_13.png", "mr_train_1015_image_frame_99.png", "mr_train_1011_image_frame_95.png", "mr_train_1003_image_frame_69.png", "mr_train_1005_image_frame_66.png", "mr_train_1018_image_frame_75.png", "mr_train_1017_image_frame_90.png", "mr_train_1015_image_frame_67.png", "mr_train_1016_image_frame_118.png", "mr_train_1011_image_frame_4.png", "mr_train_1009_image_frame_29.png", "mr_train_1005_image_frame_43.png", "mr_train_1006_image_frame_5.png", "mr_train_1008_image_frame_44.png", "mr_train_1001_image_frame_76.png", "mr_train_1020_image_frame_66.png", 
"mr_train_1016_image_frame_101.png", "mr_train_1011_image_frame_131.png", "mr_train_1008_image_frame_38.png", "mr_train_1003_image_frame_26.png", "mr_train_1005_image_frame_69.png", "mr_train_1003_image_frame_136.png", "mr_train_1014_image_frame_154.png", "mr_train_1018_image_frame_15.png", "mr_train_1017_image_frame_120.png", "mr_train_1016_image_frame_45.png", "mr_train_1014_image_frame_110.png", "mr_train_1016_image_frame_16.png", "mr_train_1017_image_frame_94.png", "mr_train_1008_image_frame_19.png", "mr_train_1001_image_frame_75.png", "mr_train_1015_image_frame_65.png", "mr_train_1020_image_frame_62.png", "mr_train_1009_image_frame_66.png", "mr_train_1016_image_frame_30.png", "mr_train_1006_image_frame_26.png", "mr_train_1018_image_frame_89.png", "mr_train_1014_image_frame_135.png", "mr_train_1017_image_frame_4.png", "mr_train_1015_image_frame_190.png", "mr_train_1018_image_frame_64.png", "mr_train_1001_image_frame_54.png", "mr_train_1015_image_frame_60.png", "mr_train_1003_image_frame_57.png", "mr_train_1014_image_frame_149.png", "mr_train_1005_image_frame_71.png", "mr_train_1011_image_frame_60.png", "mr_train_1018_image_frame_3.png", "mr_train_1003_image_frame_120.png", "mr_train_1011_image_frame_140.png", "mr_train_1003_image_frame_54.png", "mr_train_1016_image_frame_40.png", "mr_train_1001_image_frame_63.png", "mr_train_1016_image_frame_83.png", "mr_train_1014_image_frame_94.png", "mr_train_1016_image_frame_47.png", "mr_train_1005_image_frame_116.png", "mr_train_1016_image_frame_32.png", "mr_train_1011_image_frame_45.png", "mr_train_1001_image_frame_80.png", "mr_train_1001_image_frame_131.png", "mr_train_1001_image_frame_129.png", "mr_train_1001_image_frame_43.png", "mr_train_1006_image_frame_73.png", "mr_train_1015_image_frame_7.png", "mr_train_1020_image_frame_87.png", "mr_train_1017_image_frame_104.png", "mr_train_1014_image_frame_121.png", "mr_train_1009_image_frame_46.png", "mr_train_1011_image_frame_59.png", "mr_train_1017_image_frame_79.png", "mr_train_1005_image_frame_16.png", "mr_train_1015_image_frame_45.png", "mr_train_1003_image_frame_23.png", "mr_train_1015_image_frame_25.png", "mr_train_1015_image_frame_66.png", "mr_train_1020_image_frame_20.png", "mr_train_1009_image_frame_102.png", "mr_train_1006_image_frame_10.png", "mr_train_1015_image_frame_112.png", "mr_train_1017_image_frame_46.png", "mr_train_1015_image_frame_127.png", "mr_train_1009_image_frame_83.png", "mr_train_1001_image_frame_116.png", "mr_train_1008_image_frame_39.png", "mr_train_1003_image_frame_33.png", "mr_train_1020_image_frame_31.png", "mr_train_1011_image_frame_36.png", "mr_train_1017_image_frame_81.png", "mr_train_1016_image_frame_117.png", "mr_train_1001_image_frame_91.png", "mr_train_1011_image_frame_70.png", "mr_train_1018_image_frame_92.png", "mr_train_1009_image_frame_54.png", "mr_train_1009_image_frame_84.png", "mr_train_1008_image_frame_0.png", "mr_train_1011_image_frame_33.png", "mr_train_1005_image_frame_18.png", "mr_train_1005_image_frame_14.png", "mr_train_1015_image_frame_166.png", "mr_train_1001_image_frame_57.png", "mr_train_1015_image_frame_167.png", "mr_train_1011_image_frame_148.png", "mr_train_1016_image_frame_72.png", "mr_train_1005_image_frame_87.png", "mr_train_1016_image_frame_50.png", "mr_train_1008_image_frame_55.png", "mr_train_1014_image_frame_86.png", "mr_train_1009_image_frame_86.png", "mr_train_1014_image_frame_71.png", "mr_train_1017_image_frame_35.png", "mr_train_1009_image_frame_41.png", "mr_train_1020_image_frame_78.png", "mr_train_1011_image_frame_53.png", 
"mr_train_1005_image_frame_118.png", "mr_train_1015_image_frame_102.png", "mr_train_1015_image_frame_53.png", "mr_train_1009_image_frame_23.png", "mr_train_1011_image_frame_93.png", "mr_train_1020_image_frame_40.png", "mr_train_1009_image_frame_111.png", "mr_train_1006_image_frame_19.png", "mr_train_1005_image_frame_21.png", "mr_train_1011_image_frame_38.png", "mr_train_1014_image_frame_21.png", "mr_train_1011_image_frame_158.png", "mr_train_1015_image_frame_178.png", "mr_train_1005_image_frame_29.png", "mr_train_1016_image_frame_76.png", "mr_train_1015_image_frame_47.png", "mr_train_1001_image_frame_90.png", "mr_train_1003_image_frame_31.png", "mr_train_1018_image_frame_32.png", "mr_train_1017_image_frame_121.png", "mr_train_1006_image_frame_128.png", "mr_train_1018_image_frame_125.png", "mr_train_1003_image_frame_84.png", "mr_train_1001_image_frame_27.png", "mr_train_1015_image_frame_24.png", "mr_train_1011_image_frame_14.png", "mr_train_1014_image_frame_103.png", "mr_train_1005_image_frame_108.png", "mr_train_1011_image_frame_104.png", "mr_train_1011_image_frame_115.png", "mr_train_1015_image_frame_50.png", "mr_train_1011_image_frame_69.png", "mr_train_1008_image_frame_122.png", "mr_train_1018_image_frame_87.png", "mr_train_1020_image_frame_53.png", "mr_train_1006_image_frame_158.png", "mr_train_1001_image_frame_152.png", "mr_train_1008_image_frame_5.png", "mr_train_1009_image_frame_32.png", "mr_train_1011_image_frame_110.png", "mr_train_1016_image_frame_68.png", "mr_train_1014_image_frame_28.png", "mr_train_1003_image_frame_139.png", "mr_train_1003_image_frame_119.png", "mr_train_1020_image_frame_15.png", "mr_train_1003_image_frame_70.png", "mr_train_1020_image_frame_64.png", "mr_train_1011_image_frame_80.png", "mr_train_1020_image_frame_90.png", "mr_train_1015_image_frame_129.png", "mr_train_1016_image_frame_107.png", "mr_train_1016_image_frame_129.png", "mr_train_1017_image_frame_72.png", "mr_train_1017_image_frame_113.png", "mr_train_1014_image_frame_18.png", "mr_train_1008_image_frame_109.png", "mr_train_1017_image_frame_114.png", "mr_train_1016_image_frame_0.png", "mr_train_1014_image_frame_61.png", "mr_train_1020_image_frame_39.png", "mr_train_1001_image_frame_130.png", "mr_train_1011_image_frame_124.png", "mr_train_1018_image_frame_111.png", "mr_train_1015_image_frame_152.png", "mr_train_1020_image_frame_133.png", "mr_train_1008_image_frame_66.png", "mr_train_1011_image_frame_135.png", "mr_train_1005_image_frame_31.png", "mr_train_1005_image_frame_78.png", "mr_train_1009_image_frame_50.png", "mr_train_1011_image_frame_139.png", "mr_train_1011_image_frame_3.png", "mr_train_1001_image_frame_72.png", "mr_train_1020_image_frame_80.png", "mr_train_1003_image_frame_117.png", "mr_train_1008_image_frame_28.png", "mr_train_1014_image_frame_17.png", "mr_train_1011_image_frame_89.png", "mr_train_1014_image_frame_29.png", "mr_train_1011_image_frame_74.png", "mr_train_1016_image_frame_53.png", "mr_train_1001_image_frame_111.png", "mr_train_1009_image_frame_0.png", "mr_train_1020_image_frame_122.png", "mr_train_1015_image_frame_36.png", "mr_train_1005_image_frame_8.png", "mr_train_1003_image_frame_128.png", "mr_train_1018_image_frame_30.png", "mr_train_1017_image_frame_14.png", "mr_train_1006_image_frame_103.png", "mr_train_1011_image_frame_39.png", "mr_train_1003_image_frame_43.png", "mr_train_1001_image_frame_35.png", "mr_train_1015_image_frame_84.png", "mr_train_1018_image_frame_116.png", "mr_train_1006_image_frame_25.png", "mr_train_1011_image_frame_65.png", 
"mr_train_1006_image_frame_63.png", "mr_train_1001_image_frame_100.png", "mr_train_1003_image_frame_20.png", "mr_train_1009_image_frame_110.png", "mr_train_1015_image_frame_77.png", "mr_train_1006_image_frame_137.png", "mr_train_1006_image_frame_107.png", "mr_train_1001_image_frame_94.png", "mr_train_1017_image_frame_60.png", "mr_train_1001_image_frame_71.png", "mr_train_1001_image_frame_115.png", "mr_train_1008_image_frame_27.png", "mr_train_1011_image_frame_92.png", "mr_train_1005_image_frame_4.png", "mr_train_1015_image_frame_148.png", "mr_train_1011_image_frame_122.png", "mr_train_1015_image_frame_183.png", "mr_train_1011_image_frame_81.png", "mr_train_1005_image_frame_22.png", "mr_train_1018_image_frame_148.png", "mr_train_1015_image_frame_186.png", "mr_train_1020_image_frame_70.png", "mr_train_1016_image_frame_98.png", "mr_train_1001_image_frame_29.png", "mr_train_1003_image_frame_102.png", "mr_train_1003_image_frame_114.png", "mr_train_1017_image_frame_102.png", "mr_train_1005_image_frame_25.png", "mr_train_1011_image_frame_136.png", "mr_train_1009_image_frame_47.png", "mr_train_1006_image_frame_14.png", "mr_train_1015_image_frame_181.png", "mr_train_1014_image_frame_14.png", "mr_train_1006_image_frame_6.png", "mr_train_1011_image_frame_23.png", "mr_train_1018_image_frame_97.png", "mr_train_1005_image_frame_106.png", "mr_train_1001_image_frame_154.png", "mr_train_1015_image_frame_197.png", "mr_train_1016_image_frame_57.png", "mr_train_1017_image_frame_39.png", "mr_train_1016_image_frame_52.png", "mr_train_1006_image_frame_138.png", "mr_train_1011_image_frame_103.png", "mr_train_1015_image_frame_175.png", "mr_train_1015_image_frame_37.png", "mr_train_1016_image_frame_66.png", "mr_train_1006_image_frame_60.png", "mr_train_1015_image_frame_194.png", "mr_train_1015_image_frame_51.png", "mr_train_1006_image_frame_86.png", "mr_train_1003_image_frame_112.png", "mr_train_1006_image_frame_81.png", "mr_train_1016_image_frame_90.png", "mr_train_1005_image_frame_125.png", "mr_train_1005_image_frame_52.png", "mr_train_1001_image_frame_15.png", "mr_train_1009_image_frame_78.png", "mr_train_1011_image_frame_19.png", "mr_train_1003_image_frame_9.png", "mr_train_1018_image_frame_23.png", "mr_train_1005_image_frame_120.png", "mr_train_1008_image_frame_18.png", "mr_train_1016_image_frame_127.png", "mr_train_1011_image_frame_144.png", "mr_train_1006_image_frame_43.png", "mr_train_1011_image_frame_61.png", "mr_train_1009_image_frame_88.png", "mr_train_1005_image_frame_126.png", "mr_train_1005_image_frame_85.png", "mr_train_1006_image_frame_35.png", "mr_train_1020_image_frame_97.png", "mr_train_1020_image_frame_120.png", "mr_train_1015_image_frame_136.png", "mr_train_1001_image_frame_114.png", "mr_train_1005_image_frame_0.png", "mr_train_1020_image_frame_131.png", "mr_train_1001_image_frame_0.png", "mr_train_1005_image_frame_90.png", "mr_train_1003_image_frame_38.png", "mr_train_1005_image_frame_54.png", "mr_train_1015_image_frame_168.png", "mr_train_1016_image_frame_109.png", "mr_train_1020_image_frame_91.png", "mr_train_1018_image_frame_134.png", "mr_train_1005_image_frame_40.png", "mr_train_1003_image_frame_132.png", "mr_train_1006_image_frame_106.png", "mr_train_1017_image_frame_48.png", "mr_train_1001_image_frame_98.png", "mr_train_1018_image_frame_114.png", "mr_train_1014_image_frame_120.png", "mr_train_1003_image_frame_13.png", "mr_train_1014_image_frame_146.png", "mr_train_1008_image_frame_91.png", "mr_train_1005_image_frame_94.png", "mr_train_1001_image_frame_153.png", 
"mr_train_1016_image_frame_34.png", "mr_train_1020_image_frame_19.png", "mr_train_1017_image_frame_63.png", "mr_train_1020_image_frame_46.png", "mr_train_1008_image_frame_87.png", "mr_train_1001_image_frame_101.png", "mr_train_1014_image_frame_145.png", "mr_train_1008_image_frame_77.png", "mr_train_1017_image_frame_127.png", "mr_train_1008_image_frame_108.png", "mr_train_1005_image_frame_100.png", "mr_train_1001_image_frame_124.png", "mr_train_1001_image_frame_133.png", "mr_train_1003_image_frame_72.png", "mr_train_1005_image_frame_89.png", "mr_train_1014_image_frame_143.png", "mr_train_1006_image_frame_154.png", "mr_train_1003_image_frame_148.png", "mr_train_1001_image_frame_53.png", "mr_train_1015_image_frame_125.png", "mr_train_1018_image_frame_43.png", "mr_train_1008_image_frame_68.png", "mr_train_1008_image_frame_73.png", "mr_train_1015_image_frame_111.png", "mr_train_1011_image_frame_152.png", "mr_train_1015_image_frame_58.png", "mr_train_1014_image_frame_147.png", "mr_train_1001_image_frame_66.png", "mr_train_1015_image_frame_56.png", "mr_train_1016_image_frame_111.png", "mr_train_1006_image_frame_55.png", "mr_train_1018_image_frame_135.png", "mr_train_1006_image_frame_32.png", "mr_train_1015_image_frame_192.png", "mr_train_1018_image_frame_58.png", "mr_train_1017_image_frame_68.png", "mr_train_1018_image_frame_142.png", "mr_train_1006_image_frame_53.png", "mr_train_1005_image_frame_81.png", "mr_train_1016_image_frame_113.png", "mr_train_1003_image_frame_41.png", "mr_train_1009_image_frame_12.png", "mr_train_1016_image_frame_8.png", "mr_train_1015_image_frame_149.png", "mr_train_1017_image_frame_57.png", "mr_train_1020_image_frame_35.png", "mr_train_1015_image_frame_95.png", "mr_train_1015_image_frame_22.png", "mr_train_1017_image_frame_58.png", "mr_train_1018_image_frame_88.png", "mr_train_1020_image_frame_125.png", "mr_train_1018_image_frame_35.png", "mr_train_1009_image_frame_69.png", "mr_train_1017_image_frame_40.png", "mr_train_1011_image_frame_97.png", "mr_train_1011_image_frame_96.png", "mr_train_1016_image_frame_27.png", "mr_train_1006_image_frame_95.png", "mr_train_1020_image_frame_37.png", "mr_train_1018_image_frame_124.png", "mr_train_1016_image_frame_87.png", "mr_train_1020_image_frame_36.png", "mr_train_1009_image_frame_89.png", "mr_train_1016_image_frame_21.png", "mr_train_1006_image_frame_75.png", "mr_train_1014_image_frame_115.png", "mr_train_1005_image_frame_23.png", "mr_train_1001_image_frame_112.png", "mr_train_1001_image_frame_49.png", "mr_train_1018_image_frame_63.png", "mr_train_1018_image_frame_119.png", "mr_train_1016_image_frame_5.png", "mr_train_1020_image_frame_116.png", "mr_train_1011_image_frame_99.png", "mr_train_1003_image_frame_24.png", "mr_train_1017_image_frame_84.png", "mr_train_1017_image_frame_55.png", "mr_train_1020_image_frame_110.png", "mr_train_1017_image_frame_54.png", "mr_train_1005_image_frame_64.png", "mr_train_1020_image_frame_50.png", "mr_train_1015_image_frame_189.png", "mr_train_1001_image_frame_34.png", "mr_train_1009_image_frame_109.png", "mr_train_1015_image_frame_180.png", "mr_train_1018_image_frame_90.png", "mr_train_1017_image_frame_21.png", "mr_train_1020_image_frame_13.png", "mr_train_1016_image_frame_22.png", "mr_train_1009_image_frame_38.png", "mr_train_1003_image_frame_157.png", "mr_train_1006_image_frame_47.png", "mr_train_1003_image_frame_100.png", "mr_train_1015_image_frame_14.png", "mr_train_1020_image_frame_69.png", "mr_train_1009_image_frame_61.png", "mr_train_1006_image_frame_27.png", 
"mr_train_1009_image_frame_37.png", "mr_train_1003_image_frame_21.png", "mr_train_1009_image_frame_2.png", "mr_train_1006_image_frame_30.png", "mr_train_1011_image_frame_30.png", "mr_train_1006_image_frame_118.png", "mr_train_1008_image_frame_127.png", "mr_train_1018_image_frame_16.png", "mr_train_1018_image_frame_76.png", "mr_train_1017_image_frame_34.png", "mr_train_1008_image_frame_23.png", "mr_train_1016_image_frame_2.png", "mr_train_1018_image_frame_127.png", "mr_train_1015_image_frame_2.png", "mr_train_1018_image_frame_45.png", "mr_train_1006_image_frame_109.png", "mr_train_1005_image_frame_127.png", "mr_train_1020_image_frame_124.png", "mr_train_1018_image_frame_28.png", "mr_train_1020_image_frame_96.png", "mr_train_1011_image_frame_100.png", "mr_train_1011_image_frame_111.png", "mr_train_1003_image_frame_77.png", "mr_train_1005_image_frame_17.png", "mr_train_1015_image_frame_153.png", "mr_train_1003_image_frame_124.png", "mr_train_1005_image_frame_114.png", "mr_train_1003_image_frame_47.png", "mr_train_1020_image_frame_109.png", "mr_train_1015_image_frame_75.png", "mr_train_1006_image_frame_156.png", "mr_train_1001_image_frame_2.png", "mr_train_1020_image_frame_84.png", "mr_train_1001_image_frame_58.png", "mr_train_1016_image_frame_42.png", "mr_train_1014_image_frame_52.png", "mr_train_1014_image_frame_87.png", "mr_train_1014_image_frame_84.png", "mr_train_1020_image_frame_16.png", "mr_train_1011_image_frame_34.png", "mr_train_1001_image_frame_46.png", "mr_train_1003_image_frame_89.png", "mr_train_1018_image_frame_137.png", "mr_train_1018_image_frame_61.png", "mr_train_1006_image_frame_157.png", "mr_train_1018_image_frame_17.png", "mr_train_1005_image_frame_28.png", "mr_train_1014_image_frame_81.png", "mr_train_1009_image_frame_49.png", "mr_train_1011_image_frame_68.png", "mr_train_1009_image_frame_56.png", "mr_train_1008_image_frame_67.png", "mr_train_1016_image_frame_61.png", "mr_train_1006_image_frame_66.png", "mr_train_1014_image_frame_57.png", "mr_train_1018_image_frame_85.png", "mr_train_1003_image_frame_22.png", "mr_train_1011_image_frame_159.png", "mr_train_1016_image_frame_64.png", "mr_train_1015_image_frame_63.png", "mr_train_1009_image_frame_95.png", "mr_train_1003_image_frame_12.png", "mr_train_1003_image_frame_53.png", "mr_train_1015_image_frame_106.png", "mr_train_1001_image_frame_134.png", "mr_train_1017_image_frame_69.png", "mr_train_1015_image_frame_145.png", "mr_train_1016_image_frame_100.png", "mr_train_1006_image_frame_155.png", "mr_train_1011_image_frame_49.png", "mr_train_1014_image_frame_83.png", "mr_train_1009_image_frame_93.png", "mr_train_1020_image_frame_75.png", "mr_train_1020_image_frame_88.png", "mr_train_1005_image_frame_34.png", "mr_train_1006_image_frame_58.png", "mr_train_1011_image_frame_24.png", "mr_train_1015_image_frame_20.png", "mr_train_1014_image_frame_64.png", "mr_train_1014_image_frame_47.png", "mr_train_1006_image_frame_44.png", "mr_train_1018_image_frame_128.png", "mr_train_1003_image_frame_105.png", "mr_train_1017_image_frame_86.png", "mr_train_1015_image_frame_132.png", "mr_train_1020_image_frame_123.png", "mr_train_1001_image_frame_26.png", "mr_train_1011_image_frame_155.png", "mr_train_1009_image_frame_72.png", "mr_train_1015_image_frame_57.png", "mr_train_1006_image_frame_16.png", "mr_train_1005_image_frame_88.png", "mr_train_1003_image_frame_131.png", "mr_train_1018_image_frame_83.png", "mr_train_1017_image_frame_103.png", "mr_train_1018_image_frame_40.png", "mr_train_1020_image_frame_108.png", "mr_train_1006_image_frame_69.png", 
"mr_train_1001_image_frame_142.png", "mr_train_1008_image_frame_43.png", "mr_train_1020_image_frame_100.png", "mr_train_1018_image_frame_112.png", "mr_train_1018_image_frame_73.png", "mr_train_1008_image_frame_71.png", "mr_train_1018_image_frame_145.png", "mr_train_1016_image_frame_33.png", "mr_train_1011_image_frame_58.png", "mr_train_1014_image_frame_65.png", "mr_train_1009_image_frame_90.png", "mr_train_1017_image_frame_22.png", "mr_train_1001_image_frame_136.png", "mr_train_1016_image_frame_114.png", "mr_train_1009_image_frame_96.png", "mr_train_1018_image_frame_24.png", "mr_train_1015_image_frame_9.png", "mr_train_1003_image_frame_45.png", "mr_train_1017_image_frame_137.png", "mr_train_1015_image_frame_123.png", "mr_train_1009_image_frame_52.png", "mr_train_1006_image_frame_139.png", "mr_train_1014_image_frame_8.png", "mr_train_1003_image_frame_58.png", "mr_train_1009_image_frame_26.png", "mr_train_1006_image_frame_147.png", "mr_train_1014_image_frame_0.png", "mr_train_1020_image_frame_71.png", "mr_train_1005_image_frame_119.png", "mr_train_1018_image_frame_99.png", "mr_train_1008_image_frame_52.png", "mr_train_1006_image_frame_130.png", "mr_train_1017_image_frame_106.png", "mr_train_1018_image_frame_86.png", "mr_train_1005_image_frame_93.png", "mr_train_1015_image_frame_15.png", "mr_train_1009_image_frame_64.png", "mr_train_1005_image_frame_73.png", "mr_train_1015_image_frame_93.png", "mr_train_1014_image_frame_123.png", "mr_train_1011_image_frame_125.png", "mr_train_1005_image_frame_124.png", "mr_train_1017_image_frame_64.png", "mr_train_1017_image_frame_128.png", "mr_train_1015_image_frame_198.png", "mr_train_1003_image_frame_10.png", "mr_train_1006_image_frame_91.png", "mr_train_1001_image_frame_4.png", "mr_train_1001_image_frame_118.png", "mr_train_1003_image_frame_5.png", "mr_train_1009_image_frame_5.png", "mr_train_1018_image_frame_98.png", "mr_train_1020_image_frame_115.png", "mr_train_1005_image_frame_84.png", "mr_train_1009_image_frame_99.png", "mr_train_1018_image_frame_149.png", "mr_train_1008_image_frame_53.png", "mr_train_1014_image_frame_32.png", "mr_train_1020_image_frame_77.png", "mr_train_1011_image_frame_109.png", "mr_train_1014_image_frame_67.png", "mr_train_1015_image_frame_199.png", "mr_train_1003_image_frame_18.png", "mr_train_1008_image_frame_86.png", "mr_train_1014_image_frame_142.png", "mr_train_1015_image_frame_81.png", "mr_train_1011_image_frame_90.png", "mr_train_1003_image_frame_129.png", "mr_train_1001_image_frame_159.png", "mr_train_1015_image_frame_6.png", "mr_train_1003_image_frame_134.png", "mr_train_1014_image_frame_131.png", "mr_train_1018_image_frame_11.png", "mr_train_1015_image_frame_70.png", "mr_train_1014_image_frame_42.png", "mr_train_1003_image_frame_104.png", "mr_train_1014_image_frame_22.png", "mr_train_1001_image_frame_36.png", "mr_train_1016_image_frame_116.png", "mr_train_1014_image_frame_66.png", "mr_train_1014_image_frame_140.png", "mr_train_1018_image_frame_94.png", "mr_train_1015_image_frame_48.png", "mr_train_1006_image_frame_143.png", "mr_train_1005_image_frame_36.png", "mr_train_1006_image_frame_64.png", "mr_train_1015_image_frame_140.png", "mr_train_1003_image_frame_42.png", "mr_train_1015_image_frame_5.png", "mr_train_1003_image_frame_50.png", "mr_train_1014_image_frame_98.png", "mr_train_1017_image_frame_37.png", "mr_train_1003_image_frame_36.png", "mr_train_1015_image_frame_61.png", "mr_train_1009_image_frame_74.png", "mr_train_1006_image_frame_124.png", "mr_train_1009_image_frame_4.png", "mr_train_1008_image_frame_16.png", 
"mr_train_1003_image_frame_65.png", "mr_train_1001_image_frame_18.png", "mr_train_1008_image_frame_6.png", "mr_train_1015_image_frame_195.png", "mr_train_1020_image_frame_44.png", "mr_train_1014_image_frame_23.png", "mr_train_1006_image_frame_97.png", "mr_train_1015_image_frame_43.png", "mr_train_1011_image_frame_20.png", "mr_train_1017_image_frame_62.png", "mr_train_1014_image_frame_45.png", "mr_train_1018_image_frame_147.png", "mr_train_1003_image_frame_17.png", "mr_train_1016_image_frame_122.png", "mr_train_1003_image_frame_66.png", "mr_train_1008_image_frame_100.png", "mr_train_1003_image_frame_97.png", "mr_train_1020_image_frame_129.png", "mr_train_1014_image_frame_99.png", "mr_train_1001_image_frame_88.png", "mr_train_1015_image_frame_154.png", "mr_train_1005_image_frame_70.png", "mr_train_1005_image_frame_55.png", "mr_train_1003_image_frame_44.png", "mr_train_1011_image_frame_118.png", "mr_train_1005_image_frame_83.png", "mr_train_1008_image_frame_47.png", "mr_train_1017_image_frame_28.png", "mr_train_1008_image_frame_95.png", "mr_train_1015_image_frame_182.png", "mr_train_1015_image_frame_122.png", "mr_train_1001_image_frame_78.png", "mr_train_1003_image_frame_2.png", "mr_train_1001_image_frame_55.png", "mr_train_1017_image_frame_77.png", "mr_train_1008_image_frame_33.png", "mr_train_1018_image_frame_68.png", "mr_train_1020_image_frame_83.png", "mr_train_1006_image_frame_113.png", "mr_train_1001_image_frame_10.png", "mr_train_1003_image_frame_51.png", "mr_train_1003_image_frame_108.png", "mr_train_1008_image_frame_99.png", "mr_train_1009_image_frame_97.png", "mr_train_1014_image_frame_91.png", "mr_train_1006_image_frame_121.png", "mr_train_1003_image_frame_82.png", "mr_train_1011_image_frame_126.png", "mr_train_1011_image_frame_129.png", "mr_train_1017_image_frame_111.png", "mr_train_1014_image_frame_132.png", "mr_train_1014_image_frame_9.png", "mr_train_1015_image_frame_34.png", "mr_train_1003_image_frame_125.png", "mr_train_1017_image_frame_7.png", "mr_train_1014_image_frame_69.png", "mr_train_1001_image_frame_9.png", "mr_train_1008_image_frame_13.png", "mr_train_1015_image_frame_13.png", "mr_train_1020_image_frame_12.png", "mr_train_1008_image_frame_112.png", "mr_train_1005_image_frame_53.png", "mr_train_1017_image_frame_17.png", "mr_train_1015_image_frame_59.png", "mr_train_1006_image_frame_115.png", "mr_train_1015_image_frame_94.png", "mr_train_1014_image_frame_112.png", "mr_train_1014_image_frame_108.png", "mr_train_1006_image_frame_151.png", "mr_train_1006_image_frame_93.png", "mr_train_1003_image_frame_109.png", "mr_train_1015_image_frame_64.png", "mr_train_1008_image_frame_104.png", "mr_train_1014_image_frame_80.png", "mr_train_1015_image_frame_157.png", "mr_train_1001_image_frame_39.png", "mr_train_1011_image_frame_18.png", "mr_train_1001_image_frame_48.png", "mr_train_1017_image_frame_49.png", "mr_train_1016_image_frame_84.png", "mr_train_1018_image_frame_79.png", "mr_train_1015_image_frame_114.png", "mr_train_1016_image_frame_49.png", "mr_train_1017_image_frame_10.png", "mr_train_1003_image_frame_93.png", "mr_train_1001_image_frame_110.png", "mr_train_1018_image_frame_74.png", "mr_train_1015_image_frame_188.png", "mr_train_1016_image_frame_93.png", "mr_train_1014_image_frame_90.png", "mr_train_1014_image_frame_82.png", "mr_train_1014_image_frame_12.png", "mr_train_1015_image_frame_44.png", "mr_train_1006_image_frame_89.png", "mr_train_1005_image_frame_60.png", "mr_train_1003_image_frame_146.png", "mr_train_1011_image_frame_40.png", "mr_train_1015_image_frame_21.png", 
"mr_train_1017_image_frame_78.png", "mr_train_1005_image_frame_95.png", "mr_train_1009_image_frame_73.png", "mr_train_1015_image_frame_0.png", "mr_train_1001_image_frame_125.png", "mr_train_1005_image_frame_30.png", "mr_train_1001_image_frame_139.png", "mr_train_1015_image_frame_162.png", "mr_train_1016_image_frame_74.png", "mr_train_1006_image_frame_65.png", "mr_train_1005_image_frame_9.png", "mr_train_1018_image_frame_132.png", "mr_train_1003_image_frame_79.png", "mr_train_1014_image_frame_40.png", "mr_train_1017_image_frame_126.png", "mr_train_1006_image_frame_152.png", "mr_train_1016_image_frame_9.png", "mr_train_1011_image_frame_101.png", "mr_train_1020_image_frame_79.png", "mr_train_1020_image_frame_99.png", "mr_train_1009_image_frame_87.png", "mr_train_1003_image_frame_103.png", "mr_train_1014_image_frame_97.png", "mr_train_1008_image_frame_12.png", "mr_train_1014_image_frame_129.png", "mr_train_1015_image_frame_98.png", "mr_train_1001_image_frame_97.png", "mr_train_1003_image_frame_153.png", "mr_train_1001_image_frame_31.png", "mr_train_1008_image_frame_51.png", "mr_train_1003_image_frame_19.png", "mr_train_1001_image_frame_83.png", "mr_train_1016_image_frame_89.png", "mr_train_1008_image_frame_42.png", "mr_train_1020_image_frame_9.png", "mr_train_1017_image_frame_73.png", "mr_train_1011_image_frame_113.png", "mr_train_1009_image_frame_118.png", "mr_train_1006_image_frame_125.png", "mr_train_1005_image_frame_59.png", "mr_train_1017_image_frame_11.png", "mr_train_1017_image_frame_88.png", "mr_train_1017_image_frame_98.png", "mr_train_1008_image_frame_93.png", "mr_train_1018_image_frame_80.png", "mr_train_1014_image_frame_3.png", "mr_train_1018_image_frame_56.png", "mr_train_1018_image_frame_37.png", "mr_train_1015_image_frame_89.png", "mr_train_1006_image_frame_141.png", "mr_train_1017_image_frame_23.png", "mr_train_1020_image_frame_43.png", "mr_train_1014_image_frame_70.png", "mr_train_1001_image_frame_103.png", "mr_train_1014_image_frame_106.png", "mr_train_1011_image_frame_55.png", "mr_train_1015_image_frame_33.png", "mr_train_1020_image_frame_74.png", "mr_train_1008_image_frame_17.png", "mr_train_1008_image_frame_15.png", "mr_train_1005_image_frame_67.png", "mr_train_1016_image_frame_41.png", "mr_train_1015_image_frame_19.png", "mr_train_1017_image_frame_125.png", "mr_train_1003_image_frame_151.png", "mr_train_1015_image_frame_138.png", "mr_train_1016_image_frame_95.png", "mr_train_1006_image_frame_123.png", "mr_train_1001_image_frame_74.png", "mr_train_1014_image_frame_24.png", "mr_train_1016_image_frame_69.png", "mr_train_1018_image_frame_113.png", "mr_train_1014_image_frame_96.png", "mr_train_1017_image_frame_122.png", "mr_train_1009_image_frame_70.png", "mr_train_1017_image_frame_107.png", "mr_train_1009_image_frame_68.png", "mr_train_1014_image_frame_150.png", "mr_train_1008_image_frame_54.png", "mr_train_1017_image_frame_36.png", "mr_train_1015_image_frame_28.png", "mr_train_1020_image_frame_102.png", "mr_train_1003_image_frame_116.png", "mr_train_1009_image_frame_62.png", "mr_train_1014_image_frame_5.png", "mr_train_1014_image_frame_138.png", "mr_train_1003_image_frame_63.png", "mr_train_1008_image_frame_123.png", "mr_train_1018_image_frame_6.png", "mr_train_1008_image_frame_2.png", "mr_train_1020_image_frame_82.png", "mr_train_1015_image_frame_119.png", "mr_train_1020_image_frame_114.png", "mr_train_1017_image_frame_15.png", "mr_train_1017_image_frame_135.png", "mr_train_1018_image_frame_72.png", "mr_train_1016_image_frame_11.png", "mr_train_1005_image_frame_111.png", 
"mr_train_1015_image_frame_177.png", "mr_train_1003_image_frame_68.png", "mr_train_1018_image_frame_27.png", "mr_train_1006_image_frame_41.png", "mr_train_1009_image_frame_55.png", "mr_train_1015_image_frame_170.png", "mr_train_1016_image_frame_25.png", "mr_train_1018_image_frame_48.png", "mr_train_1005_image_frame_1.png", "mr_train_1017_image_frame_44.png", "mr_train_1009_image_frame_7.png", "mr_train_1015_image_frame_110.png", "mr_train_1020_image_frame_41.png", "mr_train_1001_image_frame_81.png", "mr_train_1005_image_frame_35.png", "mr_train_1006_image_frame_98.png", "mr_train_1001_image_frame_12.png", "mr_train_1016_image_frame_56.png", "mr_train_1001_image_frame_122.png", "mr_train_1001_image_frame_138.png", "mr_train_1006_image_frame_15.png", "mr_train_1016_image_frame_1.png", "mr_train_1018_image_frame_29.png", "mr_train_1014_image_frame_60.png", "mr_train_1015_image_frame_169.png", "mr_train_1006_image_frame_111.png", "mr_train_1003_image_frame_140.png", "mr_train_1014_image_frame_11.png", "mr_train_1015_image_frame_171.png", "mr_train_1017_image_frame_65.png", "mr_train_1015_image_frame_86.png", "mr_train_1006_image_frame_45.png", "mr_train_1005_image_frame_41.png", "mr_train_1017_image_frame_95.png", "mr_train_1009_image_frame_82.png", "mr_train_1016_image_frame_121.png", "mr_train_1017_image_frame_43.png", "mr_train_1005_image_frame_46.png", "mr_train_1006_image_frame_29.png", "mr_train_1009_image_frame_107.png", "mr_train_1001_image_frame_61.png", "mr_train_1009_image_frame_51.png", "mr_train_1018_image_frame_55.png", "mr_train_1001_image_frame_137.png", "mr_train_1011_image_frame_29.png", "mr_train_1005_image_frame_98.png", "mr_train_1009_image_frame_113.png", "mr_train_1009_image_frame_3.png", "mr_train_1020_image_frame_86.png", "mr_train_1005_image_frame_47.png", "mr_train_1017_image_frame_53.png", "mr_train_1014_image_frame_118.png", "mr_train_1003_image_frame_137.png", "mr_train_1001_image_frame_1.png", "mr_train_1003_image_frame_118.png", "mr_train_1001_image_frame_93.png", "mr_train_1011_image_frame_7.png", "mr_train_1017_image_frame_30.png", "mr_train_1003_image_frame_60.png", "mr_train_1015_image_frame_139.png", "mr_train_1011_image_frame_75.png", "mr_train_1015_image_frame_71.png", "mr_train_1006_image_frame_105.png", "mr_train_1005_image_frame_44.png", "mr_train_1014_image_frame_95.png", "mr_train_1017_image_frame_108.png", "mr_train_1016_image_frame_38.png", "mr_train_1003_image_frame_59.png", "mr_train_1005_image_frame_27.png", "mr_train_1008_image_frame_34.png", "mr_train_1005_image_frame_101.png", "mr_train_1015_image_frame_196.png", "mr_train_1009_image_frame_100.png", "mr_train_1005_image_frame_102.png", "mr_train_1005_image_frame_7.png", "mr_train_1006_image_frame_117.png", "mr_train_1009_image_frame_91.png", "mr_train_1001_image_frame_38.png", "mr_train_1011_image_frame_157.png", "mr_train_1006_image_frame_46.png", "mr_train_1017_image_frame_87.png", "mr_train_1003_image_frame_67.png", "mr_train_1018_image_frame_146.png", "mr_train_1017_image_frame_92.png", "mr_train_1014_image_frame_46.png", "mr_train_1015_image_frame_72.png", "mr_train_1018_image_frame_69.png", "mr_train_1020_image_frame_0.png", "mr_train_1014_image_frame_2.png", "mr_train_1001_image_frame_73.png", "mr_train_1020_image_frame_32.png", "mr_train_1003_image_frame_138.png", "mr_train_1018_image_frame_78.png", "mr_train_1009_image_frame_101.png", "mr_train_1015_image_frame_156.png", "mr_train_1006_image_frame_11.png", "mr_train_1011_image_frame_127.png", "mr_train_1006_image_frame_13.png", 
"mr_train_1008_image_frame_4.png", "mr_train_1008_image_frame_129.png", "mr_train_1017_image_frame_138.png", "mr_train_1006_image_frame_9.png", "mr_train_1017_image_frame_116.png", "mr_train_1016_image_frame_103.png", "mr_train_1001_image_frame_13.png", "mr_train_1016_image_frame_81.png", "mr_train_1015_image_frame_10.png", "mr_train_1015_image_frame_121.png", "mr_train_1018_image_frame_5.png", "mr_train_1011_image_frame_133.png", "mr_train_1014_image_frame_43.png", "mr_train_1009_image_frame_34.png", "mr_train_1014_image_frame_127.png", "mr_train_1014_image_frame_105.png", "mr_train_1011_image_frame_142.png", "mr_train_1015_image_frame_35.png", "mr_train_1017_image_frame_115.png", "mr_train_1018_image_frame_121.png", "mr_train_1016_image_frame_88.png", "mr_train_1020_image_frame_3.png", "mr_train_1005_image_frame_103.png", "mr_train_1020_image_frame_2.png", "mr_train_1014_image_frame_56.png", "mr_train_1020_image_frame_25.png", "mr_train_1018_image_frame_95.png", "mr_train_1008_image_frame_119.png", "mr_train_1016_image_frame_71.png", "mr_train_1017_image_frame_139.png", "mr_train_1001_image_frame_47.png", "mr_train_1008_image_frame_32.png", "mr_train_1017_image_frame_67.png", "mr_train_1018_image_frame_54.png", "mr_train_1008_image_frame_58.png", "mr_train_1015_image_frame_49.png", "mr_train_1009_image_frame_33.png", "mr_train_1020_image_frame_72.png", "mr_train_1015_image_frame_29.png", "mr_train_1015_image_frame_143.png", "mr_train_1015_image_frame_184.png", "mr_train_1009_image_frame_105.png", "mr_train_1014_image_frame_25.png", "mr_train_1011_image_frame_117.png", "mr_train_1016_image_frame_6.png", "mr_train_1011_image_frame_50.png", "mr_train_1011_image_frame_145.png", "mr_train_1015_image_frame_115.png", "mr_train_1014_image_frame_158.png", "mr_train_1014_image_frame_77.png", "mr_train_1014_image_frame_125.png", "mr_train_1001_image_frame_17.png", "mr_train_1009_image_frame_71.png", "mr_train_1015_image_frame_128.png", "mr_train_1016_image_frame_65.png", "mr_train_1005_image_frame_74.png", "mr_train_1015_image_frame_18.png", "mr_train_1015_image_frame_130.png", "mr_train_1018_image_frame_66.png", "mr_train_1014_image_frame_102.png", "mr_train_1020_image_frame_55.png", "mr_train_1017_image_frame_52.png", "mr_train_1018_image_frame_33.png", "mr_train_1020_image_frame_21.png", "mr_train_1001_image_frame_135.png", "mr_train_1003_image_frame_15.png", "mr_train_1011_image_frame_47.png", "mr_train_1003_image_frame_25.png", "mr_train_1009_image_frame_116.png", "mr_train_1003_image_frame_39.png", "mr_train_1011_image_frame_134.png", "mr_train_1011_image_frame_64.png", "mr_train_1017_image_frame_5.png", "mr_train_1016_image_frame_55.png", "mr_train_1020_image_frame_107.png", "mr_train_1014_image_frame_30.png", "mr_train_1018_image_frame_144.png", "mr_train_1014_image_frame_126.png", "mr_train_1020_image_frame_48.png", "mr_train_1005_image_frame_104.png", "mr_train_1014_image_frame_122.png", "mr_train_1008_image_frame_113.png", "mr_train_1009_image_frame_6.png", "mr_train_1003_image_frame_143.png", "mr_train_1020_image_frame_34.png", "mr_train_1018_image_frame_20.png", "mr_train_1020_image_frame_95.png", "mr_train_1011_image_frame_76.png", "mr_train_1008_image_frame_101.png", "mr_train_1005_image_frame_37.png", "mr_train_1017_image_frame_119.png", "mr_train_1016_image_frame_20.png", "mr_train_1015_image_frame_155.png", "mr_train_1006_image_frame_37.png", "mr_train_1009_image_frame_39.png", "mr_train_1008_image_frame_62.png", "mr_train_1001_image_frame_92.png", 
"mr_train_1008_image_frame_3.png", "mr_train_1015_image_frame_30.png", "mr_train_1017_image_frame_16.png", "mr_train_1016_image_frame_128.png", "mr_train_1015_image_frame_85.png", "mr_train_1011_image_frame_82.png", "mr_train_1018_image_frame_77.png", "mr_train_1020_image_frame_104.png", "mr_train_1015_image_frame_62.png", "mr_train_1015_image_frame_4.png", "mr_train_1005_image_frame_109.png", "mr_train_1015_image_frame_108.png", "mr_train_1001_image_frame_62.png", "mr_train_1008_image_frame_45.png", "mr_train_1006_image_frame_112.png", "mr_train_1003_image_frame_111.png", "mr_train_1015_image_frame_32.png", "mr_train_1016_image_frame_73.png", "mr_train_1006_image_frame_140.png", "mr_train_1006_image_frame_21.png", "mr_train_1015_image_frame_8.png", "mr_train_1015_image_frame_191.png", "mr_train_1018_image_frame_60.png", "mr_train_1014_image_frame_75.png", "mr_train_1009_image_frame_1.png", "mr_train_1017_image_frame_1.png", "mr_train_1016_image_frame_108.png", "mr_train_1015_image_frame_159.png", "mr_train_1015_image_frame_173.png", "mr_train_1011_image_frame_137.png", "mr_train_1003_image_frame_88.png", "mr_train_1016_image_frame_112.png", "mr_train_1016_image_frame_91.png", "mr_train_1006_image_frame_110.png", "mr_train_1011_image_frame_71.png", "mr_train_1015_image_frame_79.png", "mr_train_1008_image_frame_10.png", "mr_train_1015_image_frame_172.png", "mr_train_1008_image_frame_41.png", "mr_train_1008_image_frame_92.png", "mr_train_1003_image_frame_56.png", "mr_train_1008_image_frame_29.png", "mr_train_1006_image_frame_40.png", "mr_train_1008_image_frame_60.png", "mr_train_1011_image_frame_17.png", "mr_train_1015_image_frame_158.png", "mr_train_1015_image_frame_146.png", "mr_train_1014_image_frame_113.png", "mr_train_1016_image_frame_43.png", "mr_train_1003_image_frame_141.png", "mr_train_1015_image_frame_38.png", "mr_train_1005_image_frame_105.png", "mr_train_1001_image_frame_45.png", "mr_train_1009_image_frame_36.png", "mr_train_1008_image_frame_63.png", "mr_train_1017_image_frame_18.png", "mr_train_1005_image_frame_61.png", "mr_train_1003_image_frame_80.png", "mr_train_1018_image_frame_1.png", "mr_train_1003_image_frame_158.png", "mr_train_1020_image_frame_29.png", "mr_train_1009_image_frame_117.png", "mr_train_1005_image_frame_10.png", "mr_train_1001_image_frame_105.png", "mr_train_1016_image_frame_63.png", "mr_train_1017_image_frame_75.png", "mr_train_1018_image_frame_42.png", "mr_train_1006_image_frame_132.png", "mr_train_1003_image_frame_127.png", "mr_train_1005_image_frame_117.png", "mr_train_1006_image_frame_101.png", "mr_train_1015_image_frame_151.png", "mr_train_1008_image_frame_83.png", "mr_train_1001_image_frame_52.png", "mr_train_1009_image_frame_28.png", "mr_train_1003_image_frame_101.png", "mr_train_1016_image_frame_14.png", "mr_train_1016_image_frame_24.png", "mr_train_1015_image_frame_109.png", "mr_train_1018_image_frame_107.png", "mr_train_1001_image_frame_148.png", "mr_train_1006_image_frame_96.png", "mr_train_1008_image_frame_30.png", "mr_train_1003_image_frame_14.png", "mr_train_1006_image_frame_67.png", "mr_train_1011_image_frame_106.png", "mr_train_1015_image_frame_97.png", "mr_train_1001_image_frame_95.png", "mr_train_1014_image_frame_51.png", "mr_train_1006_image_frame_59.png", "mr_train_1001_image_frame_40.png", "mr_train_1001_image_frame_84.png", "mr_train_1018_image_frame_19.png", "mr_train_1018_image_frame_96.png", "mr_train_1011_image_frame_32.png", "mr_train_1005_image_frame_2.png", "mr_train_1008_image_frame_72.png", "mr_train_1017_image_frame_83.png", 
"mr_train_1009_image_frame_108.png", "mr_train_1015_image_frame_90.png", "mr_train_1008_image_frame_89.png", "mr_train_1005_image_frame_51.png", "mr_train_1001_image_frame_28.png", "mr_train_1003_image_frame_71.png", "mr_train_1003_image_frame_3.png", "mr_train_1020_image_frame_106.png", "mr_train_1020_image_frame_51.png", "mr_train_1005_image_frame_121.png", "mr_train_1003_image_frame_94.png", "mr_train_1017_image_frame_97.png", "mr_train_1009_image_frame_22.png", "mr_train_1001_image_frame_77.png", "mr_train_1008_image_frame_24.png", "mr_train_1011_image_frame_128.png", "mr_train_1006_image_frame_108.png", "mr_train_1015_image_frame_96.png", "mr_train_1006_image_frame_54.png", "mr_train_1017_image_frame_8.png", "mr_train_1009_image_frame_106.png", "mr_train_1001_image_frame_149.png", "mr_train_1011_image_frame_105.png", "mr_train_1020_image_frame_94.png", "mr_train_1006_image_frame_79.png", "mr_train_1018_image_frame_12.png", "mr_train_1006_image_frame_20.png", "mr_train_1018_image_frame_138.png", "mr_train_1006_image_frame_127.png", "mr_train_1008_image_frame_76.png", "mr_train_1018_image_frame_139.png", "mr_train_1006_image_frame_61.png", "mr_train_1008_image_frame_111.png", "mr_train_1020_image_frame_4.png", "mr_train_1006_image_frame_50.png", "mr_train_1001_image_frame_120.png", "mr_train_1005_image_frame_3.png", "mr_train_1006_image_frame_23.png", "mr_train_1001_image_frame_56.png", "mr_train_1006_image_frame_99.png", "mr_train_1011_image_frame_116.png", "mr_train_1017_image_frame_109.png", "mr_train_1003_image_frame_11.png", "mr_train_1006_image_frame_62.png", "mr_train_1008_image_frame_97.png", "mr_train_1001_image_frame_87.png", "mr_train_1018_image_frame_49.png", "mr_train_1009_image_frame_43.png", "mr_train_1003_image_frame_40.png", "mr_train_1003_image_frame_96.png", "mr_train_1009_image_frame_20.png", "mr_train_1008_image_frame_40.png", "mr_train_1006_image_frame_87.png", "mr_train_1015_image_frame_144.png", "mr_train_1001_image_frame_117.png", "mr_train_1006_image_frame_80.png", "mr_train_1003_image_frame_149.png", "mr_train_1008_image_frame_115.png", "mr_train_1011_image_frame_31.png", "mr_train_1003_image_frame_95.png", "mr_train_1015_image_frame_137.png", "mr_train_1009_image_frame_8.png", "mr_train_1014_image_frame_124.png", "mr_train_1015_image_frame_31.png", "mr_train_1018_image_frame_14.png", "mr_train_1008_image_frame_81.png", "mr_train_1016_image_frame_51.png", "mr_train_1016_image_frame_102.png", "mr_train_1014_image_frame_74.png", "mr_train_1014_image_frame_78.png", "mr_train_1015_image_frame_1.png"], "valid": ["mr_train_1007_image_frame_149.png", "mr_train_1013_image_frame_93.png", "mr_train_1007_image_frame_70.png", "mr_train_1013_image_frame_90.png", "mr_train_1013_image_frame_15.png", "mr_train_1007_image_frame_135.png", "mr_train_1004_image_frame_49.png", "mr_train_1004_image_frame_63.png", "mr_train_1013_image_frame_22.png", "mr_train_1007_image_frame_7.png", "mr_train_1007_image_frame_40.png", "mr_train_1013_image_frame_60.png", "mr_train_1007_image_frame_140.png", "mr_train_1004_image_frame_3.png", "mr_train_1004_image_frame_114.png", "mr_train_1007_image_frame_100.png", "mr_train_1007_image_frame_141.png", "mr_train_1007_image_frame_103.png", "mr_train_1004_image_frame_70.png", "mr_train_1004_image_frame_18.png", "mr_train_1007_image_frame_164.png", "mr_train_1007_image_frame_81.png", "mr_train_1007_image_frame_109.png", "mr_train_1007_image_frame_83.png", "mr_train_1013_image_frame_39.png", "mr_train_1007_image_frame_21.png", 
"mr_train_1007_image_frame_146.png", "mr_train_1004_image_frame_89.png", "mr_train_1004_image_frame_103.png", "mr_train_1007_image_frame_14.png", "mr_train_1004_image_frame_86.png", "mr_train_1007_image_frame_39.png", "mr_train_1007_image_frame_4.png", "mr_train_1013_image_frame_56.png", "mr_train_1007_image_frame_42.png", "mr_train_1007_image_frame_154.png", "mr_train_1013_image_frame_101.png", "mr_train_1004_image_frame_112.png", "mr_train_1007_image_frame_107.png", "mr_train_1007_image_frame_99.png", "mr_train_1007_image_frame_37.png", "mr_train_1007_image_frame_172.png", "mr_train_1013_image_frame_45.png", "mr_train_1013_image_frame_23.png", "mr_train_1004_image_frame_75.png", "mr_train_1007_image_frame_91.png", "mr_train_1004_image_frame_61.png", "mr_train_1013_image_frame_25.png", "mr_train_1004_image_frame_111.png", "mr_train_1004_image_frame_87.png", "mr_train_1007_image_frame_82.png", "mr_train_1013_image_frame_19.png", "mr_train_1007_image_frame_74.png", "mr_train_1004_image_frame_10.png", "mr_train_1007_image_frame_33.png", "mr_train_1007_image_frame_0.png", "mr_train_1013_image_frame_105.png", "mr_train_1004_image_frame_72.png", "mr_train_1007_image_frame_66.png", "mr_train_1007_image_frame_67.png", "mr_train_1007_image_frame_97.png", "mr_train_1004_image_frame_25.png", "mr_train_1004_image_frame_11.png", "mr_train_1004_image_frame_4.png", "mr_train_1007_image_frame_17.png", "mr_train_1013_image_frame_14.png", "mr_train_1013_image_frame_4.png", "mr_train_1007_image_frame_56.png", "mr_train_1004_image_frame_83.png", "mr_train_1007_image_frame_174.png", "mr_train_1007_image_frame_15.png", "mr_train_1007_image_frame_133.png", "mr_train_1004_image_frame_24.png", "mr_train_1004_image_frame_73.png", "mr_train_1007_image_frame_130.png", "mr_train_1013_image_frame_89.png", "mr_train_1007_image_frame_157.png", "mr_train_1013_image_frame_69.png", "mr_train_1007_image_frame_137.png", "mr_train_1013_image_frame_34.png", "mr_train_1007_image_frame_111.png", "mr_train_1004_image_frame_105.png", "mr_train_1013_image_frame_87.png", "mr_train_1013_image_frame_18.png", "mr_train_1007_image_frame_162.png", "mr_train_1004_image_frame_94.png", "mr_train_1013_image_frame_76.png", "mr_train_1004_image_frame_60.png", "mr_train_1013_image_frame_32.png", "mr_train_1004_image_frame_45.png", "mr_train_1013_image_frame_91.png", "mr_train_1013_image_frame_41.png", "mr_train_1004_image_frame_7.png", "mr_train_1004_image_frame_104.png", "mr_train_1013_image_frame_83.png", "mr_train_1007_image_frame_112.png", "mr_train_1004_image_frame_36.png", "mr_train_1004_image_frame_46.png", "mr_train_1013_image_frame_109.png", "mr_train_1013_image_frame_77.png", "mr_train_1004_image_frame_76.png", "mr_train_1013_image_frame_81.png", "mr_train_1004_image_frame_115.png", "mr_train_1004_image_frame_51.png", "mr_train_1007_image_frame_60.png", "mr_train_1004_image_frame_64.png", "mr_train_1004_image_frame_28.png", "mr_train_1004_image_frame_2.png", "mr_train_1007_image_frame_177.png", "mr_train_1004_image_frame_59.png", "mr_train_1007_image_frame_108.png", "mr_train_1007_image_frame_138.png", "mr_train_1004_image_frame_50.png", "mr_train_1004_image_frame_21.png", "mr_train_1013_image_frame_106.png", "mr_train_1007_image_frame_131.png", "mr_train_1007_image_frame_143.png", "mr_train_1004_image_frame_80.png", "mr_train_1007_image_frame_117.png", "mr_train_1004_image_frame_116.png", "mr_train_1007_image_frame_92.png", "mr_train_1007_image_frame_8.png", "mr_train_1004_image_frame_15.png", "mr_train_1004_image_frame_14.png", 
"mr_train_1013_image_frame_20.png", "mr_train_1013_image_frame_6.png", "mr_train_1013_image_frame_80.png", "mr_train_1004_image_frame_0.png", "mr_train_1013_image_frame_12.png", "mr_train_1007_image_frame_1.png", "mr_train_1013_image_frame_94.png", "mr_train_1007_image_frame_55.png", "mr_train_1004_image_frame_41.png", "mr_train_1007_image_frame_57.png", "mr_train_1004_image_frame_113.png", "mr_train_1007_image_frame_156.png", "mr_train_1007_image_frame_163.png", "mr_train_1007_image_frame_106.png", "mr_train_1013_image_frame_82.png", "mr_train_1004_image_frame_101.png", "mr_train_1004_image_frame_12.png", "mr_train_1007_image_frame_155.png", "mr_train_1007_image_frame_34.png", "mr_train_1007_image_frame_169.png", "mr_train_1013_image_frame_75.png", "mr_train_1004_image_frame_62.png", "mr_train_1007_image_frame_178.png", "mr_train_1007_image_frame_159.png", "mr_train_1013_image_frame_30.png", "mr_train_1013_image_frame_63.png", "mr_train_1004_image_frame_8.png", "mr_train_1007_image_frame_136.png", "mr_train_1007_image_frame_49.png", "mr_train_1004_image_frame_43.png", "mr_train_1007_image_frame_78.png", "mr_train_1007_image_frame_72.png", "mr_train_1007_image_frame_93.png", "mr_train_1007_image_frame_62.png", "mr_train_1004_image_frame_100.png", "mr_train_1013_image_frame_74.png", "mr_train_1007_image_frame_89.png", "mr_train_1007_image_frame_165.png", "mr_train_1004_image_frame_110.png", "mr_train_1007_image_frame_114.png", "mr_train_1007_image_frame_27.png", "mr_train_1007_image_frame_147.png", "mr_train_1007_image_frame_88.png", "mr_train_1007_image_frame_18.png", "mr_train_1013_image_frame_53.png", "mr_train_1007_image_frame_139.png", "mr_train_1007_image_frame_171.png", "mr_train_1007_image_frame_119.png", "mr_train_1013_image_frame_71.png", "mr_train_1004_image_frame_40.png", "mr_train_1013_image_frame_104.png", "mr_train_1004_image_frame_56.png", "mr_train_1007_image_frame_3.png", "mr_train_1013_image_frame_88.png", "mr_train_1007_image_frame_13.png", "mr_train_1007_image_frame_150.png", "mr_train_1013_image_frame_100.png", "mr_train_1013_image_frame_110.png", "mr_train_1004_image_frame_106.png", "mr_train_1013_image_frame_38.png", "mr_train_1004_image_frame_74.png", "mr_train_1013_image_frame_26.png", "mr_train_1013_image_frame_67.png", "mr_train_1004_image_frame_31.png", "mr_train_1013_image_frame_108.png", "mr_train_1013_image_frame_70.png", "mr_train_1004_image_frame_13.png", "mr_train_1004_image_frame_97.png", "mr_train_1013_image_frame_52.png", "mr_train_1007_image_frame_122.png", "mr_train_1007_image_frame_19.png", "mr_train_1004_image_frame_93.png", "mr_train_1007_image_frame_59.png", "mr_train_1004_image_frame_6.png", "mr_train_1013_image_frame_68.png", "mr_train_1007_image_frame_71.png", "mr_train_1007_image_frame_113.png", "mr_train_1013_image_frame_64.png", "mr_train_1013_image_frame_29.png", "mr_train_1013_image_frame_103.png", "mr_train_1013_image_frame_37.png", "mr_train_1007_image_frame_28.png", "mr_train_1004_image_frame_81.png", "mr_train_1004_image_frame_32.png", "mr_train_1007_image_frame_101.png", "mr_train_1013_image_frame_24.png", "mr_train_1007_image_frame_26.png", "mr_train_1007_image_frame_44.png", "mr_train_1013_image_frame_10.png", "mr_train_1007_image_frame_63.png", "mr_train_1007_image_frame_47.png", "mr_train_1004_image_frame_48.png", "mr_train_1004_image_frame_58.png", "mr_train_1007_image_frame_176.png", "mr_train_1007_image_frame_24.png", "mr_train_1004_image_frame_39.png", "mr_train_1013_image_frame_1.png", "mr_train_1013_image_frame_9.png", 
"mr_train_1007_image_frame_85.png", "mr_train_1007_image_frame_118.png", "mr_train_1004_image_frame_118.png", "mr_train_1004_image_frame_92.png", "mr_train_1013_image_frame_50.png", "mr_train_1013_image_frame_57.png", "mr_train_1007_image_frame_76.png", "mr_train_1007_image_frame_168.png", "mr_train_1013_image_frame_3.png", "mr_train_1007_image_frame_144.png", "mr_train_1004_image_frame_47.png", "mr_train_1013_image_frame_46.png", "mr_train_1004_image_frame_34.png", "mr_train_1013_image_frame_65.png", "mr_train_1007_image_frame_46.png", "mr_train_1007_image_frame_170.png", "mr_train_1004_image_frame_67.png", "mr_train_1007_image_frame_43.png", "mr_train_1007_image_frame_6.png", "mr_train_1013_image_frame_48.png", "mr_train_1004_image_frame_77.png", "mr_train_1007_image_frame_5.png", "mr_train_1013_image_frame_47.png", "mr_train_1007_image_frame_95.png", "mr_train_1004_image_frame_20.png", "mr_train_1013_image_frame_98.png", "mr_train_1007_image_frame_115.png", "mr_train_1007_image_frame_105.png", "mr_train_1004_image_frame_99.png", "mr_train_1007_image_frame_79.png", "mr_train_1004_image_frame_5.png", "mr_train_1004_image_frame_17.png", "mr_train_1004_image_frame_88.png", "mr_train_1007_image_frame_35.png", "mr_train_1004_image_frame_30.png", "mr_train_1013_image_frame_73.png", "mr_train_1007_image_frame_80.png", "mr_train_1007_image_frame_73.png", "mr_train_1007_image_frame_65.png", "mr_train_1007_image_frame_160.png", "mr_train_1004_image_frame_102.png", "mr_train_1004_image_frame_68.png", "mr_train_1013_image_frame_28.png", "mr_train_1007_image_frame_52.png", "mr_train_1004_image_frame_26.png", "mr_train_1007_image_frame_124.png", "mr_train_1013_image_frame_44.png", "mr_train_1013_image_frame_97.png", "mr_train_1004_image_frame_85.png", "mr_train_1007_image_frame_86.png", "mr_train_1013_image_frame_27.png", "mr_train_1007_image_frame_75.png", "mr_train_1013_image_frame_36.png", "mr_train_1007_image_frame_41.png", "mr_train_1004_image_frame_52.png", "mr_train_1004_image_frame_71.png", "mr_train_1007_image_frame_16.png", "mr_train_1013_image_frame_17.png", "mr_train_1004_image_frame_33.png", "mr_train_1004_image_frame_57.png", "mr_train_1013_image_frame_86.png", "mr_train_1004_image_frame_42.png", "mr_train_1007_image_frame_22.png", "mr_train_1013_image_frame_31.png", "mr_train_1013_image_frame_42.png", "mr_train_1004_image_frame_9.png", "mr_train_1013_image_frame_21.png", "mr_train_1007_image_frame_84.png", "mr_train_1013_image_frame_79.png", "mr_train_1004_image_frame_66.png", "mr_train_1004_image_frame_65.png", "mr_train_1007_image_frame_145.png", "mr_train_1004_image_frame_22.png", "mr_train_1013_image_frame_66.png", "mr_train_1013_image_frame_13.png", "mr_train_1007_image_frame_179.png", "mr_train_1004_image_frame_35.png", "mr_train_1007_image_frame_51.png", "mr_train_1007_image_frame_126.png", "mr_train_1007_image_frame_161.png", "mr_train_1007_image_frame_10.png", "mr_train_1007_image_frame_54.png", "mr_train_1013_image_frame_55.png", "mr_train_1007_image_frame_153.png", "mr_train_1004_image_frame_37.png", "mr_train_1013_image_frame_49.png", "mr_train_1004_image_frame_16.png", "mr_train_1004_image_frame_108.png", "mr_train_1007_image_frame_142.png", "mr_train_1007_image_frame_120.png", "mr_train_1004_image_frame_90.png", "mr_train_1007_image_frame_98.png", "mr_train_1007_image_frame_102.png", "mr_train_1013_image_frame_107.png", "mr_train_1007_image_frame_45.png", "mr_train_1013_image_frame_85.png", "mr_train_1007_image_frame_152.png", "mr_train_1007_image_frame_50.png", 
"mr_train_1013_image_frame_99.png", "mr_train_1004_image_frame_96.png", "mr_train_1007_image_frame_116.png", "mr_train_1013_image_frame_35.png", "mr_train_1004_image_frame_38.png", "mr_train_1007_image_frame_25.png", "mr_train_1007_image_frame_68.png", "mr_train_1013_image_frame_62.png", "mr_train_1007_image_frame_29.png", "mr_train_1013_image_frame_96.png", "mr_train_1007_image_frame_48.png", "mr_train_1007_image_frame_58.png", "mr_train_1013_image_frame_72.png", "mr_train_1013_image_frame_59.png", "mr_train_1004_image_frame_55.png", "mr_train_1007_image_frame_23.png", "mr_train_1013_image_frame_40.png", "mr_train_1007_image_frame_166.png", "mr_train_1004_image_frame_84.png", "mr_train_1007_image_frame_151.png", "mr_train_1013_image_frame_92.png", "mr_train_1007_image_frame_90.png", "mr_train_1013_image_frame_43.png", "mr_train_1013_image_frame_61.png", "mr_train_1013_image_frame_51.png", "mr_train_1007_image_frame_94.png", "mr_train_1004_image_frame_98.png", "mr_train_1007_image_frame_134.png", "mr_train_1007_image_frame_61.png", "mr_train_1013_image_frame_95.png", "mr_train_1004_image_frame_117.png", "mr_train_1004_image_frame_79.png", "mr_train_1007_image_frame_173.png", "mr_train_1007_image_frame_12.png", "mr_train_1013_image_frame_54.png", "mr_train_1007_image_frame_20.png", "mr_train_1007_image_frame_77.png", "mr_train_1007_image_frame_123.png", "mr_train_1013_image_frame_8.png", "mr_train_1007_image_frame_158.png", "mr_train_1007_image_frame_87.png", "mr_train_1007_image_frame_110.png", "mr_train_1007_image_frame_175.png", "mr_train_1007_image_frame_9.png", "mr_train_1013_image_frame_2.png", "mr_train_1004_image_frame_107.png", "mr_train_1013_image_frame_102.png", "mr_train_1013_image_frame_7.png", "mr_train_1013_image_frame_5.png", "mr_train_1007_image_frame_167.png", "mr_train_1007_image_frame_53.png", "mr_train_1007_image_frame_96.png", "mr_train_1007_image_frame_125.png", "mr_train_1013_image_frame_16.png", "mr_train_1007_image_frame_104.png", "mr_train_1013_image_frame_0.png", "mr_train_1004_image_frame_95.png", "mr_train_1007_image_frame_32.png", "mr_train_1007_image_frame_31.png", "mr_train_1007_image_frame_30.png", "mr_train_1007_image_frame_38.png", "mr_train_1004_image_frame_29.png", "mr_train_1013_image_frame_111.png", "mr_train_1004_image_frame_19.png", "mr_train_1007_image_frame_127.png", "mr_train_1004_image_frame_27.png", "mr_train_1004_image_frame_53.png", "mr_train_1013_image_frame_84.png", "mr_train_1004_image_frame_109.png", "mr_train_1007_image_frame_2.png", "mr_train_1013_image_frame_33.png", "mr_train_1007_image_frame_121.png", "mr_train_1007_image_frame_148.png", "mr_train_1013_image_frame_78.png", "mr_train_1007_image_frame_132.png", "mr_train_1004_image_frame_78.png", "mr_train_1004_image_frame_1.png", "mr_train_1004_image_frame_91.png", "mr_train_1004_image_frame_69.png", "mr_train_1004_image_frame_54.png", "mr_train_1007_image_frame_69.png", "mr_train_1007_image_frame_36.png", "mr_train_1004_image_frame_119.png", "mr_train_1004_image_frame_44.png", "mr_train_1013_image_frame_11.png", "mr_train_1007_image_frame_11.png", "mr_train_1007_image_frame_129.png", "mr_train_1004_image_frame_82.png", "mr_train_1004_image_frame_23.png", "mr_train_1007_image_frame_128.png", "mr_train_1013_image_frame_58.png", "mr_train_1007_image_frame_64.png"], "test": ["mr_train_1010_image_frame_114.png", "mr_train_1019_image_frame_63.png", "mr_train_1010_image_frame_132.png", "mr_train_1019_image_frame_80.png", "mr_train_1012_image_frame_72.png", 
"mr_train_1019_image_frame_129.png", "mr_train_1002_image_frame_82.png", "mr_train_1010_image_frame_55.png", "mr_train_1010_image_frame_56.png", "mr_train_1010_image_frame_134.png", "mr_train_1010_image_frame_81.png", "mr_train_1012_image_frame_3.png", "mr_train_1012_image_frame_5.png", "mr_train_1012_image_frame_24.png", "mr_train_1012_image_frame_107.png", "mr_train_1019_image_frame_130.png", "mr_train_1012_image_frame_119.png", "mr_train_1010_image_frame_71.png", "mr_train_1019_image_frame_105.png", "mr_train_1002_image_frame_69.png", "mr_train_1002_image_frame_60.png", "mr_train_1019_image_frame_123.png", "mr_train_1010_image_frame_75.png", "mr_train_1012_image_frame_58.png", "mr_train_1012_image_frame_4.png", "mr_train_1002_image_frame_3.png", "mr_train_1012_image_frame_55.png", "mr_train_1010_image_frame_121.png", "mr_train_1012_image_frame_121.png", "mr_train_1002_image_frame_40.png", "mr_train_1010_image_frame_140.png", "mr_train_1012_image_frame_7.png", "mr_train_1019_image_frame_120.png", "mr_train_1019_image_frame_111.png", "mr_train_1002_image_frame_44.png", "mr_train_1012_image_frame_32.png", "mr_train_1019_image_frame_77.png", "mr_train_1019_image_frame_132.png", "mr_train_1019_image_frame_15.png", "mr_train_1002_image_frame_58.png", "mr_train_1012_image_frame_41.png", "mr_train_1019_image_frame_76.png", "mr_train_1012_image_frame_70.png", "mr_train_1010_image_frame_25.png", "mr_train_1019_image_frame_12.png", "mr_train_1012_image_frame_82.png", "mr_train_1012_image_frame_91.png", "mr_train_1002_image_frame_37.png", "mr_train_1012_image_frame_39.png", "mr_train_1002_image_frame_7.png", "mr_train_1002_image_frame_43.png", "mr_train_1012_image_frame_36.png", "mr_train_1010_image_frame_34.png", "mr_train_1019_image_frame_22.png", "mr_train_1019_image_frame_121.png", "mr_train_1019_image_frame_117.png", "mr_train_1002_image_frame_11.png", "mr_train_1010_image_frame_33.png", "mr_train_1002_image_frame_64.png", "mr_train_1002_image_frame_5.png", "mr_train_1010_image_frame_83.png", "mr_train_1010_image_frame_41.png", "mr_train_1010_image_frame_101.png", "mr_train_1012_image_frame_6.png", "mr_train_1012_image_frame_69.png", "mr_train_1019_image_frame_93.png", "mr_train_1010_image_frame_13.png", "mr_train_1019_image_frame_46.png", "mr_train_1010_image_frame_52.png", "mr_train_1010_image_frame_54.png", "mr_train_1019_image_frame_70.png", "mr_train_1010_image_frame_110.png", "mr_train_1012_image_frame_106.png", "mr_train_1012_image_frame_20.png", "mr_train_1002_image_frame_66.png", "mr_train_1012_image_frame_25.png", "mr_train_1002_image_frame_54.png", "mr_train_1012_image_frame_81.png", "mr_train_1019_image_frame_1.png", "mr_train_1002_image_frame_39.png", "mr_train_1010_image_frame_17.png", "mr_train_1019_image_frame_79.png", "mr_train_1012_image_frame_120.png", "mr_train_1010_image_frame_84.png", "mr_train_1012_image_frame_47.png", "mr_train_1019_image_frame_83.png", "mr_train_1010_image_frame_16.png", "mr_train_1010_image_frame_39.png", "mr_train_1012_image_frame_14.png", "mr_train_1002_image_frame_47.png", "mr_train_1010_image_frame_126.png", "mr_train_1012_image_frame_1.png", "mr_train_1010_image_frame_158.png", "mr_train_1010_image_frame_28.png", "mr_train_1010_image_frame_24.png", "mr_train_1010_image_frame_138.png", "mr_train_1002_image_frame_6.png", "mr_train_1010_image_frame_116.png", "mr_train_1012_image_frame_79.png", "mr_train_1019_image_frame_66.png", "mr_train_1019_image_frame_58.png", "mr_train_1019_image_frame_31.png", "mr_train_1012_image_frame_64.png", 
"mr_train_1010_image_frame_129.png", "mr_train_1012_image_frame_42.png", "mr_train_1019_image_frame_44.png", "mr_train_1002_image_frame_2.png", "mr_train_1012_image_frame_116.png", "mr_train_1012_image_frame_50.png", "mr_train_1002_image_frame_71.png", "mr_train_1010_image_frame_69.png", "mr_train_1019_image_frame_6.png", "mr_train_1010_image_frame_98.png", "mr_train_1002_image_frame_118.png", "mr_train_1019_image_frame_108.png", "mr_train_1002_image_frame_73.png", "mr_train_1019_image_frame_115.png", "mr_train_1012_image_frame_85.png", "mr_train_1012_image_frame_45.png", "mr_train_1002_image_frame_91.png", "mr_train_1012_image_frame_111.png", "mr_train_1012_image_frame_48.png", "mr_train_1002_image_frame_123.png", "mr_train_1019_image_frame_16.png", "mr_train_1019_image_frame_36.png", "mr_train_1019_image_frame_43.png", "mr_train_1002_image_frame_45.png", "mr_train_1012_image_frame_34.png", "mr_train_1019_image_frame_126.png", "mr_train_1010_image_frame_120.png", "mr_train_1002_image_frame_113.png", "mr_train_1010_image_frame_67.png", "mr_train_1019_image_frame_18.png", "mr_train_1010_image_frame_60.png", "mr_train_1002_image_frame_25.png", "mr_train_1002_image_frame_112.png", "mr_train_1012_image_frame_8.png", "mr_train_1012_image_frame_84.png", "mr_train_1012_image_frame_97.png", "mr_train_1002_image_frame_4.png", "mr_train_1002_image_frame_48.png", "mr_train_1002_image_frame_33.png", "mr_train_1019_image_frame_42.png", "mr_train_1010_image_frame_0.png", "mr_train_1010_image_frame_19.png", "mr_train_1012_image_frame_90.png", "mr_train_1019_image_frame_134.png", "mr_train_1010_image_frame_22.png", "mr_train_1019_image_frame_9.png", "mr_train_1012_image_frame_71.png", "mr_train_1019_image_frame_52.png", "mr_train_1012_image_frame_105.png", "mr_train_1012_image_frame_115.png", "mr_train_1010_image_frame_159.png", "mr_train_1002_image_frame_96.png", "mr_train_1010_image_frame_150.png", "mr_train_1002_image_frame_61.png", "mr_train_1012_image_frame_60.png", "mr_train_1012_image_frame_65.png", "mr_train_1012_image_frame_75.png", "mr_train_1002_image_frame_127.png", "mr_train_1010_image_frame_73.png", "mr_train_1010_image_frame_59.png", "mr_train_1019_image_frame_82.png", "mr_train_1010_image_frame_123.png", "mr_train_1002_image_frame_122.png", "mr_train_1002_image_frame_10.png", "mr_train_1002_image_frame_36.png", "mr_train_1002_image_frame_55.png", "mr_train_1019_image_frame_112.png", "mr_train_1010_image_frame_30.png", "mr_train_1012_image_frame_108.png", "mr_train_1002_image_frame_103.png", "mr_train_1010_image_frame_87.png", "mr_train_1002_image_frame_63.png", "mr_train_1012_image_frame_114.png", "mr_train_1010_image_frame_47.png", "mr_train_1002_image_frame_124.png", "mr_train_1002_image_frame_15.png", "mr_train_1010_image_frame_88.png", "mr_train_1012_image_frame_13.png", "mr_train_1002_image_frame_110.png", "mr_train_1002_image_frame_27.png", "mr_train_1010_image_frame_49.png", "mr_train_1002_image_frame_74.png", "mr_train_1012_image_frame_29.png", "mr_train_1010_image_frame_43.png", "mr_train_1012_image_frame_102.png", "mr_train_1019_image_frame_5.png", "mr_train_1019_image_frame_20.png", "mr_train_1010_image_frame_100.png", "mr_train_1010_image_frame_102.png", "mr_train_1012_image_frame_30.png", "mr_train_1002_image_frame_59.png", "mr_train_1010_image_frame_90.png", "mr_train_1002_image_frame_119.png", "mr_train_1019_image_frame_102.png", "mr_train_1010_image_frame_14.png", "mr_train_1012_image_frame_123.png", "mr_train_1019_image_frame_8.png", "mr_train_1002_image_frame_57.png", 
"mr_train_1019_image_frame_90.png", "mr_train_1002_image_frame_80.png", "mr_train_1010_image_frame_115.png", "mr_train_1002_image_frame_106.png", "mr_train_1002_image_frame_85.png", "mr_train_1002_image_frame_93.png", "mr_train_1010_image_frame_8.png", "mr_train_1019_image_frame_100.png", "mr_train_1002_image_frame_76.png", "mr_train_1010_image_frame_2.png", "mr_train_1019_image_frame_59.png", "mr_train_1019_image_frame_48.png", "mr_train_1019_image_frame_133.png", "mr_train_1002_image_frame_35.png", "mr_train_1019_image_frame_106.png", "mr_train_1019_image_frame_13.png", "mr_train_1012_image_frame_57.png", "mr_train_1002_image_frame_0.png", "mr_train_1010_image_frame_128.png", "mr_train_1012_image_frame_87.png", "mr_train_1019_image_frame_24.png", "mr_train_1010_image_frame_36.png", "mr_train_1010_image_frame_141.png", "mr_train_1002_image_frame_72.png", "mr_train_1002_image_frame_42.png", "mr_train_1010_image_frame_153.png", "mr_train_1010_image_frame_109.png", "mr_train_1002_image_frame_12.png", "mr_train_1010_image_frame_46.png", "mr_train_1002_image_frame_94.png", "mr_train_1002_image_frame_99.png", "mr_train_1019_image_frame_127.png", "mr_train_1012_image_frame_113.png", "mr_train_1002_image_frame_89.png", "mr_train_1010_image_frame_77.png", "mr_train_1012_image_frame_54.png", "mr_train_1019_image_frame_11.png", "mr_train_1012_image_frame_110.png", "mr_train_1012_image_frame_126.png", "mr_train_1012_image_frame_10.png", "mr_train_1012_image_frame_66.png", "mr_train_1019_image_frame_10.png", "mr_train_1002_image_frame_105.png", "mr_train_1002_image_frame_30.png", "mr_train_1012_image_frame_59.png", "mr_train_1002_image_frame_84.png", "mr_train_1002_image_frame_9.png", "mr_train_1019_image_frame_91.png", "mr_train_1010_image_frame_143.png", "mr_train_1019_image_frame_74.png", "mr_train_1019_image_frame_14.png", "mr_train_1002_image_frame_24.png", "mr_train_1012_image_frame_2.png", "mr_train_1019_image_frame_86.png", "mr_train_1019_image_frame_56.png", "mr_train_1002_image_frame_56.png", "mr_train_1019_image_frame_55.png", "mr_train_1010_image_frame_131.png", "mr_train_1019_image_frame_78.png", "mr_train_1002_image_frame_88.png", "mr_train_1019_image_frame_98.png", "mr_train_1019_image_frame_60.png", "mr_train_1012_image_frame_35.png", "mr_train_1019_image_frame_17.png", "mr_train_1010_image_frame_137.png", "mr_train_1010_image_frame_10.png", "mr_train_1010_image_frame_122.png", "mr_train_1010_image_frame_4.png", "mr_train_1019_image_frame_103.png", "mr_train_1012_image_frame_93.png", "mr_train_1012_image_frame_26.png", "mr_train_1012_image_frame_99.png", "mr_train_1019_image_frame_38.png", "mr_train_1019_image_frame_23.png", "mr_train_1002_image_frame_109.png", "mr_train_1010_image_frame_37.png", "mr_train_1012_image_frame_86.png", "mr_train_1010_image_frame_124.png", "mr_train_1012_image_frame_51.png", "mr_train_1010_image_frame_53.png", "mr_train_1012_image_frame_37.png", "mr_train_1010_image_frame_40.png", "mr_train_1002_image_frame_50.png", "mr_train_1012_image_frame_38.png", "mr_train_1002_image_frame_126.png", "mr_train_1010_image_frame_42.png", "mr_train_1019_image_frame_87.png", "mr_train_1012_image_frame_76.png", "mr_train_1002_image_frame_16.png", "mr_train_1012_image_frame_16.png", "mr_train_1019_image_frame_81.png", "mr_train_1012_image_frame_18.png", "mr_train_1012_image_frame_117.png", "mr_train_1010_image_frame_78.png", "mr_train_1012_image_frame_127.png", "mr_train_1019_image_frame_45.png", "mr_train_1019_image_frame_49.png", "mr_train_1010_image_frame_136.png", 
"mr_train_1010_image_frame_68.png", "mr_train_1019_image_frame_131.png", "mr_train_1012_image_frame_68.png", "mr_train_1002_image_frame_97.png", "mr_train_1012_image_frame_62.png", "mr_train_1012_image_frame_0.png", "mr_train_1002_image_frame_79.png", "mr_train_1010_image_frame_96.png", "mr_train_1012_image_frame_100.png", "mr_train_1019_image_frame_94.png", "mr_train_1012_image_frame_56.png", "mr_train_1002_image_frame_21.png", "mr_train_1002_image_frame_26.png", "mr_train_1010_image_frame_95.png", "mr_train_1019_image_frame_124.png", "mr_train_1002_image_frame_19.png", "mr_train_1019_image_frame_96.png", "mr_train_1019_image_frame_2.png", "mr_train_1012_image_frame_52.png", "mr_train_1010_image_frame_32.png", "mr_train_1010_image_frame_76.png", "mr_train_1002_image_frame_120.png", "mr_train_1010_image_frame_157.png", "mr_train_1019_image_frame_75.png", "mr_train_1010_image_frame_50.png", "mr_train_1019_image_frame_7.png", "mr_train_1010_image_frame_26.png", "mr_train_1012_image_frame_61.png", "mr_train_1012_image_frame_44.png", "mr_train_1019_image_frame_26.png", "mr_train_1002_image_frame_38.png", "mr_train_1010_image_frame_147.png", "mr_train_1019_image_frame_64.png", "mr_train_1019_image_frame_47.png", "mr_train_1010_image_frame_70.png", "mr_train_1010_image_frame_7.png", "mr_train_1010_image_frame_106.png", "mr_train_1012_image_frame_83.png", "mr_train_1012_image_frame_101.png", "mr_train_1019_image_frame_4.png", "mr_train_1002_image_frame_67.png", "mr_train_1002_image_frame_65.png", "mr_train_1010_image_frame_117.png", "mr_train_1010_image_frame_66.png", "mr_train_1010_image_frame_18.png", "mr_train_1002_image_frame_121.png", "mr_train_1010_image_frame_38.png", "mr_train_1010_image_frame_51.png", "mr_train_1002_image_frame_77.png", "mr_train_1019_image_frame_114.png", "mr_train_1010_image_frame_154.png", "mr_train_1010_image_frame_23.png", "mr_train_1002_image_frame_98.png", "mr_train_1002_image_frame_34.png", "mr_train_1019_image_frame_104.png", "mr_train_1002_image_frame_41.png", "mr_train_1010_image_frame_29.png", "mr_train_1010_image_frame_12.png", "mr_train_1010_image_frame_103.png", "mr_train_1010_image_frame_133.png", "mr_train_1010_image_frame_142.png", "mr_train_1002_image_frame_52.png", "mr_train_1010_image_frame_148.png", "mr_train_1002_image_frame_1.png", "mr_train_1010_image_frame_79.png", "mr_train_1002_image_frame_14.png", "mr_train_1010_image_frame_89.png", "mr_train_1010_image_frame_151.png", "mr_train_1010_image_frame_104.png", "mr_train_1012_image_frame_9.png", "mr_train_1019_image_frame_71.png", "mr_train_1002_image_frame_115.png", "mr_train_1019_image_frame_97.png", "mr_train_1002_image_frame_23.png", "mr_train_1010_image_frame_65.png", "mr_train_1010_image_frame_6.png", "mr_train_1012_image_frame_53.png", "mr_train_1012_image_frame_118.png", "mr_train_1019_image_frame_85.png", "mr_train_1019_image_frame_61.png", "mr_train_1002_image_frame_92.png", "mr_train_1002_image_frame_87.png", "mr_train_1010_image_frame_155.png", "mr_train_1012_image_frame_74.png", "mr_train_1002_image_frame_22.png", "mr_train_1010_image_frame_107.png", "mr_train_1012_image_frame_112.png", "mr_train_1012_image_frame_63.png", "mr_train_1010_image_frame_118.png", "mr_train_1012_image_frame_19.png", "mr_train_1012_image_frame_31.png", "mr_train_1010_image_frame_144.png", "mr_train_1002_image_frame_78.png", "mr_train_1010_image_frame_35.png", "mr_train_1010_image_frame_111.png", "mr_train_1010_image_frame_20.png", "mr_train_1010_image_frame_105.png", "mr_train_1002_image_frame_8.png", 
"mr_train_1012_image_frame_27.png", "mr_train_1012_image_frame_22.png", "mr_train_1012_image_frame_96.png", "mr_train_1002_image_frame_28.png", "mr_train_1019_image_frame_95.png", "mr_train_1010_image_frame_80.png", "mr_train_1010_image_frame_112.png", "mr_train_1019_image_frame_28.png", "mr_train_1010_image_frame_86.png", "mr_train_1012_image_frame_40.png", "mr_train_1010_image_frame_74.png", "mr_train_1010_image_frame_125.png", "mr_train_1002_image_frame_53.png", "mr_train_1010_image_frame_61.png", "mr_train_1010_image_frame_113.png", "mr_train_1019_image_frame_88.png", "mr_train_1019_image_frame_50.png", "mr_train_1002_image_frame_18.png", "mr_train_1012_image_frame_12.png", "mr_train_1010_image_frame_48.png", "mr_train_1002_image_frame_62.png", "mr_train_1019_image_frame_21.png", "mr_train_1010_image_frame_62.png", "mr_train_1012_image_frame_77.png", "mr_train_1002_image_frame_29.png", "mr_train_1019_image_frame_73.png", "mr_train_1002_image_frame_107.png", "mr_train_1002_image_frame_17.png", "mr_train_1012_image_frame_94.png", "mr_train_1002_image_frame_125.png", "mr_train_1010_image_frame_127.png", "mr_train_1019_image_frame_128.png", "mr_train_1010_image_frame_11.png", "mr_train_1012_image_frame_15.png", "mr_train_1012_image_frame_98.png", "mr_train_1019_image_frame_84.png", "mr_train_1010_image_frame_9.png", "mr_train_1010_image_frame_92.png", "mr_train_1012_image_frame_88.png", "mr_train_1019_image_frame_72.png", "mr_train_1012_image_frame_122.png", "mr_train_1010_image_frame_31.png", "mr_train_1019_image_frame_99.png", "mr_train_1012_image_frame_23.png", "mr_train_1019_image_frame_0.png", "mr_train_1010_image_frame_85.png", "mr_train_1019_image_frame_41.png", "mr_train_1019_image_frame_65.png", "mr_train_1002_image_frame_70.png", "mr_train_1019_image_frame_27.png", "mr_train_1012_image_frame_33.png", "mr_train_1010_image_frame_139.png", "mr_train_1002_image_frame_101.png", "mr_train_1019_image_frame_35.png", "mr_train_1019_image_frame_113.png", "mr_train_1019_image_frame_30.png", "mr_train_1012_image_frame_28.png", "mr_train_1010_image_frame_58.png", "mr_train_1019_image_frame_39.png", "mr_train_1010_image_frame_135.png", "mr_train_1010_image_frame_156.png", "mr_train_1002_image_frame_31.png", "mr_train_1010_image_frame_108.png", "mr_train_1010_image_frame_5.png", "mr_train_1019_image_frame_101.png", "mr_train_1010_image_frame_44.png", "mr_train_1010_image_frame_99.png", "mr_train_1002_image_frame_90.png", "mr_train_1019_image_frame_109.png", "mr_train_1012_image_frame_92.png", "mr_train_1010_image_frame_91.png", "mr_train_1019_image_frame_37.png", "mr_train_1002_image_frame_13.png", "mr_train_1002_image_frame_75.png", "mr_train_1010_image_frame_1.png", "mr_train_1012_image_frame_49.png", "mr_train_1012_image_frame_89.png", "mr_train_1002_image_frame_86.png", "mr_train_1010_image_frame_119.png", "mr_train_1002_image_frame_111.png", "mr_train_1019_image_frame_116.png", "mr_train_1002_image_frame_51.png", "mr_train_1019_image_frame_19.png", "mr_train_1019_image_frame_110.png", "mr_train_1012_image_frame_104.png", "mr_train_1019_image_frame_40.png", "mr_train_1010_image_frame_57.png", "mr_train_1002_image_frame_46.png", "mr_train_1002_image_frame_102.png", "mr_train_1010_image_frame_64.png", "mr_train_1010_image_frame_130.png", "mr_train_1010_image_frame_21.png", "mr_train_1019_image_frame_67.png", "mr_train_1010_image_frame_94.png", "mr_train_1019_image_frame_62.png", "mr_train_1002_image_frame_68.png", "mr_train_1010_image_frame_27.png", "mr_train_1019_image_frame_54.png", 
"mr_train_1010_image_frame_146.png", "mr_train_1010_image_frame_45.png", "mr_train_1010_image_frame_149.png", "mr_train_1019_image_frame_3.png", "mr_train_1019_image_frame_118.png", "mr_train_1019_image_frame_51.png", "mr_train_1012_image_frame_67.png", "mr_train_1019_image_frame_69.png", "mr_train_1012_image_frame_95.png", "mr_train_1010_image_frame_82.png", "mr_train_1019_image_frame_107.png", "mr_train_1019_image_frame_119.png", "mr_train_1012_image_frame_80.png", "mr_train_1012_image_frame_46.png", "mr_train_1019_image_frame_89.png", "mr_train_1010_image_frame_97.png", "mr_train_1002_image_frame_83.png", "mr_train_1012_image_frame_109.png", "mr_train_1019_image_frame_92.png", "mr_train_1019_image_frame_29.png", "mr_train_1002_image_frame_95.png", "mr_train_1019_image_frame_53.png", "mr_train_1010_image_frame_72.png", "mr_train_1010_image_frame_152.png", "mr_train_1010_image_frame_63.png", "mr_train_1002_image_frame_114.png", "mr_train_1010_image_frame_15.png", "mr_train_1002_image_frame_116.png", "mr_train_1002_image_frame_108.png", "mr_train_1019_image_frame_122.png", "mr_train_1019_image_frame_125.png", "mr_train_1012_image_frame_125.png", "mr_train_1012_image_frame_17.png", "mr_train_1012_image_frame_124.png", "mr_train_1010_image_frame_93.png", "mr_train_1002_image_frame_32.png", "mr_train_1019_image_frame_25.png", "mr_train_1010_image_frame_145.png", "mr_train_1012_image_frame_21.png", "mr_train_1019_image_frame_32.png", "mr_train_1012_image_frame_11.png", "mr_train_1002_image_frame_81.png", "mr_train_1019_image_frame_34.png", "mr_train_1012_image_frame_78.png", "mr_train_1019_image_frame_57.png", "mr_train_1002_image_frame_104.png", "mr_train_1002_image_frame_100.png", "mr_train_1002_image_frame_20.png", "mr_train_1002_image_frame_117.png", "mr_train_1019_image_frame_33.png", "mr_train_1012_image_frame_103.png", "mr_train_1019_image_frame_68.png", "mr_train_1012_image_frame_73.png", "mr_train_1002_image_frame_49.png", "mr_train_1010_image_frame_3.png", "mr_train_1012_image_frame_43.png"], "train_labels": ["mr_train_1008_label_frame_8.png", "mr_train_1020_label_frame_84.png", "mr_train_1020_label_frame_64.png", "mr_train_1011_label_frame_115.png", "mr_train_1017_label_frame_66.png", "mr_train_1020_label_frame_78.png", "mr_train_1014_label_frame_130.png", "mr_train_1020_label_frame_85.png", "mr_train_1005_label_frame_104.png", "mr_train_1001_label_frame_69.png", "mr_train_1001_label_frame_45.png", "mr_train_1006_label_frame_60.png", "mr_train_1016_label_frame_65.png", "mr_train_1009_label_frame_7.png", "mr_train_1015_label_frame_61.png", "mr_train_1020_label_frame_24.png", "mr_train_1001_label_frame_89.png", "mr_train_1008_label_frame_92.png", "mr_train_1015_label_frame_52.png", "mr_train_1006_label_frame_35.png", "mr_train_1003_label_frame_130.png", "mr_train_1001_label_frame_16.png", "mr_train_1014_label_frame_85.png", "mr_train_1015_label_frame_117.png", "mr_train_1006_label_frame_84.png", "mr_train_1001_label_frame_66.png", "mr_train_1015_label_frame_169.png", "mr_train_1009_label_frame_67.png", "mr_train_1015_label_frame_171.png", "mr_train_1006_label_frame_120.png", "mr_train_1014_label_frame_72.png", "mr_train_1009_label_frame_33.png", "mr_train_1015_label_frame_89.png", "mr_train_1011_label_frame_24.png", "mr_train_1001_label_frame_87.png", "mr_train_1018_label_frame_73.png", "mr_train_1001_label_frame_22.png", "mr_train_1017_label_frame_0.png", "mr_train_1005_label_frame_24.png", "mr_train_1011_label_frame_76.png", "mr_train_1005_label_frame_115.png", 
"mr_train_1003_label_frame_104.png", "mr_train_1017_label_frame_123.png", "mr_train_1014_label_frame_137.png", "mr_train_1006_label_frame_96.png", "mr_train_1017_label_frame_14.png", "mr_train_1009_label_frame_30.png", "mr_train_1001_label_frame_94.png", "mr_train_1008_label_frame_38.png", "mr_train_1011_label_frame_59.png", "mr_train_1017_label_frame_22.png", "mr_train_1005_label_frame_118.png", "mr_train_1015_label_frame_66.png", "mr_train_1018_label_frame_4.png", "mr_train_1011_label_frame_22.png", "mr_train_1003_label_frame_70.png", "mr_train_1006_label_frame_94.png", "mr_train_1006_label_frame_90.png", "mr_train_1015_label_frame_27.png", "mr_train_1009_label_frame_50.png", "mr_train_1017_label_frame_27.png", "mr_train_1005_label_frame_20.png", "mr_train_1017_label_frame_43.png", "mr_train_1020_label_frame_19.png", "mr_train_1016_label_frame_105.png", "mr_train_1003_label_frame_129.png", "mr_train_1017_label_frame_54.png", "mr_train_1011_label_frame_47.png", "mr_train_1017_label_frame_25.png", "mr_train_1009_label_frame_97.png", "mr_train_1015_label_frame_119.png", "mr_train_1003_label_frame_34.png", "mr_train_1018_label_frame_55.png", "mr_train_1016_label_frame_108.png", "mr_train_1001_label_frame_140.png", "mr_train_1018_label_frame_89.png", "mr_train_1001_label_frame_151.png", "mr_train_1017_label_frame_68.png", "mr_train_1011_label_frame_9.png", "mr_train_1006_label_frame_129.png", "mr_train_1008_label_frame_36.png", "mr_train_1001_label_frame_68.png", "mr_train_1005_label_frame_78.png", "mr_train_1009_label_frame_8.png", "mr_train_1009_label_frame_115.png", "mr_train_1009_label_frame_90.png", "mr_train_1001_label_frame_125.png", "mr_train_1006_label_frame_9.png", "mr_train_1015_label_frame_73.png", "mr_train_1005_label_frame_128.png", "mr_train_1009_label_frame_96.png", "mr_train_1014_label_frame_111.png", "mr_train_1014_label_frame_2.png", "mr_train_1018_label_frame_50.png", "mr_train_1016_label_frame_64.png", "mr_train_1018_label_frame_84.png", "mr_train_1006_label_frame_41.png", "mr_train_1001_label_frame_126.png", "mr_train_1005_label_frame_80.png", "mr_train_1020_label_frame_74.png", "mr_train_1016_label_frame_114.png", "mr_train_1018_label_frame_86.png", "mr_train_1001_label_frame_53.png", "mr_train_1011_label_frame_18.png", "mr_train_1015_label_frame_134.png", "mr_train_1018_label_frame_31.png", "mr_train_1018_label_frame_127.png", "mr_train_1009_label_frame_34.png", "mr_train_1018_label_frame_6.png", "mr_train_1001_label_frame_30.png", "mr_train_1005_label_frame_18.png", "mr_train_1006_label_frame_20.png", "mr_train_1003_label_frame_29.png", "mr_train_1001_label_frame_81.png", "mr_train_1011_label_frame_25.png", "mr_train_1005_label_frame_62.png", "mr_train_1011_label_frame_121.png", "mr_train_1011_label_frame_45.png", "mr_train_1006_label_frame_56.png", "mr_train_1008_label_frame_3.png", "mr_train_1008_label_frame_55.png", "mr_train_1011_label_frame_32.png", "mr_train_1014_label_frame_61.png", "mr_train_1014_label_frame_59.png", "mr_train_1018_label_frame_77.png", "mr_train_1003_label_frame_135.png", "mr_train_1009_label_frame_77.png", "mr_train_1018_label_frame_137.png", "mr_train_1016_label_frame_8.png", "mr_train_1006_label_frame_66.png", "mr_train_1016_label_frame_41.png", "mr_train_1016_label_frame_25.png", "mr_train_1001_label_frame_21.png", "mr_train_1018_label_frame_115.png", "mr_train_1015_label_frame_142.png", "mr_train_1015_label_frame_2.png", "mr_train_1003_label_frame_122.png", "mr_train_1015_label_frame_99.png", "mr_train_1015_label_frame_108.png", 
"mr_train_1016_label_frame_115.png", "mr_train_1011_label_frame_12.png", "mr_train_1003_label_frame_145.png", "mr_train_1001_label_frame_109.png", "mr_train_1003_label_frame_1.png", "mr_train_1006_label_frame_17.png", "mr_train_1009_label_frame_117.png", "mr_train_1006_label_frame_1.png", "mr_train_1006_label_frame_124.png", "mr_train_1011_label_frame_139.png", "mr_train_1018_label_frame_99.png", "mr_train_1014_label_frame_99.png", "mr_train_1003_label_frame_20.png", "mr_train_1011_label_frame_101.png", "mr_train_1018_label_frame_119.png", "mr_train_1016_label_frame_78.png", "mr_train_1020_label_frame_130.png", "mr_train_1014_label_frame_24.png", "mr_train_1005_label_frame_40.png", "mr_train_1001_label_frame_43.png", "mr_train_1015_label_frame_111.png", "mr_train_1006_label_frame_128.png", "mr_train_1011_label_frame_3.png", "mr_train_1014_label_frame_104.png", "mr_train_1008_label_frame_95.png", "mr_train_1011_label_frame_92.png", "mr_train_1008_label_frame_20.png", "mr_train_1001_label_frame_62.png", "mr_train_1011_label_frame_119.png", "mr_train_1016_label_frame_67.png", "mr_train_1016_label_frame_0.png", "mr_train_1015_label_frame_63.png", "mr_train_1018_label_frame_14.png", "mr_train_1006_label_frame_39.png", "mr_train_1001_label_frame_99.png", "mr_train_1008_label_frame_123.png", "mr_train_1008_label_frame_39.png", "mr_train_1003_label_frame_24.png", "mr_train_1015_label_frame_69.png", "mr_train_1009_label_frame_9.png", "mr_train_1016_label_frame_40.png", "mr_train_1015_label_frame_32.png", "mr_train_1011_label_frame_29.png", "mr_train_1014_label_frame_92.png", "mr_train_1008_label_frame_14.png", "mr_train_1011_label_frame_138.png", "mr_train_1018_label_frame_21.png", "mr_train_1005_label_frame_81.png", "mr_train_1006_label_frame_21.png", "mr_train_1006_label_frame_89.png", "mr_train_1005_label_frame_55.png", "mr_train_1018_label_frame_90.png", "mr_train_1014_label_frame_56.png", "mr_train_1006_label_frame_2.png", "mr_train_1005_label_frame_30.png", "mr_train_1020_label_frame_133.png", "mr_train_1016_label_frame_10.png", "mr_train_1003_label_frame_96.png", "mr_train_1018_label_frame_13.png", "mr_train_1006_label_frame_38.png", "mr_train_1020_label_frame_29.png", "mr_train_1006_label_frame_139.png", "mr_train_1005_label_frame_56.png", "mr_train_1015_label_frame_41.png", "mr_train_1017_label_frame_121.png", "mr_train_1011_label_frame_6.png", "mr_train_1006_label_frame_47.png", "mr_train_1016_label_frame_118.png", "mr_train_1016_label_frame_22.png", "mr_train_1014_label_frame_120.png", "mr_train_1011_label_frame_159.png", "mr_train_1008_label_frame_69.png", "mr_train_1009_label_frame_73.png", "mr_train_1011_label_frame_67.png", "mr_train_1008_label_frame_71.png", "mr_train_1001_label_frame_80.png", "mr_train_1017_label_frame_107.png", "mr_train_1018_label_frame_56.png", "mr_train_1008_label_frame_98.png", "mr_train_1006_label_frame_45.png", "mr_train_1003_label_frame_141.png", "mr_train_1016_label_frame_44.png", "mr_train_1020_label_frame_15.png", "mr_train_1018_label_frame_100.png", "mr_train_1003_label_frame_31.png", "mr_train_1008_label_frame_72.png", "mr_train_1001_label_frame_157.png", "mr_train_1008_label_frame_107.png", "mr_train_1009_label_frame_17.png", "mr_train_1001_label_frame_19.png", "mr_train_1005_label_frame_10.png", "mr_train_1009_label_frame_88.png", "mr_train_1016_label_frame_86.png", "mr_train_1015_label_frame_190.png", "mr_train_1017_label_frame_108.png", "mr_train_1008_label_frame_124.png", "mr_train_1005_label_frame_1.png", "mr_train_1008_label_frame_37.png", 
"mr_train_1015_label_frame_178.png", "mr_train_1009_label_frame_46.png", "mr_train_1017_label_frame_17.png", "mr_train_1011_label_frame_28.png", "mr_train_1011_label_frame_42.png", "mr_train_1003_label_frame_140.png", "mr_train_1017_label_frame_120.png", "mr_train_1005_label_frame_126.png", "mr_train_1008_label_frame_118.png", "mr_train_1006_label_frame_0.png", "mr_train_1001_label_frame_123.png", "mr_train_1001_label_frame_17.png", "mr_train_1014_label_frame_138.png", "mr_train_1016_label_frame_80.png", "mr_train_1015_label_frame_166.png", "mr_train_1006_label_frame_34.png", "mr_train_1009_label_frame_61.png", "mr_train_1011_label_frame_30.png", "mr_train_1006_label_frame_54.png", "mr_train_1003_label_frame_62.png", "mr_train_1020_label_frame_80.png", "mr_train_1015_label_frame_174.png", "mr_train_1014_label_frame_13.png", "mr_train_1015_label_frame_30.png", "mr_train_1005_label_frame_16.png", "mr_train_1014_label_frame_34.png", "mr_train_1015_label_frame_80.png", "mr_train_1015_label_frame_191.png", "mr_train_1018_label_frame_112.png", "mr_train_1001_label_frame_96.png", "mr_train_1011_label_frame_19.png", "mr_train_1006_label_frame_15.png", "mr_train_1017_label_frame_127.png", "mr_train_1006_label_frame_26.png", "mr_train_1017_label_frame_114.png", "mr_train_1001_label_frame_82.png", "mr_train_1005_label_frame_116.png", "mr_train_1016_label_frame_89.png", "mr_train_1008_label_frame_66.png", "mr_train_1006_label_frame_30.png", "mr_train_1017_label_frame_2.png", "mr_train_1009_label_frame_22.png", "mr_train_1016_label_frame_57.png", "mr_train_1018_label_frame_85.png", "mr_train_1015_label_frame_51.png", "mr_train_1016_label_frame_58.png", "mr_train_1020_label_frame_104.png", "mr_train_1017_label_frame_133.png", "mr_train_1005_label_frame_67.png", "mr_train_1018_label_frame_130.png", "mr_train_1017_label_frame_18.png", "mr_train_1015_label_frame_20.png", "mr_train_1003_label_frame_66.png", "mr_train_1006_label_frame_79.png", "mr_train_1011_label_frame_56.png", "mr_train_1003_label_frame_95.png", "mr_train_1011_label_frame_157.png", "mr_train_1006_label_frame_99.png", "mr_train_1008_label_frame_100.png", "mr_train_1001_label_frame_47.png", "mr_train_1018_label_frame_25.png", "mr_train_1009_label_frame_119.png", "mr_train_1009_label_frame_20.png", "mr_train_1014_label_frame_30.png", "mr_train_1011_label_frame_39.png", "mr_train_1001_label_frame_25.png", "mr_train_1009_label_frame_38.png", "mr_train_1014_label_frame_89.png", "mr_train_1001_label_frame_124.png", "mr_train_1001_label_frame_57.png", "mr_train_1018_label_frame_126.png", "mr_train_1006_label_frame_63.png", "mr_train_1015_label_frame_144.png", "mr_train_1014_label_frame_158.png", "mr_train_1006_label_frame_74.png", "mr_train_1018_label_frame_149.png", "mr_train_1015_label_frame_115.png", "mr_train_1016_label_frame_74.png", "mr_train_1009_label_frame_114.png", "mr_train_1003_label_frame_90.png", "mr_train_1014_label_frame_97.png", "mr_train_1006_label_frame_130.png", "mr_train_1003_label_frame_79.png", "mr_train_1001_label_frame_159.png", "mr_train_1016_label_frame_94.png", "mr_train_1009_label_frame_44.png", "mr_train_1014_label_frame_123.png", "mr_train_1017_label_frame_87.png", "mr_train_1016_label_frame_38.png", "mr_train_1011_label_frame_141.png", "mr_train_1006_label_frame_138.png", "mr_train_1011_label_frame_74.png", "mr_train_1018_label_frame_122.png", "mr_train_1005_label_frame_47.png", "mr_train_1014_label_frame_110.png", "mr_train_1001_label_frame_88.png", "mr_train_1005_label_frame_112.png", 
"mr_train_1003_label_frame_142.png", "mr_train_1018_label_frame_1.png", "mr_train_1014_label_frame_78.png", "mr_train_1001_label_frame_132.png", "mr_train_1008_label_frame_90.png", "mr_train_1011_label_frame_126.png", "mr_train_1015_label_frame_138.png", "mr_train_1015_label_frame_175.png", "mr_train_1005_label_frame_53.png", "mr_train_1014_label_frame_18.png", "mr_train_1015_label_frame_75.png", "mr_train_1005_label_frame_32.png", "mr_train_1003_label_frame_52.png", "mr_train_1009_label_frame_60.png", "mr_train_1018_label_frame_134.png", "mr_train_1009_label_frame_39.png", "mr_train_1005_label_frame_42.png", "mr_train_1011_label_frame_122.png", "mr_train_1011_label_frame_15.png", "mr_train_1015_label_frame_185.png", "mr_train_1009_label_frame_91.png", "mr_train_1015_label_frame_59.png", "mr_train_1018_label_frame_83.png", "mr_train_1006_label_frame_55.png", "mr_train_1017_label_frame_71.png", "mr_train_1017_label_frame_74.png", "mr_train_1020_label_frame_14.png", "mr_train_1016_label_frame_125.png", "mr_train_1011_label_frame_31.png", "mr_train_1015_label_frame_64.png", "mr_train_1014_label_frame_5.png", "mr_train_1018_label_frame_41.png", "mr_train_1011_label_frame_79.png", "mr_train_1005_label_frame_82.png", "mr_train_1001_label_frame_73.png", "mr_train_1001_label_frame_60.png", "mr_train_1015_label_frame_113.png", "mr_train_1015_label_frame_88.png", "mr_train_1006_label_frame_140.png", "mr_train_1001_label_frame_131.png", "mr_train_1015_label_frame_98.png", "mr_train_1020_label_frame_36.png", "mr_train_1020_label_frame_63.png", "mr_train_1015_label_frame_9.png", "mr_train_1018_label_frame_42.png", "mr_train_1018_label_frame_40.png", "mr_train_1011_label_frame_78.png", "mr_train_1003_label_frame_78.png", "mr_train_1003_label_frame_102.png", "mr_train_1001_label_frame_28.png", "mr_train_1005_label_frame_85.png", "mr_train_1001_label_frame_2.png", "mr_train_1011_label_frame_50.png", "mr_train_1011_label_frame_88.png", "mr_train_1006_label_frame_149.png", "mr_train_1001_label_frame_35.png", "mr_train_1003_label_frame_72.png", "mr_train_1005_label_frame_92.png", "mr_train_1018_label_frame_147.png", "mr_train_1003_label_frame_57.png", "mr_train_1005_label_frame_54.png", "mr_train_1018_label_frame_76.png", "mr_train_1014_label_frame_144.png", "mr_train_1008_label_frame_24.png", "mr_train_1020_label_frame_134.png", "mr_train_1020_label_frame_45.png", "mr_train_1006_label_frame_25.png", "mr_train_1008_label_frame_102.png", "mr_train_1009_label_frame_28.png", "mr_train_1017_label_frame_6.png", "mr_train_1015_label_frame_60.png", "mr_train_1015_label_frame_87.png", "mr_train_1020_label_frame_111.png", "mr_train_1001_label_frame_111.png", "mr_train_1006_label_frame_18.png", "mr_train_1016_label_frame_55.png", "mr_train_1006_label_frame_108.png", "mr_train_1018_label_frame_39.png", "mr_train_1005_label_frame_100.png", "mr_train_1017_label_frame_36.png", "mr_train_1003_label_frame_157.png", "mr_train_1017_label_frame_135.png", "mr_train_1011_label_frame_91.png", "mr_train_1009_label_frame_98.png", "mr_train_1017_label_frame_13.png", "mr_train_1014_label_frame_112.png", "mr_train_1020_label_frame_10.png", "mr_train_1003_label_frame_33.png", "mr_train_1005_label_frame_97.png", "mr_train_1018_label_frame_22.png", "mr_train_1018_label_frame_34.png", "mr_train_1017_label_frame_111.png", "mr_train_1003_label_frame_50.png", "mr_train_1011_label_frame_0.png", "mr_train_1011_label_frame_107.png", "mr_train_1003_label_frame_144.png", "mr_train_1006_label_frame_48.png", "mr_train_1006_label_frame_11.png", 
"mr_train_1020_label_frame_68.png", "mr_train_1011_label_frame_44.png", "mr_train_1001_label_frame_90.png", "mr_train_1003_label_frame_85.png", "mr_train_1020_label_frame_17.png", "mr_train_1020_label_frame_115.png", "mr_train_1006_label_frame_109.png", "mr_train_1015_label_frame_4.png", "mr_train_1003_label_frame_16.png", "mr_train_1003_label_frame_9.png", "mr_train_1008_label_frame_7.png", "mr_train_1003_label_frame_61.png", "mr_train_1005_label_frame_89.png", "mr_train_1003_label_frame_108.png", "mr_train_1016_label_frame_50.png", "mr_train_1020_label_frame_43.png", "mr_train_1016_label_frame_61.png", "mr_train_1003_label_frame_109.png", "mr_train_1020_label_frame_53.png", "mr_train_1006_label_frame_116.png", "mr_train_1015_label_frame_95.png", "mr_train_1011_label_frame_68.png", "mr_train_1005_label_frame_105.png", "mr_train_1003_label_frame_64.png", "mr_train_1001_label_frame_127.png", "mr_train_1015_label_frame_153.png", "mr_train_1014_label_frame_113.png", "mr_train_1011_label_frame_85.png", "mr_train_1005_label_frame_60.png", "mr_train_1001_label_frame_154.png", "mr_train_1011_label_frame_82.png", "mr_train_1005_label_frame_2.png", "mr_train_1001_label_frame_135.png", "mr_train_1016_label_frame_82.png", "mr_train_1016_label_frame_56.png", "mr_train_1011_label_frame_83.png", "mr_train_1005_label_frame_125.png", "mr_train_1006_label_frame_69.png", "mr_train_1016_label_frame_69.png", "mr_train_1003_label_frame_38.png", "mr_train_1008_label_frame_27.png", "mr_train_1005_label_frame_34.png", "mr_train_1015_label_frame_0.png", "mr_train_1017_label_frame_39.png", "mr_train_1017_label_frame_137.png", "mr_train_1008_label_frame_23.png", "mr_train_1018_label_frame_80.png", "mr_train_1005_label_frame_103.png", "mr_train_1014_label_frame_135.png", "mr_train_1014_label_frame_16.png", "mr_train_1017_label_frame_115.png", "mr_train_1016_label_frame_46.png", "mr_train_1015_label_frame_43.png", "mr_train_1008_label_frame_32.png", "mr_train_1003_label_frame_158.png", "mr_train_1001_label_frame_93.png", "mr_train_1001_label_frame_26.png", "mr_train_1017_label_frame_96.png", "mr_train_1011_label_frame_55.png", "mr_train_1003_label_frame_15.png", "mr_train_1014_label_frame_127.png", "mr_train_1018_label_frame_140.png", "mr_train_1014_label_frame_57.png", "mr_train_1011_label_frame_158.png", "mr_train_1015_label_frame_107.png", "mr_train_1020_label_frame_106.png", "mr_train_1001_label_frame_149.png", "mr_train_1009_label_frame_104.png", "mr_train_1006_label_frame_10.png", "mr_train_1017_label_frame_63.png", "mr_train_1018_label_frame_9.png", "mr_train_1018_label_frame_58.png", "mr_train_1017_label_frame_7.png", "mr_train_1015_label_frame_83.png", "mr_train_1015_label_frame_12.png", "mr_train_1003_label_frame_111.png", "mr_train_1008_label_frame_61.png", "mr_train_1003_label_frame_25.png", "mr_train_1015_label_frame_129.png", "mr_train_1017_label_frame_40.png", "mr_train_1014_label_frame_70.png", "mr_train_1016_label_frame_42.png", "mr_train_1006_label_frame_72.png", "mr_train_1017_label_frame_41.png", "mr_train_1005_label_frame_59.png", "mr_train_1006_label_frame_85.png", "mr_train_1018_label_frame_117.png", "mr_train_1016_label_frame_49.png", "mr_train_1014_label_frame_156.png", "mr_train_1003_label_frame_103.png", "mr_train_1017_label_frame_15.png", "mr_train_1008_label_frame_70.png", "mr_train_1006_label_frame_58.png", "mr_train_1003_label_frame_100.png", "mr_train_1015_label_frame_162.png", "mr_train_1005_label_frame_8.png", "mr_train_1020_label_frame_94.png", "mr_train_1001_label_frame_84.png", 
"mr_train_1015_label_frame_17.png", "mr_train_1020_label_frame_8.png", "mr_train_1001_label_frame_112.png", "mr_train_1020_label_frame_89.png", "mr_train_1003_label_frame_3.png", "mr_train_1008_label_frame_0.png", "mr_train_1009_label_frame_3.png", "mr_train_1014_label_frame_27.png", "mr_train_1003_label_frame_87.png", "mr_train_1009_label_frame_59.png", "mr_train_1008_label_frame_30.png", "mr_train_1003_label_frame_127.png", "mr_train_1014_label_frame_50.png", "mr_train_1011_label_frame_72.png", "mr_train_1005_label_frame_122.png", "mr_train_1016_label_frame_12.png", "mr_train_1017_label_frame_46.png", "mr_train_1016_label_frame_14.png", "mr_train_1015_label_frame_55.png", "mr_train_1020_label_frame_44.png", "mr_train_1001_label_frame_122.png", "mr_train_1005_label_frame_114.png", "mr_train_1015_label_frame_137.png", "mr_train_1014_label_frame_15.png", "mr_train_1011_label_frame_99.png", "mr_train_1006_label_frame_127.png", "mr_train_1018_label_frame_125.png", "mr_train_1020_label_frame_105.png", "mr_train_1011_label_frame_16.png", "mr_train_1015_label_frame_193.png", "mr_train_1011_label_frame_48.png", "mr_train_1015_label_frame_125.png", "mr_train_1015_label_frame_143.png", "mr_train_1011_label_frame_105.png", "mr_train_1003_label_frame_106.png", "mr_train_1018_label_frame_63.png", "mr_train_1001_label_frame_70.png", "mr_train_1016_label_frame_62.png", "mr_train_1014_label_frame_73.png", "mr_train_1014_label_frame_38.png", "mr_train_1003_label_frame_82.png", "mr_train_1016_label_frame_124.png", "mr_train_1006_label_frame_132.png", "mr_train_1006_label_frame_4.png", "mr_train_1015_label_frame_197.png", "mr_train_1003_label_frame_88.png", "mr_train_1001_label_frame_46.png", "mr_train_1016_label_frame_15.png", "mr_train_1006_label_frame_8.png", "mr_train_1015_label_frame_189.png", "mr_train_1009_label_frame_71.png", "mr_train_1016_label_frame_18.png", "mr_train_1008_label_frame_67.png", "mr_train_1005_label_frame_33.png", "mr_train_1001_label_frame_76.png", "mr_train_1011_label_frame_37.png", "mr_train_1006_label_frame_14.png", "mr_train_1006_label_frame_93.png", "mr_train_1014_label_frame_95.png", "mr_train_1016_label_frame_70.png", "mr_train_1006_label_frame_159.png", "mr_train_1015_label_frame_183.png", "mr_train_1005_label_frame_124.png", "mr_train_1003_label_frame_121.png", "mr_train_1015_label_frame_49.png", "mr_train_1009_label_frame_49.png", "mr_train_1017_label_frame_72.png", "mr_train_1017_label_frame_21.png", "mr_train_1001_label_frame_116.png", "mr_train_1017_label_frame_93.png", "mr_train_1009_label_frame_87.png", "mr_train_1008_label_frame_28.png", "mr_train_1006_label_frame_62.png", "mr_train_1011_label_frame_2.png", "mr_train_1006_label_frame_146.png", "mr_train_1009_label_frame_54.png", "mr_train_1017_label_frame_89.png", "mr_train_1005_label_frame_101.png", "mr_train_1011_label_frame_64.png", "mr_train_1011_label_frame_41.png", "mr_train_1016_label_frame_53.png", "mr_train_1020_label_frame_126.png", "mr_train_1015_label_frame_192.png", "mr_train_1011_label_frame_36.png", "mr_train_1005_label_frame_71.png", "mr_train_1016_label_frame_97.png", "mr_train_1018_label_frame_33.png", "mr_train_1020_label_frame_42.png", "mr_train_1005_label_frame_46.png", "mr_train_1014_label_frame_14.png", "mr_train_1008_label_frame_64.png", "mr_train_1006_label_frame_98.png", "mr_train_1008_label_frame_65.png", "mr_train_1015_label_frame_182.png", "mr_train_1011_label_frame_84.png", "mr_train_1016_label_frame_45.png", "mr_train_1014_label_frame_41.png", "mr_train_1017_label_frame_23.png", 
"mr_train_1017_label_frame_80.png", "mr_train_1017_label_frame_77.png", "mr_train_1020_label_frame_127.png", "mr_train_1006_label_frame_107.png", "mr_train_1011_label_frame_26.png", "mr_train_1001_label_frame_83.png", "mr_train_1005_label_frame_88.png", "mr_train_1015_label_frame_163.png", "mr_train_1014_label_frame_74.png", "mr_train_1006_label_frame_36.png", "mr_train_1015_label_frame_140.png", "mr_train_1014_label_frame_80.png", "mr_train_1003_label_frame_101.png", "mr_train_1014_label_frame_126.png", "mr_train_1020_label_frame_92.png", "mr_train_1009_label_frame_16.png", "mr_train_1001_label_frame_38.png", "mr_train_1009_label_frame_84.png", "mr_train_1008_label_frame_62.png", "mr_train_1016_label_frame_16.png", "mr_train_1015_label_frame_35.png", "mr_train_1005_label_frame_61.png", "mr_train_1015_label_frame_186.png", "mr_train_1020_label_frame_102.png", "mr_train_1014_label_frame_58.png", "mr_train_1009_label_frame_64.png", "mr_train_1018_label_frame_23.png", "mr_train_1001_label_frame_13.png", "mr_train_1009_label_frame_19.png", "mr_train_1015_label_frame_96.png", "mr_train_1009_label_frame_36.png", "mr_train_1014_label_frame_35.png", "mr_train_1005_label_frame_119.png", "mr_train_1017_label_frame_57.png", "mr_train_1008_label_frame_17.png", "mr_train_1020_label_frame_5.png", "mr_train_1005_label_frame_9.png", "mr_train_1018_label_frame_74.png", "mr_train_1009_label_frame_1.png", "mr_train_1014_label_frame_33.png", "mr_train_1001_label_frame_106.png", "mr_train_1005_label_frame_111.png", "mr_train_1008_label_frame_109.png", "mr_train_1009_label_frame_2.png", "mr_train_1017_label_frame_91.png", "mr_train_1003_label_frame_8.png", "mr_train_1020_label_frame_118.png", "mr_train_1008_label_frame_99.png", "mr_train_1005_label_frame_70.png", "mr_train_1006_label_frame_156.png", "mr_train_1003_label_frame_143.png", "mr_train_1011_label_frame_52.png", "mr_train_1008_label_frame_129.png", "mr_train_1005_label_frame_5.png", "mr_train_1008_label_frame_33.png", "mr_train_1014_label_frame_145.png", "mr_train_1003_label_frame_120.png", "mr_train_1015_label_frame_126.png", "mr_train_1003_label_frame_56.png", "mr_train_1001_label_frame_78.png", "mr_train_1003_label_frame_149.png", "mr_train_1005_label_frame_95.png", "mr_train_1015_label_frame_135.png", "mr_train_1009_label_frame_74.png", "mr_train_1001_label_frame_52.png", "mr_train_1014_label_frame_64.png", "mr_train_1011_label_frame_14.png", "mr_train_1020_label_frame_128.png", "mr_train_1003_label_frame_110.png", "mr_train_1006_label_frame_68.png", "mr_train_1006_label_frame_121.png", "mr_train_1015_label_frame_118.png", "mr_train_1009_label_frame_95.png", "mr_train_1020_label_frame_67.png", "mr_train_1017_label_frame_138.png", "mr_train_1006_label_frame_126.png", "mr_train_1003_label_frame_47.png", "mr_train_1001_label_frame_100.png", "mr_train_1003_label_frame_84.png", "mr_train_1018_label_frame_15.png", "mr_train_1006_label_frame_32.png", "mr_train_1014_label_frame_152.png", "mr_train_1001_label_frame_85.png", "mr_train_1008_label_frame_4.png", "mr_train_1016_label_frame_106.png", "mr_train_1009_label_frame_53.png", "mr_train_1017_label_frame_12.png", "mr_train_1005_label_frame_68.png", "mr_train_1005_label_frame_43.png", "mr_train_1005_label_frame_29.png", "mr_train_1005_label_frame_94.png", "mr_train_1014_label_frame_39.png", "mr_train_1020_label_frame_122.png", "mr_train_1005_label_frame_79.png", "mr_train_1011_label_frame_77.png", "mr_train_1008_label_frame_125.png", "mr_train_1005_label_frame_49.png", "mr_train_1009_label_frame_65.png", 
"mr_train_1017_label_frame_70.png", "mr_train_1020_label_frame_30.png", "mr_train_1016_label_frame_21.png", "mr_train_1003_label_frame_125.png", "mr_train_1011_label_frame_60.png", "mr_train_1015_label_frame_50.png", "mr_train_1001_label_frame_156.png", "mr_train_1016_label_frame_76.png", "mr_train_1015_label_frame_149.png", "mr_train_1005_label_frame_91.png", "mr_train_1015_label_frame_104.png", "mr_train_1011_label_frame_20.png", "mr_train_1018_label_frame_118.png", "mr_train_1005_label_frame_3.png", "mr_train_1017_label_frame_11.png", "mr_train_1020_label_frame_112.png", "mr_train_1018_label_frame_11.png", "mr_train_1001_label_frame_34.png", "mr_train_1014_label_frame_60.png", "mr_train_1001_label_frame_18.png", "mr_train_1015_label_frame_160.png", "mr_train_1003_label_frame_74.png", "mr_train_1011_label_frame_89.png", "mr_train_1015_label_frame_131.png", "mr_train_1014_label_frame_1.png", "mr_train_1014_label_frame_136.png", "mr_train_1009_label_frame_57.png", "mr_train_1015_label_frame_123.png", "mr_train_1020_label_frame_114.png", "mr_train_1001_label_frame_142.png", "mr_train_1011_label_frame_80.png", "mr_train_1006_label_frame_16.png", "mr_train_1003_label_frame_92.png", "mr_train_1003_label_frame_2.png", "mr_train_1014_label_frame_66.png", "mr_train_1018_label_frame_71.png", "mr_train_1008_label_frame_48.png", "mr_train_1020_label_frame_50.png", "mr_train_1015_label_frame_57.png", "mr_train_1009_label_frame_26.png", "mr_train_1017_label_frame_81.png", "mr_train_1003_label_frame_126.png", "mr_train_1011_label_frame_145.png", "mr_train_1005_label_frame_65.png", "mr_train_1005_label_frame_48.png", "mr_train_1020_label_frame_2.png", "mr_train_1003_label_frame_80.png", "mr_train_1008_label_frame_83.png", "mr_train_1005_label_frame_63.png", "mr_train_1018_label_frame_128.png", "mr_train_1011_label_frame_8.png", "mr_train_1016_label_frame_4.png", "mr_train_1020_label_frame_97.png", "mr_train_1009_label_frame_82.png", "mr_train_1009_label_frame_5.png", "mr_train_1011_label_frame_120.png", "mr_train_1009_label_frame_116.png", "mr_train_1015_label_frame_56.png", "mr_train_1003_label_frame_97.png", "mr_train_1011_label_frame_93.png", "mr_train_1020_label_frame_69.png", "mr_train_1015_label_frame_194.png", "mr_train_1001_label_frame_54.png", "mr_train_1014_label_frame_150.png", "mr_train_1008_label_frame_80.png", "mr_train_1005_label_frame_58.png", "mr_train_1008_label_frame_79.png", "mr_train_1020_label_frame_99.png", "mr_train_1006_label_frame_111.png", "mr_train_1001_label_frame_77.png", "mr_train_1014_label_frame_12.png", "mr_train_1014_label_frame_146.png", "mr_train_1017_label_frame_125.png", "mr_train_1014_label_frame_91.png", "mr_train_1015_label_frame_150.png", "mr_train_1020_label_frame_100.png", "mr_train_1001_label_frame_5.png", "mr_train_1006_label_frame_148.png", "mr_train_1014_label_frame_46.png", "mr_train_1006_label_frame_102.png", "mr_train_1005_label_frame_87.png", "mr_train_1009_label_frame_108.png", "mr_train_1016_label_frame_17.png", "mr_train_1018_label_frame_30.png", "mr_train_1014_label_frame_105.png", "mr_train_1003_label_frame_41.png", "mr_train_1008_label_frame_46.png", "mr_train_1015_label_frame_36.png", "mr_train_1006_label_frame_5.png", "mr_train_1011_label_frame_117.png", "mr_train_1011_label_frame_63.png", "mr_train_1014_label_frame_67.png", "mr_train_1001_label_frame_20.png", "mr_train_1018_label_frame_26.png", "mr_train_1005_label_frame_12.png", "mr_train_1015_label_frame_173.png", "mr_train_1014_label_frame_115.png", "mr_train_1006_label_frame_154.png", 
"mr_train_1005_label_frame_110.png", "mr_train_1014_label_frame_8.png", "mr_train_1015_label_frame_91.png", "mr_train_1001_label_frame_95.png", "mr_train_1016_label_frame_128.png", "mr_train_1015_label_frame_177.png", "mr_train_1005_label_frame_50.png", "mr_train_1017_label_frame_24.png", "mr_train_1020_label_frame_59.png", "mr_train_1014_label_frame_118.png", "mr_train_1018_label_frame_102.png", "mr_train_1011_label_frame_135.png", "mr_train_1005_label_frame_19.png", "mr_train_1006_label_frame_31.png", "mr_train_1018_label_frame_116.png", "mr_train_1006_label_frame_125.png", "mr_train_1014_label_frame_94.png", "mr_train_1011_label_frame_69.png", "mr_train_1020_label_frame_47.png", "mr_train_1017_label_frame_19.png", "mr_train_1014_label_frame_32.png", "mr_train_1001_label_frame_79.png", "mr_train_1017_label_frame_51.png", "mr_train_1018_label_frame_8.png", "mr_train_1001_label_frame_39.png", "mr_train_1014_label_frame_88.png", "mr_train_1011_label_frame_46.png", "mr_train_1001_label_frame_146.png", "mr_train_1006_label_frame_77.png", "mr_train_1001_label_frame_56.png", "mr_train_1017_label_frame_29.png", "mr_train_1018_label_frame_20.png", "mr_train_1009_label_frame_106.png", "mr_train_1016_label_frame_75.png", "mr_train_1014_label_frame_84.png", "mr_train_1011_label_frame_97.png", "mr_train_1001_label_frame_145.png", "mr_train_1015_label_frame_172.png", "mr_train_1016_label_frame_34.png", "mr_train_1001_label_frame_110.png", "mr_train_1011_label_frame_70.png", "mr_train_1020_label_frame_57.png", "mr_train_1011_label_frame_10.png", "mr_train_1015_label_frame_184.png", "mr_train_1014_label_frame_22.png", "mr_train_1020_label_frame_27.png", "mr_train_1006_label_frame_83.png", "mr_train_1001_label_frame_65.png", "mr_train_1008_label_frame_19.png", "mr_train_1016_label_frame_95.png", "mr_train_1016_label_frame_30.png", "mr_train_1006_label_frame_75.png", "mr_train_1009_label_frame_10.png", "mr_train_1001_label_frame_67.png", "mr_train_1015_label_frame_44.png", "mr_train_1018_label_frame_48.png", "mr_train_1009_label_frame_66.png", "mr_train_1017_label_frame_139.png", "mr_train_1003_label_frame_132.png", "mr_train_1006_label_frame_158.png", "mr_train_1006_label_frame_119.png", "mr_train_1018_label_frame_132.png", "mr_train_1001_label_frame_10.png", "mr_train_1008_label_frame_60.png", "mr_train_1015_label_frame_139.png", "mr_train_1018_label_frame_64.png", "mr_train_1015_label_frame_18.png", "mr_train_1005_label_frame_27.png", "mr_train_1018_label_frame_12.png", "mr_train_1001_label_frame_61.png", "mr_train_1003_label_frame_99.png", "mr_train_1020_label_frame_125.png", "mr_train_1008_label_frame_112.png", "mr_train_1017_label_frame_84.png", "mr_train_1001_label_frame_150.png", "mr_train_1020_label_frame_13.png", "mr_train_1016_label_frame_127.png", "mr_train_1003_label_frame_115.png", "mr_train_1017_label_frame_35.png", "mr_train_1008_label_frame_63.png", "mr_train_1001_label_frame_103.png", "mr_train_1011_label_frame_125.png", "mr_train_1020_label_frame_23.png", "mr_train_1008_label_frame_22.png", "mr_train_1017_label_frame_131.png", "mr_train_1015_label_frame_22.png", "mr_train_1006_label_frame_143.png", "mr_train_1018_label_frame_87.png", "mr_train_1006_label_frame_61.png", "mr_train_1020_label_frame_1.png", "mr_train_1008_label_frame_119.png", "mr_train_1011_label_frame_61.png", "mr_train_1020_label_frame_129.png", "mr_train_1017_label_frame_50.png", "mr_train_1008_label_frame_103.png", "mr_train_1020_label_frame_55.png", "mr_train_1001_label_frame_108.png", 
"mr_train_1016_label_frame_96.png", "mr_train_1020_label_frame_25.png", "mr_train_1009_label_frame_55.png", "mr_train_1001_label_frame_27.png", "mr_train_1011_label_frame_86.png", "mr_train_1005_label_frame_52.png", "mr_train_1008_label_frame_96.png", "mr_train_1014_label_frame_103.png", "mr_train_1003_label_frame_11.png", "mr_train_1017_label_frame_48.png", "mr_train_1016_label_frame_35.png", "mr_train_1006_label_frame_97.png", "mr_train_1009_label_frame_69.png", "mr_train_1017_label_frame_58.png", "mr_train_1017_label_frame_88.png", "mr_train_1008_label_frame_114.png", "mr_train_1008_label_frame_108.png", "mr_train_1018_label_frame_66.png", "mr_train_1011_label_frame_108.png", "mr_train_1005_label_frame_66.png", "mr_train_1020_label_frame_77.png", "mr_train_1015_label_frame_105.png", "mr_train_1011_label_frame_118.png", "mr_train_1009_label_frame_0.png", "mr_train_1016_label_frame_66.png", "mr_train_1005_label_frame_35.png", "mr_train_1018_label_frame_35.png", "mr_train_1014_label_frame_52.png", "mr_train_1011_label_frame_49.png", "mr_train_1017_label_frame_26.png", "mr_train_1003_label_frame_0.png", "mr_train_1011_label_frame_140.png", "mr_train_1003_label_frame_116.png", "mr_train_1018_label_frame_49.png", "mr_train_1006_label_frame_103.png", "mr_train_1001_label_frame_136.png", "mr_train_1008_label_frame_77.png", "mr_train_1008_label_frame_93.png", "mr_train_1001_label_frame_107.png", "mr_train_1009_label_frame_78.png", "mr_train_1005_label_frame_113.png", "mr_train_1014_label_frame_133.png", "mr_train_1003_label_frame_59.png", "mr_train_1001_label_frame_48.png", "mr_train_1006_label_frame_40.png", "mr_train_1006_label_frame_87.png", "mr_train_1017_label_frame_61.png", "mr_train_1020_label_frame_12.png", "mr_train_1018_label_frame_43.png", "mr_train_1015_label_frame_74.png", "mr_train_1017_label_frame_113.png", "mr_train_1011_label_frame_152.png", "mr_train_1018_label_frame_18.png", "mr_train_1006_label_frame_104.png", "mr_train_1009_label_frame_63.png", "mr_train_1011_label_frame_151.png", "mr_train_1005_label_frame_57.png", "mr_train_1003_label_frame_37.png", "mr_train_1008_label_frame_41.png", "mr_train_1020_label_frame_66.png", "mr_train_1001_label_frame_40.png", "mr_train_1018_label_frame_120.png", "mr_train_1006_label_frame_157.png", "mr_train_1016_label_frame_51.png", "mr_train_1006_label_frame_110.png", "mr_train_1020_label_frame_0.png", "mr_train_1016_label_frame_87.png", "mr_train_1016_label_frame_48.png", "mr_train_1015_label_frame_120.png", "mr_train_1009_label_frame_15.png", "mr_train_1005_label_frame_17.png", "mr_train_1001_label_frame_32.png", "mr_train_1015_label_frame_26.png", "mr_train_1020_label_frame_109.png", "mr_train_1018_label_frame_123.png", "mr_train_1015_label_frame_114.png", "mr_train_1009_label_frame_93.png", "mr_train_1020_label_frame_83.png", "mr_train_1001_label_frame_63.png", "mr_train_1015_label_frame_86.png", "mr_train_1015_label_frame_40.png", "mr_train_1001_label_frame_97.png", "mr_train_1008_label_frame_94.png", "mr_train_1015_label_frame_28.png", "mr_train_1006_label_frame_133.png", "mr_train_1017_label_frame_119.png", "mr_train_1001_label_frame_29.png", "mr_train_1015_label_frame_187.png", "mr_train_1018_label_frame_37.png", "mr_train_1009_label_frame_40.png", "mr_train_1014_label_frame_29.png", "mr_train_1018_label_frame_95.png", "mr_train_1017_label_frame_128.png", "mr_train_1001_label_frame_143.png", "mr_train_1017_label_frame_34.png", "mr_train_1020_label_frame_101.png", "mr_train_1008_label_frame_49.png", "mr_train_1014_label_frame_79.png", 
"mr_train_1015_label_frame_170.png", "mr_train_1015_label_frame_33.png", "mr_train_1017_label_frame_38.png", "mr_train_1014_label_frame_31.png", "mr_train_1020_label_frame_6.png", "mr_train_1017_label_frame_1.png", "mr_train_1011_label_frame_21.png", "mr_train_1015_label_frame_157.png", "mr_train_1003_label_frame_45.png", "mr_train_1006_label_frame_51.png", "mr_train_1006_label_frame_153.png", "mr_train_1018_label_frame_101.png", "mr_train_1016_label_frame_102.png", "mr_train_1015_label_frame_168.png", "mr_train_1017_label_frame_32.png", "mr_train_1017_label_frame_85.png", "mr_train_1001_label_frame_59.png", "mr_train_1008_label_frame_31.png", "mr_train_1009_label_frame_94.png", "mr_train_1017_label_frame_78.png", "mr_train_1015_label_frame_65.png", "mr_train_1014_label_frame_62.png", "mr_train_1014_label_frame_153.png", "mr_train_1020_label_frame_72.png", "mr_train_1001_label_frame_133.png", "mr_train_1016_label_frame_73.png", "mr_train_1016_label_frame_90.png", "mr_train_1008_label_frame_53.png", "mr_train_1017_label_frame_5.png", "mr_train_1017_label_frame_47.png", "mr_train_1001_label_frame_104.png", "mr_train_1001_label_frame_12.png", "mr_train_1020_label_frame_61.png", "mr_train_1016_label_frame_111.png", "mr_train_1003_label_frame_93.png", "mr_train_1014_label_frame_54.png", "mr_train_1009_label_frame_32.png", "mr_train_1011_label_frame_130.png", "mr_train_1005_label_frame_44.png", "mr_train_1009_label_frame_42.png", "mr_train_1017_label_frame_82.png", "mr_train_1014_label_frame_87.png", "mr_train_1008_label_frame_12.png", "mr_train_1018_label_frame_45.png", "mr_train_1016_label_frame_23.png", "mr_train_1001_label_frame_1.png", "mr_train_1003_label_frame_28.png", "mr_train_1017_label_frame_67.png", "mr_train_1018_label_frame_93.png", "mr_train_1005_label_frame_64.png", "mr_train_1008_label_frame_113.png", "mr_train_1016_label_frame_122.png", "mr_train_1011_label_frame_110.png", "mr_train_1016_label_frame_112.png", "mr_train_1001_label_frame_118.png", "mr_train_1008_label_frame_97.png", "mr_train_1008_label_frame_84.png", "mr_train_1011_label_frame_146.png", "mr_train_1003_label_frame_147.png", "mr_train_1008_label_frame_74.png", "mr_train_1015_label_frame_97.png", "mr_train_1016_label_frame_26.png", "mr_train_1009_label_frame_12.png", "mr_train_1020_label_frame_73.png", "mr_train_1015_label_frame_5.png", "mr_train_1003_label_frame_48.png", "mr_train_1014_label_frame_44.png", "mr_train_1009_label_frame_89.png", "mr_train_1015_label_frame_199.png", "mr_train_1016_label_frame_123.png", "mr_train_1015_label_frame_93.png", "mr_train_1015_label_frame_145.png", "mr_train_1015_label_frame_165.png", "mr_train_1006_label_frame_151.png", "mr_train_1015_label_frame_122.png", "mr_train_1005_label_frame_73.png", "mr_train_1006_label_frame_59.png", "mr_train_1005_label_frame_51.png", "mr_train_1003_label_frame_55.png", "mr_train_1009_label_frame_23.png", "mr_train_1011_label_frame_17.png", "mr_train_1011_label_frame_142.png", "mr_train_1015_label_frame_85.png", "mr_train_1018_label_frame_103.png", "mr_train_1016_label_frame_116.png", "mr_train_1009_label_frame_99.png", "mr_train_1015_label_frame_180.png", "mr_train_1011_label_frame_148.png", "mr_train_1003_label_frame_13.png", "mr_train_1008_label_frame_35.png", "mr_train_1011_label_frame_23.png", "mr_train_1020_label_frame_51.png", "mr_train_1015_label_frame_158.png", "mr_train_1005_label_frame_37.png", "mr_train_1008_label_frame_40.png", "mr_train_1016_label_frame_11.png", "mr_train_1003_label_frame_91.png", "mr_train_1018_label_frame_144.png", 
"mr_train_1016_label_frame_59.png", "mr_train_1003_label_frame_128.png", "mr_train_1018_label_frame_65.png", "mr_train_1003_label_frame_53.png", "mr_train_1011_label_frame_96.png", "mr_train_1006_label_frame_57.png", "mr_train_1006_label_frame_13.png", "mr_train_1011_label_frame_112.png", "mr_train_1018_label_frame_124.png", "mr_train_1005_label_frame_83.png", "mr_train_1005_label_frame_6.png", "mr_train_1015_label_frame_31.png", "mr_train_1020_label_frame_26.png", "mr_train_1018_label_frame_113.png", "mr_train_1006_label_frame_37.png", "mr_train_1016_label_frame_36.png", "mr_train_1001_label_frame_72.png", "mr_train_1016_label_frame_24.png", "mr_train_1020_label_frame_108.png", "mr_train_1017_label_frame_129.png", "mr_train_1018_label_frame_78.png", "mr_train_1003_label_frame_27.png", "mr_train_1014_label_frame_75.png", "mr_train_1011_label_frame_57.png", "mr_train_1005_label_frame_107.png", "mr_train_1003_label_frame_156.png", "mr_train_1017_label_frame_59.png", "mr_train_1018_label_frame_97.png", "mr_train_1003_label_frame_117.png", "mr_train_1003_label_frame_46.png", "mr_train_1005_label_frame_76.png", "mr_train_1001_label_frame_98.png", "mr_train_1006_label_frame_44.png", "mr_train_1017_label_frame_103.png", "mr_train_1005_label_frame_84.png", "mr_train_1017_label_frame_75.png", "mr_train_1020_label_frame_16.png", "mr_train_1017_label_frame_20.png", "mr_train_1017_label_frame_33.png", "mr_train_1016_label_frame_99.png", "mr_train_1003_label_frame_23.png", "mr_train_1017_label_frame_16.png", "mr_train_1001_label_frame_148.png", "mr_train_1018_label_frame_60.png", "mr_train_1006_label_frame_86.png", "mr_train_1014_label_frame_48.png", "mr_train_1015_label_frame_19.png", "mr_train_1017_label_frame_102.png", "mr_train_1009_label_frame_41.png", "mr_train_1003_label_frame_10.png", "mr_train_1016_label_frame_43.png", "mr_train_1018_label_frame_79.png", "mr_train_1015_label_frame_58.png", "mr_train_1017_label_frame_97.png", "mr_train_1008_label_frame_6.png", "mr_train_1006_label_frame_117.png", "mr_train_1009_label_frame_101.png", "mr_train_1003_label_frame_89.png", "mr_train_1014_label_frame_69.png", "mr_train_1015_label_frame_156.png", "mr_train_1001_label_frame_101.png", "mr_train_1015_label_frame_136.png", "mr_train_1020_label_frame_117.png", "mr_train_1003_label_frame_146.png", "mr_train_1017_label_frame_117.png", "mr_train_1017_label_frame_49.png", "mr_train_1017_label_frame_124.png", "mr_train_1014_label_frame_3.png", "mr_train_1020_label_frame_46.png", "mr_train_1015_label_frame_78.png", "mr_train_1015_label_frame_45.png", "mr_train_1014_label_frame_77.png", "mr_train_1011_label_frame_51.png", "mr_train_1016_label_frame_2.png", "mr_train_1006_label_frame_152.png", "mr_train_1011_label_frame_5.png", "mr_train_1020_label_frame_131.png", "mr_train_1015_label_frame_42.png", "mr_train_1017_label_frame_52.png", "mr_train_1015_label_frame_109.png", "mr_train_1005_label_frame_7.png", "mr_train_1001_label_frame_0.png", "mr_train_1014_label_frame_101.png", "mr_train_1020_label_frame_52.png", "mr_train_1006_label_frame_76.png", "mr_train_1015_label_frame_67.png", "mr_train_1008_label_frame_110.png", "mr_train_1018_label_frame_145.png", "mr_train_1018_label_frame_27.png", "mr_train_1005_label_frame_39.png", "mr_train_1015_label_frame_116.png", "mr_train_1001_label_frame_141.png", "mr_train_1001_label_frame_6.png", "mr_train_1017_label_frame_44.png", "mr_train_1006_label_frame_7.png", "mr_train_1001_label_frame_31.png", "mr_train_1020_label_frame_98.png", "mr_train_1018_label_frame_129.png", 
"mr_train_1003_label_frame_107.png", "mr_train_1017_label_frame_130.png", "mr_train_1015_label_frame_70.png", "mr_train_1018_label_frame_143.png", "mr_train_1009_label_frame_111.png", "mr_train_1006_label_frame_113.png", "mr_train_1006_label_frame_3.png", "mr_train_1003_label_frame_73.png", "mr_train_1006_label_frame_88.png", "mr_train_1014_label_frame_55.png", "mr_train_1020_label_frame_37.png", "mr_train_1001_label_frame_3.png", "mr_train_1017_label_frame_99.png", "mr_train_1018_label_frame_29.png", "mr_train_1006_label_frame_131.png", "mr_train_1014_label_frame_4.png", "mr_train_1001_label_frame_158.png", "mr_train_1011_label_frame_129.png", "mr_train_1006_label_frame_27.png", "mr_train_1008_label_frame_128.png", "mr_train_1017_label_frame_136.png", "mr_train_1005_label_frame_93.png", "mr_train_1006_label_frame_22.png", "mr_train_1003_label_frame_69.png", "mr_train_1017_label_frame_62.png", "mr_train_1015_label_frame_146.png", "mr_train_1005_label_frame_13.png", "mr_train_1011_label_frame_73.png", "mr_train_1020_label_frame_35.png", "mr_train_1008_label_frame_58.png", "mr_train_1016_label_frame_71.png", "mr_train_1009_label_frame_72.png", "mr_train_1017_label_frame_53.png", "mr_train_1014_label_frame_10.png", "mr_train_1001_label_frame_37.png", "mr_train_1003_label_frame_49.png", "mr_train_1003_label_frame_51.png", "mr_train_1006_label_frame_81.png", "mr_train_1008_label_frame_45.png", "mr_train_1005_label_frame_86.png", "mr_train_1016_label_frame_31.png", "mr_train_1020_label_frame_103.png", "mr_train_1015_label_frame_34.png", "mr_train_1014_label_frame_0.png", "mr_train_1005_label_frame_109.png", "mr_train_1003_label_frame_40.png", "mr_train_1017_label_frame_10.png", "mr_train_1009_label_frame_13.png", "mr_train_1016_label_frame_110.png", "mr_train_1008_label_frame_126.png", "mr_train_1020_label_frame_96.png", "mr_train_1018_label_frame_7.png", "mr_train_1011_label_frame_34.png", "mr_train_1001_label_frame_11.png", "mr_train_1001_label_frame_64.png", "mr_train_1016_label_frame_107.png", "mr_train_1001_label_frame_55.png", "mr_train_1014_label_frame_7.png", "mr_train_1018_label_frame_139.png", "mr_train_1011_label_frame_111.png", "mr_train_1015_label_frame_8.png", "mr_train_1005_label_frame_120.png", "mr_train_1006_label_frame_142.png", "mr_train_1011_label_frame_137.png", "mr_train_1009_label_frame_52.png", "mr_train_1008_label_frame_89.png", "mr_train_1018_label_frame_5.png", "mr_train_1009_label_frame_79.png", "mr_train_1014_label_frame_155.png", "mr_train_1011_label_frame_106.png", "mr_train_1008_label_frame_56.png", "mr_train_1008_label_frame_16.png", "mr_train_1009_label_frame_35.png", "mr_train_1017_label_frame_126.png", "mr_train_1017_label_frame_30.png", "mr_train_1014_label_frame_140.png", "mr_train_1011_label_frame_114.png", "mr_train_1006_label_frame_46.png", "mr_train_1003_label_frame_5.png", "mr_train_1011_label_frame_33.png", "mr_train_1016_label_frame_3.png", "mr_train_1003_label_frame_159.png", "mr_train_1015_label_frame_100.png", "mr_train_1006_label_frame_43.png", "mr_train_1011_label_frame_132.png", "mr_train_1006_label_frame_52.png", "mr_train_1003_label_frame_77.png", "mr_train_1014_label_frame_119.png", "mr_train_1005_label_frame_75.png", "mr_train_1017_label_frame_9.png", "mr_train_1001_label_frame_144.png", "mr_train_1014_label_frame_6.png", "mr_train_1001_label_frame_121.png", "mr_train_1006_label_frame_145.png", "mr_train_1018_label_frame_10.png", "mr_train_1009_label_frame_29.png", "mr_train_1003_label_frame_14.png", "mr_train_1001_label_frame_147.png", 
"mr_train_1016_label_frame_63.png", "mr_train_1001_label_frame_152.png", "mr_train_1016_label_frame_121.png", "mr_train_1011_label_frame_27.png", "mr_train_1009_label_frame_118.png", "mr_train_1015_label_frame_101.png", "mr_train_1008_label_frame_87.png", "mr_train_1001_label_frame_58.png", "mr_train_1016_label_frame_72.png", "mr_train_1014_label_frame_117.png", "mr_train_1003_label_frame_32.png", "mr_train_1020_label_frame_91.png", "mr_train_1006_label_frame_112.png", "mr_train_1016_label_frame_19.png", "mr_train_1008_label_frame_51.png", "mr_train_1018_label_frame_57.png", "mr_train_1017_label_frame_104.png", "mr_train_1006_label_frame_19.png", "mr_train_1015_label_frame_68.png", "mr_train_1006_label_frame_122.png", "mr_train_1015_label_frame_1.png", "mr_train_1005_label_frame_14.png", "mr_train_1020_label_frame_87.png", "mr_train_1018_label_frame_104.png", "mr_train_1008_label_frame_116.png", "mr_train_1017_label_frame_76.png", "mr_train_1009_label_frame_6.png", "mr_train_1020_label_frame_22.png", "mr_train_1017_label_frame_101.png", "mr_train_1001_label_frame_119.png", "mr_train_1017_label_frame_56.png", "mr_train_1016_label_frame_37.png", "mr_train_1020_label_frame_34.png", "mr_train_1011_label_frame_156.png", "mr_train_1018_label_frame_81.png", "mr_train_1008_label_frame_105.png", "mr_train_1018_label_frame_141.png", "mr_train_1003_label_frame_124.png", "mr_train_1008_label_frame_120.png", "mr_train_1016_label_frame_5.png", "mr_train_1011_label_frame_98.png", "mr_train_1020_label_frame_79.png", "mr_train_1003_label_frame_42.png", "mr_train_1018_label_frame_46.png", "mr_train_1016_label_frame_84.png", "mr_train_1015_label_frame_110.png", "mr_train_1015_label_frame_25.png", "mr_train_1017_label_frame_73.png", "mr_train_1001_label_frame_74.png", "mr_train_1006_label_frame_92.png", "mr_train_1015_label_frame_15.png", "mr_train_1018_label_frame_111.png", "mr_train_1017_label_frame_132.png", "mr_train_1011_label_frame_81.png", "mr_train_1003_label_frame_81.png", "mr_train_1005_label_frame_28.png", "mr_train_1020_label_frame_88.png", "mr_train_1018_label_frame_135.png", "mr_train_1018_label_frame_106.png", "mr_train_1001_label_frame_7.png", "mr_train_1015_label_frame_141.png", "mr_train_1016_label_frame_28.png", "mr_train_1015_label_frame_159.png", "mr_train_1020_label_frame_132.png", "mr_train_1014_label_frame_114.png", "mr_train_1014_label_frame_154.png", "mr_train_1001_label_frame_134.png", "mr_train_1005_label_frame_11.png", "mr_train_1003_label_frame_114.png", "mr_train_1015_label_frame_151.png", "mr_train_1001_label_frame_105.png", "mr_train_1014_label_frame_143.png", "mr_train_1011_label_frame_149.png", "mr_train_1014_label_frame_141.png", "mr_train_1005_label_frame_0.png", "mr_train_1009_label_frame_112.png", "mr_train_1006_label_frame_67.png", "mr_train_1014_label_frame_36.png", "mr_train_1006_label_frame_155.png", "mr_train_1017_label_frame_92.png", "mr_train_1008_label_frame_78.png", "mr_train_1011_label_frame_103.png", "mr_train_1003_label_frame_39.png", "mr_train_1015_label_frame_48.png", "mr_train_1014_label_frame_93.png", "mr_train_1014_label_frame_26.png", "mr_train_1008_label_frame_115.png", "mr_train_1018_label_frame_68.png", "mr_train_1015_label_frame_7.png", "mr_train_1016_label_frame_13.png", "mr_train_1015_label_frame_179.png", "mr_train_1014_label_frame_11.png", "mr_train_1008_label_frame_10.png", "mr_train_1011_label_frame_155.png", "mr_train_1015_label_frame_39.png", "mr_train_1015_label_frame_147.png", "mr_train_1003_label_frame_17.png", 
"mr_train_1015_label_frame_79.png", "mr_train_1018_label_frame_110.png", "mr_train_1020_label_frame_31.png", "mr_train_1008_label_frame_101.png", "mr_train_1018_label_frame_131.png", "mr_train_1008_label_frame_68.png", "mr_train_1015_label_frame_195.png", "mr_train_1008_label_frame_85.png", "mr_train_1001_label_frame_15.png", "mr_train_1017_label_frame_28.png", "mr_train_1020_label_frame_86.png", "mr_train_1011_label_frame_87.png", "mr_train_1014_label_frame_37.png", "mr_train_1009_label_frame_86.png", "mr_train_1018_label_frame_52.png", "mr_train_1014_label_frame_149.png", "mr_train_1018_label_frame_19.png", "mr_train_1016_label_frame_39.png", "mr_train_1015_label_frame_21.png", "mr_train_1017_label_frame_55.png", "mr_train_1011_label_frame_90.png", "mr_train_1006_label_frame_50.png", "mr_train_1016_label_frame_88.png", "mr_train_1005_label_frame_102.png", "mr_train_1018_label_frame_142.png", "mr_train_1003_label_frame_43.png", "mr_train_1020_label_frame_110.png", "mr_train_1020_label_frame_3.png", "mr_train_1020_label_frame_60.png", "mr_train_1020_label_frame_75.png", "mr_train_1016_label_frame_6.png", "mr_train_1003_label_frame_98.png", "mr_train_1016_label_frame_120.png", "mr_train_1008_label_frame_73.png", "mr_train_1016_label_frame_33.png", "mr_train_1020_label_frame_76.png", "mr_train_1003_label_frame_19.png", "mr_train_1008_label_frame_15.png", "mr_train_1018_label_frame_24.png", "mr_train_1009_label_frame_100.png", "mr_train_1006_label_frame_137.png", "mr_train_1014_label_frame_53.png", "mr_train_1015_label_frame_148.png", "mr_train_1005_label_frame_127.png", "mr_train_1018_label_frame_69.png", "mr_train_1008_label_frame_5.png", "mr_train_1016_label_frame_83.png", "mr_train_1015_label_frame_188.png", "mr_train_1006_label_frame_147.png", "mr_train_1006_label_frame_101.png", "mr_train_1009_label_frame_58.png", "mr_train_1020_label_frame_81.png", "mr_train_1020_label_frame_9.png", "mr_train_1014_label_frame_51.png", "mr_train_1018_label_frame_75.png", "mr_train_1005_label_frame_117.png", "mr_train_1015_label_frame_167.png", "mr_train_1014_label_frame_47.png", "mr_train_1005_label_frame_15.png", "mr_train_1014_label_frame_129.png", "mr_train_1001_label_frame_130.png", "mr_train_1001_label_frame_50.png", "mr_train_1016_label_frame_32.png", "mr_train_1011_label_frame_134.png", "mr_train_1003_label_frame_58.png", "mr_train_1009_label_frame_14.png", "mr_train_1003_label_frame_133.png", "mr_train_1005_label_frame_72.png", "mr_train_1009_label_frame_56.png", "mr_train_1014_label_frame_106.png", "mr_train_1011_label_frame_58.png", "mr_train_1005_label_frame_74.png", "mr_train_1014_label_frame_96.png", "mr_train_1011_label_frame_66.png", "mr_train_1006_label_frame_53.png", "mr_train_1020_label_frame_113.png", "mr_train_1009_label_frame_92.png", "mr_train_1020_label_frame_124.png", "mr_train_1017_label_frame_31.png", "mr_train_1014_label_frame_19.png", "mr_train_1001_label_frame_139.png", "mr_train_1001_label_frame_138.png", "mr_train_1017_label_frame_69.png", "mr_train_1009_label_frame_110.png", "mr_train_1001_label_frame_114.png", "mr_train_1016_label_frame_100.png", "mr_train_1006_label_frame_118.png", "mr_train_1003_label_frame_35.png", "mr_train_1003_label_frame_36.png", "mr_train_1014_label_frame_125.png", "mr_train_1006_label_frame_23.png", "mr_train_1005_label_frame_22.png", "mr_train_1020_label_frame_11.png", "mr_train_1018_label_frame_53.png", "mr_train_1003_label_frame_44.png", "mr_train_1005_label_frame_41.png", "mr_train_1001_label_frame_71.png", 
"mr_train_1011_label_frame_131.png", "mr_train_1020_label_frame_58.png", "mr_train_1008_label_frame_11.png", "mr_train_1011_label_frame_150.png", "mr_train_1015_label_frame_164.png", "mr_train_1001_label_frame_41.png", "mr_train_1015_label_frame_94.png", "mr_train_1017_label_frame_118.png", "mr_train_1020_label_frame_56.png", "mr_train_1018_label_frame_82.png", "mr_train_1014_label_frame_139.png", "mr_train_1018_label_frame_72.png", "mr_train_1015_label_frame_54.png", "mr_train_1020_label_frame_71.png", "mr_train_1020_label_frame_4.png", "mr_train_1005_label_frame_77.png", "mr_train_1009_label_frame_25.png", "mr_train_1011_label_frame_128.png", "mr_train_1014_label_frame_40.png", "mr_train_1015_label_frame_77.png", "mr_train_1008_label_frame_47.png", "mr_train_1016_label_frame_29.png", "mr_train_1014_label_frame_43.png", "mr_train_1011_label_frame_109.png", "mr_train_1015_label_frame_46.png", "mr_train_1018_label_frame_16.png", "mr_train_1006_label_frame_6.png", "mr_train_1006_label_frame_71.png", "mr_train_1014_label_frame_9.png", "mr_train_1015_label_frame_133.png", "mr_train_1011_label_frame_144.png", "mr_train_1017_label_frame_134.png", "mr_train_1009_label_frame_85.png", "mr_train_1001_label_frame_23.png", "mr_train_1006_label_frame_106.png", "mr_train_1005_label_frame_123.png", "mr_train_1016_label_frame_119.png", "mr_train_1017_label_frame_4.png", "mr_train_1016_label_frame_79.png", "mr_train_1014_label_frame_109.png", "mr_train_1018_label_frame_62.png", "mr_train_1015_label_frame_47.png", "mr_train_1006_label_frame_136.png", "mr_train_1006_label_frame_12.png", "mr_train_1015_label_frame_14.png", "mr_train_1015_label_frame_121.png", "mr_train_1014_label_frame_148.png", "mr_train_1008_label_frame_57.png", "mr_train_1015_label_frame_181.png", "mr_train_1016_label_frame_129.png", "mr_train_1011_label_frame_123.png", "mr_train_1014_label_frame_42.png", "mr_train_1003_label_frame_131.png", "mr_train_1008_label_frame_13.png", "mr_train_1008_label_frame_52.png", "mr_train_1005_label_frame_26.png", "mr_train_1015_label_frame_24.png", "mr_train_1020_label_frame_49.png", "mr_train_1011_label_frame_153.png", "mr_train_1005_label_frame_31.png", "mr_train_1018_label_frame_105.png", "mr_train_1014_label_frame_28.png", "mr_train_1005_label_frame_38.png", "mr_train_1011_label_frame_40.png", "mr_train_1014_label_frame_142.png", "mr_train_1015_label_frame_82.png", "mr_train_1015_label_frame_71.png", "mr_train_1008_label_frame_44.png", "mr_train_1011_label_frame_116.png", "mr_train_1017_label_frame_45.png", "mr_train_1009_label_frame_27.png", "mr_train_1017_label_frame_37.png", "mr_train_1016_label_frame_52.png", "mr_train_1015_label_frame_13.png", "mr_train_1003_label_frame_21.png", "mr_train_1009_label_frame_80.png", "mr_train_1014_label_frame_17.png", "mr_train_1015_label_frame_196.png", "mr_train_1020_label_frame_21.png", "mr_train_1017_label_frame_109.png", "mr_train_1014_label_frame_23.png", "mr_train_1008_label_frame_25.png", "mr_train_1014_label_frame_159.png", "mr_train_1014_label_frame_124.png", "mr_train_1003_label_frame_54.png", "mr_train_1015_label_frame_152.png", "mr_train_1011_label_frame_143.png", "mr_train_1003_label_frame_60.png", "mr_train_1011_label_frame_147.png", "mr_train_1020_label_frame_32.png", "mr_train_1020_label_frame_116.png", "mr_train_1014_label_frame_76.png", "mr_train_1001_label_frame_153.png", "mr_train_1001_label_frame_102.png", "mr_train_1020_label_frame_28.png", "mr_train_1003_label_frame_150.png", "mr_train_1011_label_frame_65.png", 
"mr_train_1003_label_frame_68.png", "mr_train_1018_label_frame_136.png", "mr_train_1003_label_frame_152.png", "mr_train_1011_label_frame_1.png", "mr_train_1014_label_frame_68.png", "mr_train_1003_label_frame_134.png", "mr_train_1014_label_frame_86.png", "mr_train_1016_label_frame_113.png", "mr_train_1020_label_frame_62.png", "mr_train_1018_label_frame_88.png", "mr_train_1020_label_frame_48.png", "mr_train_1016_label_frame_109.png", "mr_train_1006_label_frame_123.png", "mr_train_1003_label_frame_65.png", "mr_train_1001_label_frame_120.png", "mr_train_1009_label_frame_18.png", "mr_train_1015_label_frame_155.png", "mr_train_1014_label_frame_116.png", "mr_train_1003_label_frame_22.png", "mr_train_1016_label_frame_7.png", "mr_train_1011_label_frame_54.png", "mr_train_1016_label_frame_20.png", "mr_train_1016_label_frame_85.png", "mr_train_1009_label_frame_43.png", "mr_train_1006_label_frame_134.png", "mr_train_1005_label_frame_25.png", "mr_train_1009_label_frame_107.png", "mr_train_1016_label_frame_27.png", "mr_train_1011_label_frame_4.png", "mr_train_1006_label_frame_73.png", "mr_train_1017_label_frame_98.png", "mr_train_1018_label_frame_3.png", "mr_train_1008_label_frame_75.png", "mr_train_1017_label_frame_42.png", "mr_train_1003_label_frame_105.png", "mr_train_1006_label_frame_28.png", "mr_train_1008_label_frame_81.png", "mr_train_1011_label_frame_102.png", "mr_train_1018_label_frame_44.png", "mr_train_1003_label_frame_4.png", "mr_train_1005_label_frame_121.png", "mr_train_1015_label_frame_112.png", "mr_train_1011_label_frame_154.png", "mr_train_1009_label_frame_48.png", "mr_train_1018_label_frame_28.png", "mr_train_1016_label_frame_104.png", "mr_train_1003_label_frame_83.png", "mr_train_1006_label_frame_42.png", "mr_train_1009_label_frame_47.png", "mr_train_1015_label_frame_3.png", "mr_train_1017_label_frame_110.png", "mr_train_1008_label_frame_50.png", "mr_train_1009_label_frame_75.png", "mr_train_1020_label_frame_107.png", "mr_train_1020_label_frame_65.png", "mr_train_1003_label_frame_148.png", "mr_train_1018_label_frame_0.png", "mr_train_1009_label_frame_68.png", "mr_train_1003_label_frame_6.png", "mr_train_1018_label_frame_59.png", "mr_train_1009_label_frame_45.png", "mr_train_1008_label_frame_88.png", "mr_train_1001_label_frame_86.png", "mr_train_1018_label_frame_148.png", "mr_train_1003_label_frame_119.png", "mr_train_1018_label_frame_2.png", "mr_train_1006_label_frame_95.png", "mr_train_1015_label_frame_11.png", "mr_train_1006_label_frame_150.png", "mr_train_1001_label_frame_129.png", "mr_train_1015_label_frame_106.png", "mr_train_1001_label_frame_137.png", "mr_train_1014_label_frame_63.png", "mr_train_1006_label_frame_100.png", "mr_train_1014_label_frame_45.png", "mr_train_1011_label_frame_62.png", "mr_train_1001_label_frame_75.png", "mr_train_1009_label_frame_51.png", "mr_train_1020_label_frame_119.png", "mr_train_1008_label_frame_1.png", "mr_train_1016_label_frame_101.png", "mr_train_1006_label_frame_65.png", "mr_train_1003_label_frame_138.png", "mr_train_1017_label_frame_3.png", "mr_train_1020_label_frame_54.png", "mr_train_1003_label_frame_30.png", "mr_train_1020_label_frame_70.png", "mr_train_1014_label_frame_147.png", "mr_train_1018_label_frame_108.png", "mr_train_1003_label_frame_76.png", "mr_train_1008_label_frame_54.png", "mr_train_1020_label_frame_123.png", "mr_train_1006_label_frame_141.png", "mr_train_1009_label_frame_109.png", "mr_train_1018_label_frame_70.png", "mr_train_1003_label_frame_26.png", "mr_train_1016_label_frame_92.png", "mr_train_1015_label_frame_84.png", 
"mr_train_1008_label_frame_59.png", "mr_train_1008_label_frame_127.png", "mr_train_1005_label_frame_129.png", "mr_train_1011_label_frame_7.png", "mr_train_1009_label_frame_62.png", "mr_train_1017_label_frame_65.png", "mr_train_1011_label_frame_113.png", "mr_train_1015_label_frame_127.png", "mr_train_1014_label_frame_71.png", "mr_train_1001_label_frame_92.png", "mr_train_1017_label_frame_86.png", "mr_train_1006_label_frame_24.png", "mr_train_1015_label_frame_90.png", "mr_train_1016_label_frame_81.png", "mr_train_1016_label_frame_91.png", "mr_train_1011_label_frame_11.png", "mr_train_1020_label_frame_120.png", "mr_train_1008_label_frame_18.png", "mr_train_1008_label_frame_122.png", "mr_train_1006_label_frame_135.png", "mr_train_1001_label_frame_36.png", "mr_train_1018_label_frame_146.png", "mr_train_1003_label_frame_86.png", "mr_train_1003_label_frame_137.png", "mr_train_1006_label_frame_78.png", "mr_train_1005_label_frame_108.png", "mr_train_1003_label_frame_136.png", "mr_train_1003_label_frame_113.png", "mr_train_1003_label_frame_12.png", "mr_train_1001_label_frame_4.png", "mr_train_1003_label_frame_118.png", "mr_train_1014_label_frame_132.png", "mr_train_1003_label_frame_63.png", "mr_train_1008_label_frame_91.png", "mr_train_1016_label_frame_54.png", "mr_train_1014_label_frame_90.png", "mr_train_1011_label_frame_35.png", "mr_train_1006_label_frame_91.png", "mr_train_1011_label_frame_133.png", "mr_train_1014_label_frame_20.png", "mr_train_1018_label_frame_107.png", "mr_train_1015_label_frame_81.png", "mr_train_1006_label_frame_70.png", "mr_train_1001_label_frame_49.png", "mr_train_1008_label_frame_42.png", "mr_train_1003_label_frame_112.png", "mr_train_1008_label_frame_111.png", "mr_train_1017_label_frame_90.png", "mr_train_1018_label_frame_109.png", "mr_train_1018_label_frame_94.png", "mr_train_1008_label_frame_86.png", "mr_train_1017_label_frame_79.png", "mr_train_1017_label_frame_122.png", "mr_train_1011_label_frame_95.png", "mr_train_1015_label_frame_198.png", "mr_train_1015_label_frame_92.png", "mr_train_1017_label_frame_60.png", "mr_train_1005_label_frame_45.png", "mr_train_1015_label_frame_23.png", "mr_train_1003_label_frame_153.png", "mr_train_1015_label_frame_62.png", "mr_train_1006_label_frame_80.png", "mr_train_1020_label_frame_38.png", "mr_train_1001_label_frame_117.png", "mr_train_1009_label_frame_102.png", "mr_train_1011_label_frame_38.png", "mr_train_1016_label_frame_60.png", "mr_train_1020_label_frame_39.png", "mr_train_1016_label_frame_98.png", "mr_train_1003_label_frame_18.png", "mr_train_1008_label_frame_2.png", "mr_train_1006_label_frame_144.png", "mr_train_1003_label_frame_154.png", "mr_train_1014_label_frame_81.png", "mr_train_1018_label_frame_54.png", "mr_train_1020_label_frame_18.png", "mr_train_1009_label_frame_70.png", "mr_train_1016_label_frame_68.png", "mr_train_1003_label_frame_75.png", "mr_train_1016_label_frame_126.png", "mr_train_1015_label_frame_128.png", "mr_train_1009_label_frame_11.png", "mr_train_1011_label_frame_104.png", "mr_train_1020_label_frame_40.png", "mr_train_1014_label_frame_151.png", "mr_train_1014_label_frame_134.png", "mr_train_1005_label_frame_99.png", "mr_train_1017_label_frame_100.png", "mr_train_1005_label_frame_36.png", "mr_train_1009_label_frame_31.png", "mr_train_1005_label_frame_106.png", "mr_train_1008_label_frame_29.png", "mr_train_1003_label_frame_71.png", "mr_train_1009_label_frame_37.png", "mr_train_1017_label_frame_106.png", "mr_train_1020_label_frame_7.png", "mr_train_1001_label_frame_51.png", 
"mr_train_1001_label_frame_128.png", "mr_train_1011_label_frame_100.png", "mr_train_1014_label_frame_25.png", "mr_train_1008_label_frame_26.png", "mr_train_1001_label_frame_9.png", "mr_train_1009_label_frame_21.png", "mr_train_1018_label_frame_121.png", "mr_train_1006_label_frame_114.png", "mr_train_1005_label_frame_4.png", "mr_train_1018_label_frame_17.png", "mr_train_1011_label_frame_13.png", "mr_train_1018_label_frame_96.png", "mr_train_1009_label_frame_83.png", "mr_train_1015_label_frame_124.png", "mr_train_1006_label_frame_29.png", "mr_train_1014_label_frame_83.png", "mr_train_1003_label_frame_151.png", "mr_train_1014_label_frame_100.png", "mr_train_1011_label_frame_136.png", "mr_train_1009_label_frame_105.png", "mr_train_1016_label_frame_93.png", "mr_train_1011_label_frame_127.png", "mr_train_1005_label_frame_98.png", "mr_train_1005_label_frame_69.png", "mr_train_1011_label_frame_71.png", "mr_train_1014_label_frame_102.png", "mr_train_1015_label_frame_154.png", "mr_train_1003_label_frame_7.png", "mr_train_1018_label_frame_133.png", "mr_train_1015_label_frame_76.png", "mr_train_1018_label_frame_114.png", "mr_train_1001_label_frame_91.png", "mr_train_1001_label_frame_44.png", "mr_train_1016_label_frame_117.png", "mr_train_1015_label_frame_6.png", "mr_train_1020_label_frame_33.png", "mr_train_1015_label_frame_176.png", "mr_train_1005_label_frame_96.png", "mr_train_1001_label_frame_155.png", "mr_train_1014_label_frame_98.png", "mr_train_1015_label_frame_29.png", "mr_train_1011_label_frame_94.png", "mr_train_1014_label_frame_122.png", "mr_train_1014_label_frame_65.png", "mr_train_1016_label_frame_103.png", "mr_train_1009_label_frame_103.png", "mr_train_1016_label_frame_47.png", "mr_train_1015_label_frame_103.png", "mr_train_1014_label_frame_131.png", "mr_train_1018_label_frame_138.png", "mr_train_1015_label_frame_161.png", "mr_train_1008_label_frame_34.png", "mr_train_1020_label_frame_82.png", "mr_train_1015_label_frame_72.png", "mr_train_1018_label_frame_92.png", "mr_train_1014_label_frame_49.png", "mr_train_1016_label_frame_1.png", "mr_train_1018_label_frame_38.png", "mr_train_1009_label_frame_76.png", "mr_train_1015_label_frame_10.png", "mr_train_1011_label_frame_53.png", "mr_train_1001_label_frame_24.png", "mr_train_1003_label_frame_67.png", "mr_train_1009_label_frame_24.png", "mr_train_1020_label_frame_20.png", "mr_train_1014_label_frame_82.png", "mr_train_1018_label_frame_51.png", "mr_train_1006_label_frame_105.png", "mr_train_1008_label_frame_106.png", "mr_train_1008_label_frame_43.png", "mr_train_1020_label_frame_121.png", "mr_train_1009_label_frame_4.png", "mr_train_1015_label_frame_38.png", "mr_train_1001_label_frame_33.png", "mr_train_1003_label_frame_139.png", "mr_train_1003_label_frame_123.png", "mr_train_1017_label_frame_116.png", "mr_train_1006_label_frame_49.png", "mr_train_1017_label_frame_83.png", "mr_train_1014_label_frame_121.png", "mr_train_1018_label_frame_61.png", "mr_train_1014_label_frame_107.png", "mr_train_1008_label_frame_9.png", "mr_train_1006_label_frame_82.png", "mr_train_1006_label_frame_64.png", "mr_train_1015_label_frame_102.png", "mr_train_1009_label_frame_113.png", "mr_train_1020_label_frame_41.png", "mr_train_1015_label_frame_132.png", "mr_train_1018_label_frame_67.png", "mr_train_1015_label_frame_37.png", "mr_train_1015_label_frame_16.png", "mr_train_1017_label_frame_112.png", "mr_train_1001_label_frame_115.png", "mr_train_1017_label_frame_105.png", "mr_train_1020_label_frame_90.png", "mr_train_1014_label_frame_157.png", 
"mr_train_1008_label_frame_121.png", "mr_train_1008_label_frame_82.png", "mr_train_1018_label_frame_47.png", "mr_train_1016_label_frame_9.png", "mr_train_1014_label_frame_21.png", "mr_train_1001_label_frame_113.png", "mr_train_1015_label_frame_130.png", "mr_train_1006_label_frame_33.png", "mr_train_1003_label_frame_94.png", "mr_train_1020_label_frame_93.png", "mr_train_1005_label_frame_90.png", "mr_train_1018_label_frame_98.png", "mr_train_1011_label_frame_124.png", "mr_train_1001_label_frame_8.png", "mr_train_1003_label_frame_155.png", "mr_train_1018_label_frame_32.png", "mr_train_1005_label_frame_23.png", "mr_train_1005_label_frame_21.png", "mr_train_1008_label_frame_76.png", "mr_train_1017_label_frame_64.png", "mr_train_1017_label_frame_94.png", "mr_train_1018_label_frame_36.png", "mr_train_1001_label_frame_42.png", "mr_train_1017_label_frame_95.png", "mr_train_1020_label_frame_95.png", "mr_train_1014_label_frame_128.png", "mr_train_1008_label_frame_117.png", "mr_train_1006_label_frame_115.png", "mr_train_1017_label_frame_8.png", "mr_train_1014_label_frame_108.png", "mr_train_1008_label_frame_21.png", "mr_train_1015_label_frame_53.png", "mr_train_1001_label_frame_14.png", "mr_train_1016_label_frame_77.png", "mr_train_1018_label_frame_91.png", "mr_train_1009_label_frame_81.png", "mr_train_1011_label_frame_43.png", "mr_train_1011_label_frame_75.png", "mr_train_1008_label_frame_104.png"], "valid_labels": ["mr_train_1007_label_frame_39.png", "mr_train_1007_label_frame_29.png", "mr_train_1013_label_frame_46.png", "mr_train_1004_label_frame_100.png", "mr_train_1013_label_frame_111.png", "mr_train_1007_label_frame_85.png", "mr_train_1007_label_frame_15.png", "mr_train_1013_label_frame_80.png", "mr_train_1007_label_frame_57.png", "mr_train_1004_label_frame_68.png", "mr_train_1007_label_frame_173.png", "mr_train_1013_label_frame_17.png", "mr_train_1007_label_frame_108.png", "mr_train_1004_label_frame_79.png", "mr_train_1007_label_frame_146.png", "mr_train_1007_label_frame_169.png", "mr_train_1007_label_frame_52.png", "mr_train_1004_label_frame_51.png", "mr_train_1007_label_frame_141.png", "mr_train_1007_label_frame_177.png", "mr_train_1013_label_frame_39.png", "mr_train_1007_label_frame_73.png", "mr_train_1004_label_frame_23.png", "mr_train_1004_label_frame_82.png", "mr_train_1007_label_frame_16.png", "mr_train_1013_label_frame_1.png", "mr_train_1007_label_frame_110.png", "mr_train_1007_label_frame_102.png", "mr_train_1007_label_frame_40.png", "mr_train_1013_label_frame_109.png", "mr_train_1013_label_frame_87.png", "mr_train_1013_label_frame_41.png", "mr_train_1007_label_frame_45.png", "mr_train_1013_label_frame_56.png", "mr_train_1007_label_frame_134.png", "mr_train_1007_label_frame_111.png", "mr_train_1007_label_frame_147.png", "mr_train_1007_label_frame_34.png", "mr_train_1004_label_frame_29.png", "mr_train_1013_label_frame_79.png", "mr_train_1007_label_frame_118.png", "mr_train_1007_label_frame_72.png", "mr_train_1013_label_frame_82.png", "mr_train_1004_label_frame_110.png", "mr_train_1007_label_frame_56.png", "mr_train_1013_label_frame_81.png", "mr_train_1004_label_frame_119.png", "mr_train_1013_label_frame_61.png", "mr_train_1007_label_frame_80.png", "mr_train_1013_label_frame_48.png", "mr_train_1013_label_frame_74.png", "mr_train_1013_label_frame_60.png", "mr_train_1013_label_frame_40.png", "mr_train_1004_label_frame_73.png", "mr_train_1004_label_frame_89.png", "mr_train_1004_label_frame_109.png", "mr_train_1007_label_frame_42.png", "mr_train_1007_label_frame_100.png", 
"mr_train_1004_label_frame_25.png", "mr_train_1004_label_frame_118.png", "mr_train_1007_label_frame_120.png", "mr_train_1007_label_frame_28.png", "mr_train_1004_label_frame_104.png", "mr_train_1013_label_frame_30.png", "mr_train_1013_label_frame_104.png", "mr_train_1007_label_frame_60.png", "mr_train_1007_label_frame_93.png", "mr_train_1004_label_frame_117.png", "mr_train_1013_label_frame_3.png", "mr_train_1013_label_frame_10.png", "mr_train_1007_label_frame_178.png", "mr_train_1007_label_frame_71.png", "mr_train_1004_label_frame_62.png", "mr_train_1007_label_frame_122.png", "mr_train_1013_label_frame_83.png", "mr_train_1004_label_frame_17.png", "mr_train_1004_label_frame_84.png", "mr_train_1007_label_frame_27.png", "mr_train_1013_label_frame_85.png", "mr_train_1004_label_frame_60.png", "mr_train_1004_label_frame_86.png", "mr_train_1007_label_frame_172.png", "mr_train_1004_label_frame_45.png", "mr_train_1013_label_frame_66.png", "mr_train_1007_label_frame_23.png", "mr_train_1007_label_frame_61.png", "mr_train_1007_label_frame_44.png", "mr_train_1004_label_frame_1.png", "mr_train_1004_label_frame_61.png", "mr_train_1013_label_frame_70.png", "mr_train_1007_label_frame_105.png", "mr_train_1013_label_frame_0.png", "mr_train_1013_label_frame_78.png", "mr_train_1004_label_frame_101.png", "mr_train_1007_label_frame_107.png", "mr_train_1007_label_frame_158.png", "mr_train_1007_label_frame_30.png", "mr_train_1007_label_frame_149.png", "mr_train_1007_label_frame_8.png", "mr_train_1004_label_frame_38.png", "mr_train_1013_label_frame_24.png", "mr_train_1004_label_frame_5.png", "mr_train_1004_label_frame_71.png", "mr_train_1007_label_frame_43.png", "mr_train_1007_label_frame_174.png", "mr_train_1007_label_frame_115.png", "mr_train_1007_label_frame_54.png", "mr_train_1004_label_frame_48.png", "mr_train_1004_label_frame_27.png", "mr_train_1007_label_frame_106.png", "mr_train_1007_label_frame_176.png", "mr_train_1007_label_frame_95.png", "mr_train_1004_label_frame_116.png", "mr_train_1004_label_frame_114.png", "mr_train_1004_label_frame_47.png", "mr_train_1004_label_frame_6.png", "mr_train_1013_label_frame_43.png", "mr_train_1007_label_frame_89.png", "mr_train_1013_label_frame_7.png", "mr_train_1007_label_frame_152.png", "mr_train_1007_label_frame_137.png", "mr_train_1007_label_frame_25.png", "mr_train_1007_label_frame_104.png", "mr_train_1004_label_frame_58.png", "mr_train_1007_label_frame_133.png", "mr_train_1004_label_frame_7.png", "mr_train_1007_label_frame_101.png", "mr_train_1007_label_frame_11.png", "mr_train_1007_label_frame_157.png", "mr_train_1004_label_frame_93.png", "mr_train_1013_label_frame_93.png", "mr_train_1004_label_frame_57.png", "mr_train_1004_label_frame_43.png", "mr_train_1013_label_frame_26.png", "mr_train_1004_label_frame_69.png", "mr_train_1004_label_frame_10.png", "mr_train_1007_label_frame_81.png", "mr_train_1013_label_frame_75.png", "mr_train_1007_label_frame_17.png", "mr_train_1007_label_frame_166.png", "mr_train_1013_label_frame_14.png", "mr_train_1007_label_frame_20.png", "mr_train_1004_label_frame_55.png", "mr_train_1013_label_frame_25.png", "mr_train_1004_label_frame_12.png", "mr_train_1007_label_frame_114.png", "mr_train_1007_label_frame_5.png", "mr_train_1004_label_frame_11.png", "mr_train_1007_label_frame_126.png", "mr_train_1007_label_frame_63.png", "mr_train_1007_label_frame_117.png", "mr_train_1007_label_frame_49.png", "mr_train_1004_label_frame_66.png", "mr_train_1013_label_frame_15.png", "mr_train_1004_label_frame_35.png", "mr_train_1007_label_frame_113.png", 
"mr_train_1004_label_frame_8.png", "mr_train_1004_label_frame_74.png", "mr_train_1013_label_frame_89.png", "mr_train_1013_label_frame_71.png", "mr_train_1013_label_frame_107.png", "mr_train_1007_label_frame_132.png", "mr_train_1013_label_frame_106.png", "mr_train_1004_label_frame_94.png", "mr_train_1007_label_frame_32.png", "mr_train_1013_label_frame_54.png", "mr_train_1004_label_frame_36.png", "mr_train_1004_label_frame_3.png", "mr_train_1013_label_frame_22.png", "mr_train_1013_label_frame_103.png", "mr_train_1013_label_frame_8.png", "mr_train_1013_label_frame_95.png", "mr_train_1007_label_frame_37.png", "mr_train_1013_label_frame_34.png", "mr_train_1007_label_frame_128.png", "mr_train_1004_label_frame_70.png", "mr_train_1013_label_frame_42.png", "mr_train_1013_label_frame_50.png", "mr_train_1007_label_frame_164.png", "mr_train_1007_label_frame_163.png", "mr_train_1004_label_frame_52.png", "mr_train_1007_label_frame_22.png", "mr_train_1013_label_frame_53.png", "mr_train_1004_label_frame_50.png", "mr_train_1007_label_frame_160.png", "mr_train_1007_label_frame_4.png", "mr_train_1007_label_frame_116.png", "mr_train_1013_label_frame_13.png", "mr_train_1004_label_frame_42.png", "mr_train_1007_label_frame_99.png", "mr_train_1007_label_frame_96.png", "mr_train_1013_label_frame_84.png", "mr_train_1013_label_frame_63.png", "mr_train_1007_label_frame_144.png", "mr_train_1013_label_frame_58.png", "mr_train_1007_label_frame_50.png", "mr_train_1007_label_frame_86.png", "mr_train_1007_label_frame_68.png", "mr_train_1007_label_frame_14.png", "mr_train_1007_label_frame_98.png", "mr_train_1007_label_frame_79.png", "mr_train_1007_label_frame_13.png", "mr_train_1004_label_frame_87.png", "mr_train_1007_label_frame_66.png", "mr_train_1004_label_frame_49.png", "mr_train_1013_label_frame_16.png", "mr_train_1007_label_frame_51.png", "mr_train_1007_label_frame_119.png", "mr_train_1004_label_frame_44.png", "mr_train_1004_label_frame_19.png", "mr_train_1004_label_frame_31.png", "mr_train_1004_label_frame_115.png", "mr_train_1007_label_frame_175.png", "mr_train_1013_label_frame_45.png", "mr_train_1004_label_frame_40.png", "mr_train_1013_label_frame_47.png", "mr_train_1013_label_frame_29.png", "mr_train_1007_label_frame_33.png", "mr_train_1007_label_frame_26.png", "mr_train_1013_label_frame_99.png", "mr_train_1007_label_frame_65.png", "mr_train_1007_label_frame_151.png", "mr_train_1004_label_frame_59.png", "mr_train_1004_label_frame_22.png", "mr_train_1007_label_frame_21.png", "mr_train_1007_label_frame_155.png", "mr_train_1004_label_frame_15.png", "mr_train_1004_label_frame_65.png", "mr_train_1007_label_frame_83.png", "mr_train_1013_label_frame_94.png", "mr_train_1004_label_frame_37.png", "mr_train_1004_label_frame_103.png", "mr_train_1007_label_frame_3.png", "mr_train_1004_label_frame_98.png", "mr_train_1007_label_frame_159.png", "mr_train_1004_label_frame_4.png", "mr_train_1004_label_frame_30.png", "mr_train_1007_label_frame_0.png", "mr_train_1007_label_frame_6.png", "mr_train_1007_label_frame_162.png", "mr_train_1004_label_frame_41.png", "mr_train_1007_label_frame_75.png", "mr_train_1013_label_frame_62.png", "mr_train_1007_label_frame_109.png", "mr_train_1007_label_frame_124.png", "mr_train_1013_label_frame_110.png", "mr_train_1004_label_frame_95.png", "mr_train_1013_label_frame_98.png", "mr_train_1007_label_frame_38.png", "mr_train_1004_label_frame_91.png", "mr_train_1013_label_frame_101.png", "mr_train_1007_label_frame_1.png", "mr_train_1013_label_frame_108.png", "mr_train_1007_label_frame_84.png", 
"mr_train_1007_label_frame_139.png", "mr_train_1004_label_frame_64.png", "mr_train_1004_label_frame_102.png", "mr_train_1007_label_frame_78.png", "mr_train_1004_label_frame_77.png", "mr_train_1007_label_frame_19.png", "mr_train_1004_label_frame_28.png", "mr_train_1007_label_frame_103.png", "mr_train_1004_label_frame_106.png", "mr_train_1004_label_frame_53.png", "mr_train_1004_label_frame_113.png", "mr_train_1007_label_frame_2.png", "mr_train_1004_label_frame_63.png", "mr_train_1007_label_frame_18.png", "mr_train_1007_label_frame_135.png", "mr_train_1004_label_frame_32.png", "mr_train_1013_label_frame_97.png", "mr_train_1013_label_frame_90.png", "mr_train_1007_label_frame_130.png", "mr_train_1007_label_frame_123.png", "mr_train_1004_label_frame_21.png", "mr_train_1007_label_frame_136.png", "mr_train_1013_label_frame_35.png", "mr_train_1013_label_frame_73.png", "mr_train_1004_label_frame_14.png", "mr_train_1004_label_frame_85.png", "mr_train_1007_label_frame_167.png", "mr_train_1007_label_frame_131.png", "mr_train_1004_label_frame_96.png", "mr_train_1013_label_frame_55.png", "mr_train_1007_label_frame_47.png", "mr_train_1007_label_frame_148.png", "mr_train_1013_label_frame_37.png", "mr_train_1007_label_frame_77.png", "mr_train_1007_label_frame_161.png", "mr_train_1007_label_frame_55.png", "mr_train_1004_label_frame_34.png", "mr_train_1013_label_frame_6.png", "mr_train_1013_label_frame_102.png", "mr_train_1007_label_frame_41.png", "mr_train_1004_label_frame_13.png", "mr_train_1007_label_frame_170.png", "mr_train_1007_label_frame_76.png", "mr_train_1004_label_frame_97.png", "mr_train_1007_label_frame_91.png", "mr_train_1007_label_frame_129.png", "mr_train_1004_label_frame_16.png", "mr_train_1013_label_frame_38.png", "mr_train_1013_label_frame_100.png", "mr_train_1013_label_frame_57.png", "mr_train_1004_label_frame_83.png", "mr_train_1013_label_frame_33.png", "mr_train_1004_label_frame_2.png", "mr_train_1013_label_frame_67.png", "mr_train_1007_label_frame_35.png", "mr_train_1013_label_frame_91.png", "mr_train_1007_label_frame_140.png", "mr_train_1007_label_frame_36.png", "mr_train_1007_label_frame_12.png", "mr_train_1007_label_frame_145.png", "mr_train_1013_label_frame_36.png", "mr_train_1007_label_frame_142.png", "mr_train_1004_label_frame_26.png", "mr_train_1007_label_frame_24.png", "mr_train_1013_label_frame_23.png", "mr_train_1007_label_frame_62.png", "mr_train_1004_label_frame_9.png", "mr_train_1013_label_frame_52.png", "mr_train_1007_label_frame_179.png", "mr_train_1004_label_frame_88.png", "mr_train_1007_label_frame_168.png", "mr_train_1007_label_frame_171.png", "mr_train_1007_label_frame_125.png", "mr_train_1013_label_frame_9.png", "mr_train_1007_label_frame_69.png", "mr_train_1007_label_frame_112.png", "mr_train_1004_label_frame_108.png", "mr_train_1013_label_frame_44.png", "mr_train_1004_label_frame_111.png", "mr_train_1004_label_frame_39.png", "mr_train_1013_label_frame_27.png", "mr_train_1007_label_frame_88.png", "mr_train_1013_label_frame_88.png", "mr_train_1013_label_frame_4.png", "mr_train_1004_label_frame_72.png", "mr_train_1007_label_frame_9.png", "mr_train_1004_label_frame_107.png", "mr_train_1007_label_frame_48.png", "mr_train_1007_label_frame_53.png", "mr_train_1007_label_frame_67.png", "mr_train_1013_label_frame_76.png", "mr_train_1007_label_frame_31.png", "mr_train_1013_label_frame_12.png", "mr_train_1007_label_frame_64.png", "mr_train_1004_label_frame_78.png", "mr_train_1007_label_frame_82.png", "mr_train_1007_label_frame_74.png", "mr_train_1007_label_frame_70.png", 
"mr_train_1013_label_frame_59.png", "mr_train_1004_label_frame_75.png", "mr_train_1013_label_frame_28.png", "mr_train_1013_label_frame_49.png", "mr_train_1013_label_frame_11.png", "mr_train_1004_label_frame_0.png", "mr_train_1007_label_frame_154.png", "mr_train_1007_label_frame_46.png", "mr_train_1013_label_frame_19.png", "mr_train_1004_label_frame_81.png", "mr_train_1004_label_frame_56.png", "mr_train_1007_label_frame_121.png", "mr_train_1007_label_frame_90.png", "mr_train_1013_label_frame_96.png", "mr_train_1004_label_frame_76.png", "mr_train_1007_label_frame_165.png", "mr_train_1004_label_frame_46.png", "mr_train_1013_label_frame_68.png", "mr_train_1013_label_frame_51.png", "mr_train_1004_label_frame_33.png", "mr_train_1013_label_frame_69.png", "mr_train_1007_label_frame_94.png", "mr_train_1007_label_frame_92.png", "mr_train_1013_label_frame_20.png", "mr_train_1013_label_frame_65.png", "mr_train_1004_label_frame_67.png", "mr_train_1007_label_frame_156.png", "mr_train_1007_label_frame_10.png", "mr_train_1013_label_frame_86.png", "mr_train_1013_label_frame_32.png", "mr_train_1004_label_frame_80.png", "mr_train_1013_label_frame_2.png", "mr_train_1007_label_frame_87.png", "mr_train_1007_label_frame_150.png", "mr_train_1013_label_frame_18.png", "mr_train_1004_label_frame_18.png", "mr_train_1007_label_frame_59.png", "mr_train_1007_label_frame_58.png", "mr_train_1013_label_frame_105.png", "mr_train_1004_label_frame_90.png", "mr_train_1004_label_frame_92.png", "mr_train_1013_label_frame_31.png", "mr_train_1004_label_frame_99.png", "mr_train_1004_label_frame_24.png", "mr_train_1013_label_frame_5.png", "mr_train_1007_label_frame_138.png", "mr_train_1013_label_frame_92.png", "mr_train_1013_label_frame_72.png", "mr_train_1007_label_frame_143.png", "mr_train_1013_label_frame_64.png", "mr_train_1007_label_frame_153.png", "mr_train_1013_label_frame_77.png", "mr_train_1004_label_frame_105.png", "mr_train_1007_label_frame_127.png", "mr_train_1004_label_frame_112.png", "mr_train_1007_label_frame_7.png", "mr_train_1013_label_frame_21.png", "mr_train_1007_label_frame_97.png", "mr_train_1004_label_frame_54.png", "mr_train_1004_label_frame_20.png"], "test_labels": ["mr_train_1002_label_frame_38.png", "mr_train_1002_label_frame_6.png", "mr_train_1019_label_frame_127.png", "mr_train_1019_label_frame_104.png", "mr_train_1002_label_frame_3.png", "mr_train_1010_label_frame_141.png", "mr_train_1012_label_frame_10.png", "mr_train_1010_label_frame_143.png", "mr_train_1019_label_frame_76.png", "mr_train_1002_label_frame_55.png", "mr_train_1019_label_frame_80.png", "mr_train_1002_label_frame_47.png", "mr_train_1010_label_frame_32.png", "mr_train_1010_label_frame_48.png", "mr_train_1012_label_frame_91.png", "mr_train_1010_label_frame_17.png", "mr_train_1019_label_frame_93.png", "mr_train_1002_label_frame_113.png", "mr_train_1019_label_frame_40.png", "mr_train_1019_label_frame_114.png", "mr_train_1012_label_frame_106.png", "mr_train_1002_label_frame_21.png", "mr_train_1012_label_frame_84.png", "mr_train_1019_label_frame_28.png", "mr_train_1012_label_frame_14.png", "mr_train_1002_label_frame_8.png", "mr_train_1002_label_frame_97.png", "mr_train_1012_label_frame_107.png", "mr_train_1012_label_frame_8.png", "mr_train_1010_label_frame_112.png", "mr_train_1012_label_frame_68.png", "mr_train_1019_label_frame_23.png", "mr_train_1012_label_frame_35.png", "mr_train_1002_label_frame_48.png", "mr_train_1010_label_frame_42.png", "mr_train_1010_label_frame_116.png", "mr_train_1010_label_frame_123.png", 
"mr_train_1010_label_frame_39.png", "mr_train_1002_label_frame_99.png", "mr_train_1002_label_frame_111.png", "mr_train_1010_label_frame_49.png", "mr_train_1010_label_frame_82.png", "mr_train_1010_label_frame_156.png", "mr_train_1002_label_frame_98.png", "mr_train_1019_label_frame_88.png", "mr_train_1012_label_frame_101.png", "mr_train_1012_label_frame_7.png", "mr_train_1019_label_frame_75.png", "mr_train_1010_label_frame_81.png", "mr_train_1019_label_frame_56.png", "mr_train_1019_label_frame_9.png", "mr_train_1010_label_frame_6.png", "mr_train_1010_label_frame_128.png", "mr_train_1010_label_frame_151.png", "mr_train_1012_label_frame_37.png", "mr_train_1010_label_frame_22.png", "mr_train_1010_label_frame_135.png", "mr_train_1002_label_frame_36.png", "mr_train_1012_label_frame_47.png", "mr_train_1012_label_frame_50.png", "mr_train_1012_label_frame_40.png", "mr_train_1012_label_frame_105.png", "mr_train_1019_label_frame_50.png", "mr_train_1012_label_frame_93.png", "mr_train_1010_label_frame_107.png", "mr_train_1002_label_frame_75.png", "mr_train_1019_label_frame_72.png", "mr_train_1010_label_frame_132.png", "mr_train_1002_label_frame_94.png", "mr_train_1002_label_frame_110.png", "mr_train_1019_label_frame_90.png", "mr_train_1012_label_frame_3.png", "mr_train_1019_label_frame_36.png", "mr_train_1010_label_frame_126.png", "mr_train_1010_label_frame_70.png", "mr_train_1002_label_frame_100.png", "mr_train_1012_label_frame_60.png", "mr_train_1012_label_frame_52.png", "mr_train_1012_label_frame_25.png", "mr_train_1002_label_frame_18.png", "mr_train_1010_label_frame_130.png", "mr_train_1010_label_frame_121.png", "mr_train_1019_label_frame_64.png", "mr_train_1012_label_frame_110.png", "mr_train_1002_label_frame_33.png", "mr_train_1002_label_frame_72.png", "mr_train_1010_label_frame_109.png", "mr_train_1019_label_frame_110.png", "mr_train_1012_label_frame_121.png", "mr_train_1012_label_frame_0.png", "mr_train_1010_label_frame_159.png", "mr_train_1002_label_frame_24.png", "mr_train_1002_label_frame_2.png", "mr_train_1012_label_frame_88.png", "mr_train_1010_label_frame_14.png", "mr_train_1019_label_frame_89.png", "mr_train_1010_label_frame_129.png", "mr_train_1010_label_frame_83.png", "mr_train_1019_label_frame_129.png", "mr_train_1019_label_frame_37.png", "mr_train_1019_label_frame_102.png", "mr_train_1002_label_frame_35.png", "mr_train_1010_label_frame_131.png", "mr_train_1010_label_frame_120.png", "mr_train_1019_label_frame_39.png", "mr_train_1002_label_frame_71.png", "mr_train_1010_label_frame_146.png", "mr_train_1019_label_frame_98.png", "mr_train_1012_label_frame_59.png", "mr_train_1019_label_frame_46.png", "mr_train_1019_label_frame_54.png", "mr_train_1010_label_frame_28.png", "mr_train_1012_label_frame_54.png", "mr_train_1002_label_frame_90.png", "mr_train_1019_label_frame_42.png", "mr_train_1010_label_frame_110.png", "mr_train_1010_label_frame_140.png", "mr_train_1019_label_frame_118.png", "mr_train_1012_label_frame_76.png", "mr_train_1010_label_frame_38.png", "mr_train_1019_label_frame_1.png", "mr_train_1019_label_frame_95.png", "mr_train_1010_label_frame_68.png", "mr_train_1010_label_frame_93.png", "mr_train_1010_label_frame_117.png", "mr_train_1010_label_frame_59.png", "mr_train_1010_label_frame_58.png", "mr_train_1010_label_frame_147.png", "mr_train_1010_label_frame_66.png", "mr_train_1019_label_frame_7.png", "mr_train_1010_label_frame_7.png", "mr_train_1012_label_frame_28.png", "mr_train_1010_label_frame_23.png", "mr_train_1002_label_frame_86.png", "mr_train_1002_label_frame_37.png", 
"mr_train_1002_label_frame_39.png", "mr_train_1002_label_frame_60.png", "mr_train_1012_label_frame_126.png", "mr_train_1010_label_frame_74.png", "mr_train_1012_label_frame_74.png", "mr_train_1012_label_frame_49.png", "mr_train_1002_label_frame_16.png", "mr_train_1012_label_frame_57.png", "mr_train_1010_label_frame_99.png", "mr_train_1012_label_frame_69.png", "mr_train_1010_label_frame_78.png", "mr_train_1012_label_frame_75.png", "mr_train_1010_label_frame_47.png", "mr_train_1002_label_frame_119.png", "mr_train_1002_label_frame_81.png", "mr_train_1012_label_frame_45.png", "mr_train_1002_label_frame_63.png", "mr_train_1002_label_frame_85.png", "mr_train_1012_label_frame_46.png", "mr_train_1002_label_frame_120.png", "mr_train_1002_label_frame_76.png", "mr_train_1010_label_frame_127.png", "mr_train_1010_label_frame_62.png", "mr_train_1012_label_frame_9.png", "mr_train_1010_label_frame_55.png", "mr_train_1019_label_frame_101.png", "mr_train_1019_label_frame_134.png", "mr_train_1012_label_frame_41.png", "mr_train_1002_label_frame_4.png", "mr_train_1012_label_frame_67.png", "mr_train_1002_label_frame_42.png", "mr_train_1012_label_frame_1.png", "mr_train_1012_label_frame_36.png", "mr_train_1010_label_frame_79.png", "mr_train_1010_label_frame_152.png", "mr_train_1019_label_frame_26.png", "mr_train_1019_label_frame_123.png", "mr_train_1019_label_frame_103.png", "mr_train_1002_label_frame_7.png", "mr_train_1019_label_frame_70.png", "mr_train_1010_label_frame_43.png", "mr_train_1019_label_frame_108.png", "mr_train_1019_label_frame_97.png", "mr_train_1019_label_frame_27.png", "mr_train_1002_label_frame_80.png", "mr_train_1012_label_frame_39.png", "mr_train_1010_label_frame_54.png", "mr_train_1010_label_frame_45.png", "mr_train_1002_label_frame_109.png", "mr_train_1019_label_frame_55.png", "mr_train_1002_label_frame_117.png", "mr_train_1019_label_frame_96.png", "mr_train_1002_label_frame_91.png", "mr_train_1019_label_frame_78.png", "mr_train_1012_label_frame_94.png", "mr_train_1012_label_frame_64.png", "mr_train_1019_label_frame_87.png", "mr_train_1010_label_frame_5.png", "mr_train_1002_label_frame_52.png", "mr_train_1002_label_frame_25.png", "mr_train_1002_label_frame_92.png", "mr_train_1012_label_frame_11.png", "mr_train_1012_label_frame_23.png", "mr_train_1019_label_frame_44.png", "mr_train_1010_label_frame_35.png", "mr_train_1019_label_frame_86.png", "mr_train_1010_label_frame_102.png", "mr_train_1019_label_frame_4.png", "mr_train_1012_label_frame_81.png", "mr_train_1019_label_frame_120.png", "mr_train_1019_label_frame_122.png", "mr_train_1010_label_frame_111.png", "mr_train_1010_label_frame_80.png", "mr_train_1010_label_frame_88.png", "mr_train_1010_label_frame_15.png", "mr_train_1010_label_frame_122.png", "mr_train_1012_label_frame_97.png", "mr_train_1010_label_frame_63.png", "mr_train_1002_label_frame_101.png", "mr_train_1012_label_frame_83.png", "mr_train_1019_label_frame_62.png", "mr_train_1012_label_frame_77.png", "mr_train_1010_label_frame_9.png", "mr_train_1012_label_frame_114.png", "mr_train_1002_label_frame_79.png", "mr_train_1012_label_frame_20.png", "mr_train_1019_label_frame_77.png", "mr_train_1012_label_frame_55.png", "mr_train_1012_label_frame_96.png", "mr_train_1012_label_frame_18.png", "mr_train_1019_label_frame_16.png", "mr_train_1019_label_frame_130.png", "mr_train_1012_label_frame_98.png", "mr_train_1019_label_frame_34.png", "mr_train_1002_label_frame_17.png", "mr_train_1002_label_frame_5.png", "mr_train_1002_label_frame_46.png", "mr_train_1012_label_frame_62.png", 
"mr_train_1019_label_frame_63.png", "mr_train_1010_label_frame_86.png", "mr_train_1019_label_frame_105.png", "mr_train_1019_label_frame_117.png", "mr_train_1012_label_frame_95.png", "mr_train_1002_label_frame_65.png", "mr_train_1010_label_frame_73.png", "mr_train_1019_label_frame_94.png", "mr_train_1012_label_frame_80.png", "mr_train_1002_label_frame_69.png", "mr_train_1002_label_frame_116.png", "mr_train_1019_label_frame_22.png", "mr_train_1002_label_frame_87.png", "mr_train_1012_label_frame_27.png", "mr_train_1019_label_frame_52.png", "mr_train_1019_label_frame_74.png", "mr_train_1002_label_frame_77.png", "mr_train_1010_label_frame_92.png", "mr_train_1002_label_frame_84.png", "mr_train_1010_label_frame_153.png", "mr_train_1012_label_frame_53.png", "mr_train_1019_label_frame_65.png", "mr_train_1010_label_frame_29.png", "mr_train_1019_label_frame_121.png", "mr_train_1012_label_frame_119.png", "mr_train_1012_label_frame_26.png", "mr_train_1019_label_frame_111.png", "mr_train_1010_label_frame_61.png", "mr_train_1019_label_frame_99.png", "mr_train_1010_label_frame_100.png", "mr_train_1019_label_frame_29.png", "mr_train_1019_label_frame_119.png", "mr_train_1002_label_frame_41.png", "mr_train_1002_label_frame_66.png", "mr_train_1002_label_frame_43.png", "mr_train_1012_label_frame_42.png", "mr_train_1019_label_frame_32.png", "mr_train_1010_label_frame_60.png", "mr_train_1002_label_frame_103.png", "mr_train_1010_label_frame_139.png", "mr_train_1019_label_frame_59.png", "mr_train_1012_label_frame_103.png", "mr_train_1019_label_frame_82.png", "mr_train_1002_label_frame_56.png", "mr_train_1002_label_frame_27.png", "mr_train_1010_label_frame_106.png", "mr_train_1019_label_frame_11.png", "mr_train_1019_label_frame_107.png", "mr_train_1012_label_frame_82.png", "mr_train_1010_label_frame_46.png", "mr_train_1012_label_frame_79.png", "mr_train_1012_label_frame_125.png", "mr_train_1019_label_frame_57.png", "mr_train_1019_label_frame_66.png", "mr_train_1010_label_frame_103.png", "mr_train_1012_label_frame_21.png", "mr_train_1002_label_frame_102.png", "mr_train_1019_label_frame_49.png", "mr_train_1010_label_frame_137.png", "mr_train_1010_label_frame_158.png", "mr_train_1012_label_frame_5.png", "mr_train_1010_label_frame_124.png", "mr_train_1012_label_frame_111.png", "mr_train_1012_label_frame_123.png", "mr_train_1012_label_frame_6.png", "mr_train_1010_label_frame_24.png", "mr_train_1010_label_frame_133.png", "mr_train_1019_label_frame_43.png", "mr_train_1002_label_frame_115.png", "mr_train_1010_label_frame_118.png", "mr_train_1010_label_frame_65.png", "mr_train_1012_label_frame_99.png", "mr_train_1010_label_frame_56.png", "mr_train_1010_label_frame_11.png", "mr_train_1010_label_frame_108.png", "mr_train_1002_label_frame_78.png", "mr_train_1019_label_frame_112.png", "mr_train_1012_label_frame_17.png", "mr_train_1010_label_frame_138.png", "mr_train_1002_label_frame_23.png", "mr_train_1002_label_frame_107.png", "mr_train_1002_label_frame_12.png", "mr_train_1012_label_frame_2.png", "mr_train_1002_label_frame_34.png", "mr_train_1002_label_frame_30.png", "mr_train_1019_label_frame_61.png", "mr_train_1012_label_frame_31.png", "mr_train_1012_label_frame_108.png", "mr_train_1002_label_frame_122.png", "mr_train_1010_label_frame_154.png", "mr_train_1012_label_frame_30.png", "mr_train_1012_label_frame_15.png", "mr_train_1002_label_frame_32.png", "mr_train_1010_label_frame_89.png", "mr_train_1019_label_frame_19.png", "mr_train_1002_label_frame_93.png", "mr_train_1002_label_frame_20.png", 
"mr_train_1010_label_frame_31.png", "mr_train_1012_label_frame_78.png", "mr_train_1010_label_frame_119.png", "mr_train_1019_label_frame_17.png", "mr_train_1019_label_frame_53.png", "mr_train_1012_label_frame_44.png", "mr_train_1010_label_frame_52.png", "mr_train_1010_label_frame_142.png", "mr_train_1010_label_frame_87.png", "mr_train_1010_label_frame_19.png", "mr_train_1002_label_frame_83.png", "mr_train_1002_label_frame_10.png", "mr_train_1012_label_frame_32.png", "mr_train_1012_label_frame_66.png", "mr_train_1002_label_frame_0.png", "mr_train_1019_label_frame_124.png", "mr_train_1010_label_frame_136.png", "mr_train_1012_label_frame_16.png", "mr_train_1010_label_frame_16.png", "mr_train_1019_label_frame_51.png", "mr_train_1010_label_frame_67.png", "mr_train_1010_label_frame_1.png", "mr_train_1019_label_frame_3.png", "mr_train_1002_label_frame_114.png", "mr_train_1012_label_frame_104.png", "mr_train_1012_label_frame_34.png", "mr_train_1010_label_frame_97.png", "mr_train_1002_label_frame_9.png", "mr_train_1019_label_frame_2.png", "mr_train_1012_label_frame_87.png", "mr_train_1002_label_frame_50.png", "mr_train_1010_label_frame_95.png", "mr_train_1019_label_frame_81.png", "mr_train_1019_label_frame_116.png", "mr_train_1019_label_frame_18.png", "mr_train_1019_label_frame_60.png", "mr_train_1010_label_frame_150.png", "mr_train_1002_label_frame_14.png", "mr_train_1012_label_frame_120.png", "mr_train_1012_label_frame_127.png", "mr_train_1019_label_frame_21.png", "mr_train_1002_label_frame_123.png", "mr_train_1019_label_frame_48.png", "mr_train_1012_label_frame_19.png", "mr_train_1012_label_frame_92.png", "mr_train_1010_label_frame_0.png", "mr_train_1010_label_frame_26.png", "mr_train_1019_label_frame_68.png", "mr_train_1012_label_frame_89.png", "mr_train_1010_label_frame_8.png", "mr_train_1010_label_frame_76.png", "mr_train_1010_label_frame_36.png", "mr_train_1019_label_frame_125.png", "mr_train_1002_label_frame_19.png", "mr_train_1010_label_frame_51.png", "mr_train_1019_label_frame_132.png", "mr_train_1012_label_frame_118.png", "mr_train_1002_label_frame_67.png", "mr_train_1019_label_frame_12.png", "mr_train_1002_label_frame_96.png", "mr_train_1019_label_frame_79.png", "mr_train_1012_label_frame_70.png", "mr_train_1010_label_frame_145.png", "mr_train_1010_label_frame_25.png", "mr_train_1002_label_frame_57.png", "mr_train_1012_label_frame_61.png", "mr_train_1010_label_frame_44.png", "mr_train_1010_label_frame_37.png", "mr_train_1012_label_frame_86.png", "mr_train_1010_label_frame_3.png", "mr_train_1019_label_frame_41.png", "mr_train_1012_label_frame_117.png", "mr_train_1010_label_frame_148.png", "mr_train_1002_label_frame_74.png", "mr_train_1002_label_frame_49.png", "mr_train_1002_label_frame_95.png", "mr_train_1012_label_frame_113.png", "mr_train_1019_label_frame_5.png", "mr_train_1012_label_frame_13.png", "mr_train_1010_label_frame_90.png", "mr_train_1002_label_frame_112.png", "mr_train_1012_label_frame_90.png", "mr_train_1002_label_frame_58.png", "mr_train_1010_label_frame_94.png", "mr_train_1002_label_frame_108.png", "mr_train_1019_label_frame_92.png", "mr_train_1019_label_frame_106.png", "mr_train_1002_label_frame_45.png", "mr_train_1019_label_frame_30.png", "mr_train_1019_label_frame_109.png", "mr_train_1019_label_frame_131.png", "mr_train_1002_label_frame_51.png", "mr_train_1002_label_frame_125.png", "mr_train_1012_label_frame_58.png", "mr_train_1002_label_frame_105.png", "mr_train_1012_label_frame_72.png", "mr_train_1010_label_frame_72.png", "mr_train_1010_label_frame_30.png", 
"mr_train_1010_label_frame_149.png", "mr_train_1010_label_frame_77.png", "mr_train_1002_label_frame_22.png", "mr_train_1010_label_frame_13.png", "mr_train_1019_label_frame_25.png", "mr_train_1002_label_frame_53.png", "mr_train_1012_label_frame_22.png", "mr_train_1012_label_frame_51.png", "mr_train_1002_label_frame_70.png", "mr_train_1002_label_frame_13.png", "mr_train_1002_label_frame_11.png", "mr_train_1010_label_frame_113.png", "mr_train_1019_label_frame_10.png", "mr_train_1012_label_frame_12.png", "mr_train_1019_label_frame_33.png", "mr_train_1010_label_frame_101.png", "mr_train_1010_label_frame_4.png", "mr_train_1010_label_frame_57.png", "mr_train_1010_label_frame_84.png", "mr_train_1012_label_frame_116.png", "mr_train_1002_label_frame_61.png", "mr_train_1012_label_frame_63.png", "mr_train_1002_label_frame_1.png", "mr_train_1012_label_frame_71.png", "mr_train_1010_label_frame_40.png", "mr_train_1002_label_frame_88.png", "mr_train_1010_label_frame_105.png", "mr_train_1002_label_frame_26.png", "mr_train_1010_label_frame_91.png", "mr_train_1002_label_frame_126.png", "mr_train_1019_label_frame_126.png", "mr_train_1010_label_frame_75.png", "mr_train_1012_label_frame_124.png", "mr_train_1019_label_frame_133.png", "mr_train_1010_label_frame_114.png", "mr_train_1002_label_frame_127.png", "mr_train_1010_label_frame_64.png", "mr_train_1010_label_frame_134.png", "mr_train_1010_label_frame_115.png", "mr_train_1019_label_frame_0.png", "mr_train_1010_label_frame_53.png", "mr_train_1002_label_frame_89.png", "mr_train_1019_label_frame_100.png", "mr_train_1002_label_frame_124.png", "mr_train_1010_label_frame_71.png", "mr_train_1002_label_frame_62.png", "mr_train_1010_label_frame_20.png", "mr_train_1010_label_frame_18.png", "mr_train_1002_label_frame_54.png", "mr_train_1019_label_frame_20.png", "mr_train_1010_label_frame_10.png", "mr_train_1002_label_frame_59.png", "mr_train_1012_label_frame_38.png", "mr_train_1019_label_frame_31.png", "mr_train_1002_label_frame_40.png", "mr_train_1010_label_frame_157.png", "mr_train_1019_label_frame_67.png", "mr_train_1002_label_frame_104.png", "mr_train_1012_label_frame_85.png", "mr_train_1012_label_frame_33.png", "mr_train_1010_label_frame_144.png", "mr_train_1012_label_frame_43.png", "mr_train_1010_label_frame_125.png", "mr_train_1002_label_frame_15.png", "mr_train_1010_label_frame_104.png", "mr_train_1012_label_frame_29.png", "mr_train_1019_label_frame_91.png", "mr_train_1002_label_frame_82.png", "mr_train_1002_label_frame_29.png", "mr_train_1012_label_frame_24.png", "mr_train_1012_label_frame_102.png", "mr_train_1010_label_frame_155.png", "mr_train_1019_label_frame_83.png", "mr_train_1019_label_frame_8.png", "mr_train_1019_label_frame_85.png", "mr_train_1010_label_frame_27.png", "mr_train_1012_label_frame_65.png", "mr_train_1019_label_frame_113.png", "mr_train_1002_label_frame_73.png", "mr_train_1012_label_frame_115.png", "mr_train_1019_label_frame_13.png", "mr_train_1010_label_frame_96.png", "mr_train_1010_label_frame_33.png", "mr_train_1019_label_frame_38.png", "mr_train_1019_label_frame_58.png", "mr_train_1012_label_frame_4.png", "mr_train_1019_label_frame_71.png", "mr_train_1012_label_frame_48.png", "mr_train_1010_label_frame_21.png", "mr_train_1012_label_frame_109.png", "mr_train_1019_label_frame_69.png", "mr_train_1012_label_frame_56.png", "mr_train_1010_label_frame_41.png", "mr_train_1002_label_frame_28.png", "mr_train_1010_label_frame_34.png", "mr_train_1019_label_frame_115.png", "mr_train_1002_label_frame_118.png", "mr_train_1019_label_frame_47.png", 
"mr_train_1019_label_frame_14.png", "mr_train_1002_label_frame_68.png", "mr_train_1019_label_frame_128.png", "mr_train_1010_label_frame_69.png", "mr_train_1002_label_frame_121.png", "mr_train_1012_label_frame_73.png", "mr_train_1002_label_frame_106.png", "mr_train_1010_label_frame_85.png", "mr_train_1019_label_frame_35.png", "mr_train_1002_label_frame_31.png", "mr_train_1019_label_frame_15.png", "mr_train_1019_label_frame_84.png", "mr_train_1002_label_frame_64.png", "mr_train_1012_label_frame_112.png", "mr_train_1010_label_frame_12.png", "mr_train_1012_label_frame_122.png", "mr_train_1019_label_frame_24.png", "mr_train_1010_label_frame_2.png", "mr_train_1010_label_frame_98.png", "mr_train_1012_label_frame_100.png", "mr_train_1019_label_frame_73.png", "mr_train_1010_label_frame_50.png", "mr_train_1019_label_frame_45.png", "mr_train_1019_label_frame_6.png", "mr_train_1002_label_frame_44.png"]} \ No newline at end of file diff --git a/files_split/__init__.py b/files_split/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/files_split/fgadr_pkl_file.pkl b/files_split/fgadr_pkl_file.pkl new file mode 100644 index 0000000000000000000000000000000000000000..db7f6048ba14026c46467ef6f57a107ef48a0728 --- /dev/null +++ b/files_split/fgadr_pkl_file.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d44356d5fc5e30bb200ddb789d70730a4158875a61140171232ab44ac3c98400 +size 159475 diff --git a/further_training_lvm_med/README.md b/further_training_lvm_med/README.md new file mode 100644 index 0000000000000000000000000000000000000000..3ab4d4f407710de5cf0e75284894f30f76ff03c7 --- /dev/null +++ b/further_training_lvm_med/README.md @@ -0,0 +1,20 @@ + +# Description + +The LIVECell dataset was published by [Edlund et.al.](https://www.nature.com/articles/s41592-021-01249-6) in 2021. This dataset consists of over 1.6 million high-quality cells from various sets of cell morphologies and +culture densities which are manually annotated carefully by experts. +There are 8 types of Cells in this dataset. All of them were merged and then continued pre-trained from LVM-MED weights. 
+ + +```bibtex +@article{edlund2021livecell, + title={LIVECell—A large-scale dataset for label-free live cell segmentation}, + author={Edlund, Christoffer and Jackson, Timothy R and Khalid, Nabeel and Bevan, Nicola and Dale, Timothy and Dengel, Andreas and Ahmed, Sheraz and Trygg, Johan and Sj{\"o}gren, Rickard}, + journal={Nature methods}, + volume={18}, + number={9}, + pages={1038--1045}, + year={2021}, + publisher={Nature Publishing Group US New York} +} +``` diff --git a/further_training_lvm_med/__init__.py b/further_training_lvm_med/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lvm-med-training-data/0_SSL_BraTS2018_Brain/file_list.txt b/lvm-med-training-data/0_SSL_BraTS2018_Brain/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..cfb21a58ee80522f36d5f1c9f96be5bae6c46693 --- /dev/null +++ b/lvm-med-training-data/0_SSL_BraTS2018_Brain/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:afc528cf7ea027d47295dfd92d269b7c28f5a798a11fb8b7741c007199088a40 +size 1239 diff --git a/lvm-med-training-data/0_SSL_HNSCC_Mixed_HeadNeck/file_list.txt b/lvm-med-training-data/0_SSL_HNSCC_Mixed_HeadNeck/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..dea8c10f27ebd93e5f4b59197414189452557cb6 --- /dev/null +++ b/lvm-med-training-data/0_SSL_HNSCC_Mixed_HeadNeck/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cda6b92937fa473d5f34cdfb4a4f5fb7ac0d499f465a89cea70543dd589172ed +size 12493214 diff --git a/lvm-med-training-data/0_SSL_HyperKvasir_Color_Colon/file_list.txt b/lvm-med-training-data/0_SSL_HyperKvasir_Color_Colon/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..22585176ef5a07f204af493ccb6e20a90df7ee6a --- /dev/null +++ b/lvm-med-training-data/0_SSL_HyperKvasir_Color_Colon/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bf426ced9fefa4aad87e6c854be637ee56d10742f837d4000347b5be8db7ac8b +size 4073555 diff --git a/lvm-med-training-data/0_SSL_LUNA2016_Lung/file_list.txt b/lvm-med-training-data/0_SSL_LUNA2016_Lung/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..357d2eebdf370bd2ee76f70bc0ce6b53140fb9bd --- /dev/null +++ b/lvm-med-training-data/0_SSL_LUNA2016_Lung/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f5b59752573ad3338bc2b9c13b3c935473f698edb41be51559ef703b5c90087f +size 24563 diff --git a/lvm-med-training-data/0_SSL_LiTS2017_Liver/file_list.txt b/lvm-med-training-data/0_SSL_LiTS2017_Liver/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..2121e2accd9ac6dfcbe1ecd2a099ecabae972ceb --- /dev/null +++ b/lvm-med-training-data/0_SSL_LiTS2017_Liver/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4dab86191762ddf5de005a219034a9231551cceb71e5d8dcd95a911e02033b9a +size 305 diff --git a/lvm-med-training-data/0_SSL_OPC-Radiomics_CT_Oropharynx/file_list.txt b/lvm-med-training-data/0_SSL_OPC-Radiomics_CT_Oropharynx/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..40ca9f9ef917d35234dd8437a2e22b3d8bd97a2a --- /dev/null +++ b/lvm-med-training-data/0_SSL_OPC-Radiomics_CT_Oropharynx/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:965463f825a6005d2d19092ae7227d709290511db24f35e285ba07a08d1a3963 +size 2010629 diff --git 
a/lvm-med-training-data/0_SSL_Osteosarcoma-UT_Color_Cells/file_list.txt b/lvm-med-training-data/0_SSL_Osteosarcoma-UT_Color_Cells/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..73a4f8f43de5580d59a5467d1b05883f89e07121 --- /dev/null +++ b/lvm-med-training-data/0_SSL_Osteosarcoma-UT_Color_Cells/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:94f595bd56c4c4e9e8aaa662c9885efc0e507fb8cfe36e62842fe5c6a229884a +size 14959 diff --git a/lvm-med-training-data/0_SSL_PROSTATEx_MRI_Prostate/file_list.txt b/lvm-med-training-data/0_SSL_PROSTATEx_MRI_Prostate/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..4023fb20310c4d8c47cb67831b4639b3faa2eece --- /dev/null +++ b/lvm-med-training-data/0_SSL_PROSTATEx_MRI_Prostate/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3cf3e17bcf284190c17afe90f13690f34c6fa08a2e5baec7d3dcca5be9bbf465 +size 945208 diff --git a/lvm-med-training-data/0_SSL_Pancreas-CT_CT_Pancreas/file_list.txt b/lvm-med-training-data/0_SSL_Pancreas-CT_CT_Pancreas/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..4ea37bf0214c001980a35d8f1cbcda8958fdec5c --- /dev/null +++ b/lvm-med-training-data/0_SSL_Pancreas-CT_CT_Pancreas/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:63db0f826fc297d2109dfcc89a7ec014d25aba4adc54977bf5b3744ebf02486c +size 295460 diff --git a/lvm-med-training-data/0_SSL_PatchCamelyon_Color_Cells/file_list.txt b/lvm-med-training-data/0_SSL_PatchCamelyon_Color_Cells/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..bf35e5142fcf30756b7f75746a655de4a6d5f1f0 --- /dev/null +++ b/lvm-med-training-data/0_SSL_PatchCamelyon_Color_Cells/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fcf65fa24e03a9cc4bb6039c5187e098274447f43405b04a5ea56eda1643cc22 +size 39 diff --git a/lvm-med-training-data/0_SSL_Pelvic-Reference-Data_CT_Pelvic/file_list.txt b/lvm-med-training-data/0_SSL_Pelvic-Reference-Data_CT_Pelvic/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..5c69c750bb31784d3d4553efa104690caee31800 --- /dev/null +++ b/lvm-med-training-data/0_SSL_Pelvic-Reference-Data_CT_Pelvic/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:229b1afe850a57176886c41ab07298ce7dc142ecf068865cb2ddbc4162544715 +size 237850 diff --git a/lvm-med-training-data/0_SSL_TCGA-CESC_MRI_Cervix/file_list.txt b/lvm-med-training-data/0_SSL_TCGA-CESC_MRI_Cervix/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..93dd10b4ea8287bf5b5da0023ab2708de07f147a --- /dev/null +++ b/lvm-med-training-data/0_SSL_TCGA-CESC_MRI_Cervix/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6c413cbc4b3aab07f1def60e17c1674eb7820ddeb5f253cb2c4d663bc561c124 +size 418787 diff --git a/lvm-med-training-data/0_SSL_TCGA-COAD_CT_Colon/file_list.txt b/lvm-med-training-data/0_SSL_TCGA-COAD_CT_Colon/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..18a1a96646c886e3ea488279c9cb7a760eebb565 --- /dev/null +++ b/lvm-med-training-data/0_SSL_TCGA-COAD_CT_Colon/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:315f94543a107d4981260d5ba42b9e97dccfc9f0d10358038803b6bf1953e31c +size 163210 diff --git a/lvm-med-training-data/0_SSL_TCGA-ESCA_CT_Esophagus/file_list.txt 
b/lvm-med-training-data/0_SSL_TCGA-ESCA_CT_Esophagus/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..a59c5b2e6075d14822ba5749d88815ba9d4b0178 --- /dev/null +++ b/lvm-med-training-data/0_SSL_TCGA-ESCA_CT_Esophagus/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7aa66c58b39b4d3e6dab3e6d06e957d590ada1719fbf9e4db7c94c71da982d46 +size 371797 diff --git a/lvm-med-training-data/0_SSL_TCGA-KICH_Mixed_Kidney/file_list.txt b/lvm-med-training-data/0_SSL_TCGA-KICH_Mixed_Kidney/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..ace9598a93cedd0db7c9ce656a301665edd85840 --- /dev/null +++ b/lvm-med-training-data/0_SSL_TCGA-KICH_Mixed_Kidney/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3e4cb763fc921afe4e00c5a3c77ea32c3ef3f45eac9d64f4acfbe6d184daaa2d +size 195316 diff --git a/lvm-med-training-data/0_SSL_TCGA-KIRC_Mixed_Multi/file_list.txt b/lvm-med-training-data/0_SSL_TCGA-KIRC_Mixed_Multi/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..4c0a048b760f3671f6f4a5a9e13b6165e3898b5c --- /dev/null +++ b/lvm-med-training-data/0_SSL_TCGA-KIRC_Mixed_Multi/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c3dc8bcddb7a74c693fe3c0118d42add04a0e951f22b756c699b278ffc1b3c68 +size 3100288 diff --git a/lvm-med-training-data/0_SSL_TCGA-READ_Mixed_Rectum/file_list.txt b/lvm-med-training-data/0_SSL_TCGA-READ_Mixed_Rectum/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..da3be74a40834da392a845b97e19fc6f89199042 --- /dev/null +++ b/lvm-med-training-data/0_SSL_TCGA-READ_Mixed_Rectum/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:394453330d7365aeeb43adcce8568f0b44ada7e0610ed2fd7178a9b75a525c04 +size 22937 diff --git a/lvm-med-training-data/0_SSL_TCGA-SARC_Mixed_Multi/file_list.txt b/lvm-med-training-data/0_SSL_TCGA-SARC_Mixed_Multi/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..13d72f4b275b91cca6b843eb6371f1214c55455f --- /dev/null +++ b/lvm-med-training-data/0_SSL_TCGA-SARC_Mixed_Multi/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:094f233db83ef5577fae3d76e67e90882b5fde621236b8d5eee5aba604e6a63f +size 60361 diff --git a/lvm-med-training-data/0_SSL_TCGA-THCA_CT_Thyroid/file_list.txt b/lvm-med-training-data/0_SSL_TCGA-THCA_CT_Thyroid/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..cbcc553e822a1f47b08d98856d37fb5bc5868864 --- /dev/null +++ b/lvm-med-training-data/0_SSL_TCGA-THCA_CT_Thyroid/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:43f38402f11f052c30df704b3eaa6d7c64c636fd70275da29bb1f379a7736c47 +size 67701 diff --git a/lvm-med-training-data/0_SSL_VinBigData_X-ray_Lung/file_list.txt b/lvm-med-training-data/0_SSL_VinBigData_X-ray_Lung/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..8eaa84dbdd3032a60231f06587f4158ab0dbf77e --- /dev/null +++ b/lvm-med-training-data/0_SSL_VinBigData_X-ray_Lung/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:300aae71b448aea691cd6694b195beb67fd95d0377d072d707430a84dd806667 +size 555000 diff --git a/lvm-med-training-data/ADNI1_MRI_Brain_1.5T/file_list.txt b/lvm-med-training-data/ADNI1_MRI_Brain_1.5T/file_list.txt new file mode 100644 index 
0000000000000000000000000000000000000000..08055c5c9d489c381789664bf850f29254fa963e --- /dev/null +++ b/lvm-med-training-data/ADNI1_MRI_Brain_1.5T/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7e02643eb606c419f70717d681caff18be885b2aab7cfea31d59d658ae9ff34b +size 1859 diff --git a/lvm-med-training-data/ADNI1_MRI_Brain_3T/file_list.txt b/lvm-med-training-data/ADNI1_MRI_Brain_3T/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..3d824db80880a7a4b8638f74071dfc1219695d2f --- /dev/null +++ b/lvm-med-training-data/ADNI1_MRI_Brain_3T/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6e4f09e6716920d45ef69b1ff67f03662fa7bcf70dc84e5d318654d87aaf6557 +size 350 diff --git a/lvm-med-training-data/AML-Cytomorphology_Color_Cells/file_list.txt b/lvm-med-training-data/AML-Cytomorphology_Color_Cells/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..dcbf6c43b254e746fe527ed651fca20dba9be2c2 --- /dev/null +++ b/lvm-med-training-data/AML-Cytomorphology_Color_Cells/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f4e4b4f3a59c0c32215f94c81ace46f89f52d99ede516d1d259bcfb429efb519 +size 62426 diff --git a/lvm-med-training-data/APTOS2019_Color_Retinopathy/file_list.txt b/lvm-med-training-data/APTOS2019_Color_Retinopathy/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..d37043b156ca350ff9b1a898befd14d0f617f2a4 --- /dev/null +++ b/lvm-med-training-data/APTOS2019_Color_Retinopathy/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5b1ad90d7c711191fb30856c25e569e63da2e8181bc79deefdc9d0adb29eaf34 +size 15143 diff --git a/lvm-med-training-data/BCCD-Dataset_Color_Bloodcells/file_list.txt b/lvm-med-training-data/BCCD-Dataset_Color_Bloodcells/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..4b60aae02a61f71c3a3dbdc7f671127c84048579 --- /dev/null +++ b/lvm-med-training-data/BCCD-Dataset_Color_Bloodcells/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e5f1155b3f50cc0772cd35a0406e69352b4e08d9e9109b57f5976d071b640d4e +size 6132 diff --git a/lvm-med-training-data/BCSS_Color_Cells/file_list.txt b/lvm-med-training-data/BCSS_Color_Cells/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..45cfab388476ccf82033d0994a25c29d0543f02b --- /dev/null +++ b/lvm-med-training-data/BCSS_Color_Cells/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cc913606d8f18e8c9dde716dc0e9332047d86967d1b058294c0e4c0511f915dd +size 1767 diff --git a/lvm-med-training-data/C-NMC_Leukemia_Color_Cells/file_list.txt b/lvm-med-training-data/C-NMC_Leukemia_Color_Cells/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..052cdd3ba2e4d85585ccf70d7d36b4514179b4db --- /dev/null +++ b/lvm-med-training-data/C-NMC_Leukemia_Color_Cells/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4fb2835be578e6a04ebac0848ee361a0876538290abb1cd9ad4fcc09fec7d381 +size 214703 diff --git a/lvm-med-training-data/CBIS-DDSM_Mammography_Breast/file_list.txt b/lvm-med-training-data/CBIS-DDSM_Mammography_Breast/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..7bdd5e20d60983782626f0c58340bcd0a64c9ab6 --- /dev/null +++ b/lvm-med-training-data/CBIS-DDSM_Mammography_Breast/file_list.txt @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:556f1b896da3601b4c7793ba9d1ed5776b2186c74d8b809b0529daa895ee2571 +size 81715 diff --git a/lvm-med-training-data/COVIDx_CT_Lung/file_list.txt.txt b/lvm-med-training-data/COVIDx_CT_Lung/file_list.txt.txt new file mode 100644 index 0000000000000000000000000000000000000000..9937235c9af65910daacba35d8541de18b39b328 --- /dev/null +++ b/lvm-med-training-data/COVIDx_CT_Lung/file_list.txt.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d7e1d3bd4001c92131d8704daaac2807f4cdbebd1524f48e10cac228ba5e1087 +size 122 diff --git a/lvm-med-training-data/DentalPanoramicXrays_X-ray_Tooth/file_list.txt b/lvm-med-training-data/DentalPanoramicXrays_X-ray_Tooth/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..c48d6ebcc39ca1cdcf66e52c8a7db8683481478c --- /dev/null +++ b/lvm-med-training-data/DentalPanoramicXrays_X-ray_Tooth/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:367c027510bd9c22b597af476502f666e8c4f281c38715d44ec21d0a853fc1d3 +size 339 diff --git a/lvm-med-training-data/EyePACS_Color_Retinopathy/file_list.txt b/lvm-med-training-data/EyePACS_Color_Retinopathy/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..ca4b4654d25d6c0720c88cff27e9bf4680a49623 --- /dev/null +++ b/lvm-med-training-data/EyePACS_Color_Retinopathy/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c2bb98278b8db5857cf3dc17df18d6e04b96294cfad7a74d72046574dcc32513 +size 141205 diff --git a/lvm-med-training-data/HC18_Ultrasound_Fetus/file_list.txt b/lvm-med-training-data/HC18_Ultrasound_Fetus/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..5fb48154459938cbeba180cc2de78313d4372c8f --- /dev/null +++ b/lvm-med-training-data/HC18_Ultrasound_Fetus/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:427e91994c54e9f61be5aef9e7e4e1aaab090e60166b9a0d39251c2da5be7637 +size 3489 diff --git a/lvm-med-training-data/Heidelberg_OCT_Eye/file_list.txt b/lvm-med-training-data/Heidelberg_OCT_Eye/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..9b23b2769d340288f93b3d1c960e2af9a75788f1 --- /dev/null +++ b/lvm-med-training-data/Heidelberg_OCT_Eye/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fff5bd1cfa6068ceafb640422adef6a9746ec8f24b49db1e11a31d99e37c5e64 +size 13 diff --git a/lvm-med-training-data/Hippseg2011_MRI_Brain/file_list.txt b/lvm-med-training-data/Hippseg2011_MRI_Brain/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..45cd6b2b8ed55bb7904972fa84eb52e69a4e391f --- /dev/null +++ b/lvm-med-training-data/Hippseg2011_MRI_Brain/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:250b5a675d5f147485658f80e38421b92c417292664b174ad802661da1652ec0 +size 95 diff --git a/lvm-med-training-data/ISIC2019_Color_Skin/file_list.txt b/lvm-med-training-data/ISIC2019_Color_Skin/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..0afd56f89db7b23b20b42cd883b267009768a6ef --- /dev/null +++ b/lvm-med-training-data/ISIC2019_Color_Skin/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:05b1e980bec356f8db4bd6083b28cdfd8f2f648693c95f851b8b00605e9bca7c +size 109277 diff --git a/lvm-med-training-data/KiTS19_CT_Kidney/file_list.txt b/lvm-med-training-data/KiTS19_CT_Kidney/file_list.txt new file mode 100644 
index 0000000000000000000000000000000000000000..e5de0d300d7fcbf35c14c3102753a4374c340add --- /dev/null +++ b/lvm-med-training-data/KiTS19_CT_Kidney/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f0c42056779fa9a83337b21463abe890b0c8c48d626b9f41cd9975d0ec761aab +size 1938 diff --git a/lvm-med-training-data/Kvasir_Color_Colon/file_list.txt b/lvm-med-training-data/Kvasir_Color_Colon/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..d0b99d0139add64e03f11d6dd5229076b7b6190b --- /dev/null +++ b/lvm-med-training-data/Kvasir_Color_Colon/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:64d361ae511ddd566f270e96f366472734163a4bd607f4ba9487cc77323c336f +size 71720 diff --git a/lvm-med-training-data/LHNCBC-Malaria_Color_Blood-Smear/file_list.txt b/lvm-med-training-data/LHNCBC-Malaria_Color_Blood-Smear/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..c35f8022216f7ed0342ff525a9d51501a25b3d4b --- /dev/null +++ b/lvm-med-training-data/LHNCBC-Malaria_Color_Blood-Smear/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0638d4055c0849301d4e7b63f8a2aa26aa687b91c2bb3197574491fe2492f3c6 +size 273764 diff --git a/lvm-med-training-data/MLL_Color_BoneMarrow/file_list.txt b/lvm-med-training-data/MLL_Color_BoneMarrow/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..aa50fb1ed1c09f8b58c33818a5927df18b53e114 --- /dev/null +++ b/lvm-med-training-data/MLL_Color_BoneMarrow/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:10b7f7eabb714fc8c123c3c6848c1031c5b479fb097b208e313ce4b2f379abb8 +size 580902 diff --git a/lvm-med-training-data/MMWHS_CT_Heart/file_list.txt b/lvm-med-training-data/MMWHS_CT_Heart/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..35bd56a50da0e5138591d12d885b8bbd4a33d192 --- /dev/null +++ b/lvm-med-training-data/MMWHS_CT_Heart/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c89047af479ec3602b8a447ab7cc0b95fa5492c1994d2fd2e254b6ef2b671b97 +size 240 diff --git a/lvm-med-training-data/MMWHS_MR_Heart/file_list.txt b/lvm-med-training-data/MMWHS_MR_Heart/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..55bb0a0f61a01b89d57b8fc58f105d891e408fb5 --- /dev/null +++ b/lvm-med-training-data/MMWHS_MR_Heart/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e5496a761e3c098f0934a470820a8a04387fce790a1168aa854554d73c47e260 +size 240 diff --git a/lvm-med-training-data/MSD/file_list.txt b/lvm-med-training-data/MSD/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..d79b8ba9e775ce25d9fc005322105b4d9c81d3f1 --- /dev/null +++ b/lvm-med-training-data/MSD/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1ca0b9e50f90f05884ec2226dad565310427bdb2b2430e264e6501d6561ec66b +size 11265 diff --git a/lvm-med-training-data/MitoEM_SEM_Cells/file_list.txt b/lvm-med-training-data/MitoEM_SEM_Cells/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..e3638d880441d3d5f7fc8fee980b175c87db03be --- /dev/null +++ b/lvm-med-training-data/MitoEM_SEM_Cells/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:856511e2ad37ba914cb6d4c8aa35d6ac420378fc8f2959654f316994c0960058 +size 210 diff --git 
a/lvm-med-training-data/NuCLS_Color_Nucleus/file_list.txt b/lvm-med-training-data/NuCLS_Color_Nucleus/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..dfa0bcac57d6f253bb15f52f5e725abea1bf5817 --- /dev/null +++ b/lvm-med-training-data/NuCLS_Color_Nucleus/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:123c9790b477080fdac66ce054eb10867b122e8ce386ac380a87a1b34ac4497f +size 140741 diff --git a/lvm-med-training-data/RSNA-Bone-Age_X-ray_Bone/file_list.txt b/lvm-med-training-data/RSNA-Bone-Age_X-ray_Bone/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..4daa011571af02280a0ab7b50a5816ec31f91b92 --- /dev/null +++ b/lvm-med-training-data/RSNA-Bone-Age_X-ray_Bone/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eabc48b99f8029e5f2de6894c1e6f9e177d2da3bc1f892292ebf15763157ed27 +size 26284 diff --git a/lvm-med-training-data/SARAS-MESAD_Color_Surgery/file_list.txt b/lvm-med-training-data/SARAS-MESAD_Color_Surgery/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..1697a5aee1a21bb7b3b2ae5713cc8a2314aaf31b --- /dev/null +++ b/lvm-med-training-data/SARAS-MESAD_Color_Surgery/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:96a737f0a4fb11a14aa2344a87177da43f7bff02f5f605dc955b6a6592f8d057 +size 541021 diff --git a/lvm-med-training-data/Shenzhen_X-ray_Lung/file_list.txt b/lvm-med-training-data/Shenzhen_X-ray_Lung/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..575fe15ca3aa2667d2cdf55c715ecb94fbae18fe --- /dev/null +++ b/lvm-med-training-data/Shenzhen_X-ray_Lung/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7c481c5e19dbc071ce490230c28e148ac859444757ae8469e5ecd25485ac2c54 +size 2844 diff --git a/lvm-med-training-data/X-ray_Shoulder/file_list.txt b/lvm-med-training-data/X-ray_Shoulder/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..d6db4b495d65cefedb0751c097ba8cb9a78ac3b9 --- /dev/null +++ b/lvm-med-training-data/X-ray_Shoulder/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6c11f0c930145fe15e015fc927000ab2568157054b01687cabc4d83ce7b612db +size 16662 diff --git a/lvm-med-training-data/__init__.py b/lvm-med-training-data/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lvm-med-training-data/dataset.md b/lvm-med-training-data/dataset.md new file mode 100644 index 0000000000000000000000000000000000000000..bfc91f80c4a7ed209b74df63e78e443f8dff517c --- /dev/null +++ b/lvm-med-training-data/dataset.md @@ -0,0 +1,7 @@ +## Tutorial on using LVM-Med pre-trained models +Because we collected public datasets and used them during the training phase, there is a possibility of test-data leakage if a later paper evaluates its experiments on one of these datasets. To avoid this, we suggest the following setting: + +Assume you want to run downstream tasks on dataset ```X```; then please check the condition below to use the released models properly: +![](training_data.png) + +For datasets without a standard split, each folder provides a .txt file listing all indexes used during the training step (usually ```20%``` of the total data). These samples should not be used in the testing phase. 
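A minimal sketch of how this check could be automated, assuming each `file_list.txt` (once pulled from Git LFS) simply lists one training filename per line; the parsing and the example path are assumptions, not a documented format:

```python
# Hedged sketch: exclude samples already used for LVM-Med pre-training from a
# downstream test split. Assumes file_list.txt contains one filename per line.
from pathlib import Path

def load_pretraining_indexes(list_path: str) -> set:
    """Return the filenames recorded as used during LVM-Med pre-training."""
    with open(list_path) as f:
        return {line.strip() for line in f if line.strip()}

def filter_test_split(candidate_files, list_path):
    """Keep only candidates that were not seen during pre-training."""
    seen = load_pretraining_indexes(list_path)
    return [f for f in candidate_files if Path(f).name not in seen]

# Example with a placeholder file list from this folder:
# test_files = filter_test_split(all_test_candidates,
#                                "lvm-med-training-data/Kvasir_Color_Colon/file_list.txt")
```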
diff --git a/lvm-med-training-data/m2caiSeg_Color_Laparoscopic/file_list.txt b/lvm-med-training-data/m2caiSeg_Color_Laparoscopic/file_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..60e02739f81fb0804626f41c6fea660be4fb5fda --- /dev/null +++ b/lvm-med-training-data/m2caiSeg_Color_Laparoscopic/file_list.txt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:86c46eceb468ca13bd7baec4f11a1e89df77537eea46e5eff792736e9ba6af97 +size 5412 diff --git a/lvm-med-training-data/training_data.png b/lvm-med-training-data/training_data.png new file mode 100644 index 0000000000000000000000000000000000000000..1d3550bb1f07e0af7d96bec3503a808322d2a477 --- /dev/null +++ b/lvm-med-training-data/training_data.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3523d00d7a672af17c681df47b48680689ceff0520a14c2007ef0df78d6c954b +size 117039 diff --git a/lvm_med.yml b/lvm_med.yml new file mode 100644 index 0000000000000000000000000000000000000000..354c2e91bd653a35d4669e9c9e5e0b6194499dfa --- /dev/null +++ b/lvm_med.yml @@ -0,0 +1,328 @@ +name: lvm_med +channels: + - pytorch-lts + - nvidia + - anaconda + - conda-forge + - defaults +dependencies: + - _libgcc_mutex=0.1=main + - _openmp_mutex=4.5=1_gnu + - argon2-cffi=20.1.0=py38h27cfd23_1 + - asttokens=2.0.5=pyhd3eb1b0_0 + - attrs=21.4.0=pyhd3eb1b0_0 + - backcall=0.2.0=pyhd3eb1b0_0 + - beautifulsoup4=4.11.1=py38h06a4308_0 + - blas=1.0=mkl + - bottleneck=1.3.2=py38heb32a55_1 + - brotlipy=0.7.0=py38h497a2fe_1001 + - bzip2=1.0.8=h7b6447c_0 + - ca-certificates=2022.07.19=h06a4308_0 + - certifi=2022.5.18.1=py38h06a4308_0 + - cffi=1.15.0=py38hd667e15_1 + - charset-normalizer=2.0.12=pyhd8ed1ab_0 + - cloudpickle=2.0.0=pyhd3eb1b0_0 + - cryptography=35.0.0=py38ha5dfef3_0 + - cudatoolkit=11.1.74=h6bb024c_0 + - cytoolz=0.11.0=py38h7b6447c_0 + - dask-core=2022.2.1=pyhd3eb1b0_0 + - debugpy=1.5.1=py38h295c915_0 + - decorator=5.1.1=pyhd3eb1b0_0 + - defusedxml=0.7.1=pyhd3eb1b0_0 + - docker-pycreds=0.4.0=py_0 + - entrypoints=0.4=py38h06a4308_0 + - executing=0.8.3=pyhd3eb1b0_0 + - ffmpeg=4.2.2=h20bf706_0 + - freetype=2.11.0=h70c0345_0 + - fsspec=2022.2.0=pyhd3eb1b0_0 + - giflib=5.2.1=h7b6447c_0 + - gitdb=4.0.9=pyhd8ed1ab_0 + - gitpython=3.1.27=pyhd8ed1ab_0 + - gmp=6.2.1=h2531618_2 + - gnutls=3.6.15=he1e5248_0 + - idna=3.3=pyhd8ed1ab_0 + - imageio=2.9.0=pyhd3eb1b0_0 + - importlib-metadata=4.11.4=py38h578d9bd_0 + - importlib_resources=5.9.0=pyhd8ed1ab_0 + - intel-openmp=2021.4.0=h06a4308_3561 + - ipykernel=6.9.1=py38h06a4308_0 + - ipython=8.3.0=py38h06a4308_0 + - ipython_genutils=0.2.0=pyhd3eb1b0_1 + - jedi=0.18.1=py38h06a4308_1 + - jinja2=3.1.2=pyhd8ed1ab_1 + - jpeg=9b=h024ee3a_2 + - jupyter_client=7.0.6=pyhd8ed1ab_0 + - jupyter_core=4.10.0=py38h06a4308_0 + - jupyterlab_pygments=0.2.2=pyhd8ed1ab_0 + - lame=3.100=h7b6447c_0 + - lcms2=2.12=h3be6417_0 + - ld_impl_linux-64=2.35.1=h7274673_9 + - libblas=3.9.0=12_linux64_mkl + - libcblas=3.9.0=12_linux64_mkl + - libffi=3.3=he6710b0_2 + - libgcc-ng=9.3.0=h5101ec6_17 + - libgfortran-ng=7.5.0=ha8ba4b0_17 + - libgfortran4=7.5.0=ha8ba4b0_17 + - libgomp=9.3.0=h5101ec6_17 + - libidn2=2.3.2=h7f8727e_0 + - liblapack=3.9.0=12_linux64_mkl + - libopus=1.3.1=h7b6447c_0 + - libpng=1.6.37=hbc83047_0 + - libprotobuf=3.15.8=h780b84a_0 + - libsodium=1.0.18=h7b6447c_0 + - libstdcxx-ng=9.3.0=hd4cf53a_17 + - libtasn1=4.16.0=h27cfd23_0 + - libtiff=4.2.0=h85742a9_0 + - libunistring=0.9.10=h27cfd23_0 + - libuv=1.40.0=h7b6447c_0 + - libvpx=1.7.0=h439df22_0 + - libwebp=1.2.0=h89dd481_0 + - 
libwebp-base=1.2.0=h27cfd23_0 + - locket=0.2.1=py38h06a4308_2 + - lz4-c=1.9.3=h295c915_1 + - markupsafe=2.1.1=py38h7f8727e_0 + - matplotlib-inline=0.1.2=pyhd3eb1b0_2 + - mistune=0.8.4=py38h7b6447c_1000 + - mkl=2021.4.0=h06a4308_640 + - mkl-service=2.4.0=py38h7f8727e_0 + - mkl_fft=1.3.1=py38hd3c417c_0 + - mkl_random=1.2.2=py38h51133e4_0 + - nbconvert=6.5.0=pyhd8ed1ab_0 + - nbconvert-core=6.5.0=pyhd8ed1ab_0 + - nbconvert-pandoc=6.5.0=pyhd8ed1ab_0 + - nbformat=5.4.0=pyhd8ed1ab_0 + - ncurses=6.3=h7f8727e_2 + - nest-asyncio=1.5.5=py38h06a4308_0 + - nettle=3.7.3=hbbd107a_1 + - networkx=2.7.1=pyhd3eb1b0_0 + - ninja=1.10.2=py38hd09550d_3 + - notebook=6.4.11=py38h06a4308_0 + - numexpr=2.8.1=py38h6abb31d_0 + - numpy-base=1.21.5=py38hf524024_2 + - olefile=0.46=pyhd3eb1b0_0 + - openh264=2.1.1=h4ff587b_0 + - openssl=1.1.1q=h7f8727e_0 + - packaging=21.3=pyhd3eb1b0_0 + - pandas=1.4.1=py38h295c915_0 + - pandoc=2.18=ha770c72_0 + - pandocfilters=1.5.0=pyhd3eb1b0_0 + - parso=0.8.3=pyhd3eb1b0_0 + - partd=1.2.0=pyhd3eb1b0_1 + - pathtools=0.1.2=py_1 + - pexpect=4.8.0=pyhd3eb1b0_3 + - pickleshare=0.7.5=pyhd3eb1b0_1003 + - pillow=8.4.0=py38h5aabda8_0 + - pip=21.2.4=py38h06a4308_0 + - prometheus_client=0.13.1=pyhd3eb1b0_0 + - promise=2.3=py38h578d9bd_6 + - prompt-toolkit=3.0.20=pyhd3eb1b0_0 + - protobuf=3.15.8=py38h709712a_0 + - psutil=5.8.0=py38h27cfd23_1 + - ptyprocess=0.7.0=pyhd3eb1b0_2 + - pure_eval=0.2.2=pyhd3eb1b0_0 + - pycparser=2.21=pyhd8ed1ab_0 + - pydicom=2.3.0=pyh6c4a22f_0 + - pygments=2.12.0=pyhd8ed1ab_0 + - pyopenssl=22.0.0=pyhd8ed1ab_0 + - pyparsing=3.0.4=pyhd3eb1b0_0 + - pyserial=3.5=py38h06a4308_0 + - pysocks=1.7.1=py38h578d9bd_5 + - python=3.8.12=h12debd9_0 + - python-dateutil=2.8.2=pyhd3eb1b0_0 + - python-fastjsonschema=2.16.1=pyhd8ed1ab_0 + - python_abi=3.8=2_cp38 + - pytorch=1.8.2=py3.8_cuda11.1_cudnn8.0.5_0 + - pytz=2021.3=pyhd3eb1b0_0 + - pywavelets=1.3.0=py38h7f8727e_0 + - pyyaml=6.0=py38h7f8727e_1 + - readline=8.1.2=h7f8727e_1 + - requests=2.27.1=pyhd8ed1ab_0 + - scikit-image=0.19.2=py38h51133e4_0 + - scipy=1.7.3=py38hc147768_0 + - send2trash=1.8.0=pyhd3eb1b0_1 + - sentry-sdk=1.5.12=pyhd8ed1ab_0 + - setproctitle=1.2.2=py38h497a2fe_0 + - shortuuid=1.0.8=py38h578d9bd_0 + - six=1.16.0=pyhd3eb1b0_1 + - smmap=3.0.5=pyh44b312d_0 + - soupsieve=2.3.2.post1=pyhd8ed1ab_0 + - sqlite=3.37.2=hc218d9a_0 + - stack_data=0.2.0=pyhd3eb1b0_0 + - terminado=0.13.1=py38h06a4308_0 + - testpath=0.5.0=pyhd3eb1b0_0 + - tifffile=2020.10.1=py38hdd07704_2 + - tinycss2=1.1.1=pyhd8ed1ab_0 + - tk=8.6.11=h1ccaba5_0 + - toolz=0.11.2=pyhd3eb1b0_0 + - torchaudio=0.8.2=py38 + - torchvision=0.9.2=py38_cu111 + - tornado=6.1=py38h27cfd23_0 + - urllib3=1.26.9=pyhd8ed1ab_0 + - wandb=0.12.15=pyhd8ed1ab_0 + - wcwidth=0.2.5=pyhd3eb1b0_0 + - webencodings=0.5.1=py_1 + - wheel=0.37.1=pyhd3eb1b0_0 + - x264=1!157.20191217=h7b6447c_0 + - xz=5.2.5=h7b6447c_0 + - yaml=0.2.5=h7b6447c_0 + - zeromq=4.3.4=h2531618_0 + - zipp=3.8.0=py38h06a4308_0 + - zlib=1.2.11=h7f8727e_4 + - zstd=1.4.9=haebb681_0 + - pip: + - absl-py==1.1.0 + - addict==2.4.0 + - aiohttp==3.8.4 + - aiosignal==1.3.1 + - alabaster==0.7.12 + - albumentations==1.2.1 + - antlr4-python3-runtime==4.8 + - app==0.0.1 + - appdirs==1.4.4 + - async-timeout==4.0.2 + - babel==2.10.1 + - black==19.3b0 + - bleach==5.0.0 + - bs4==0.0.1 + - cachetools==5.2.0 + - cfgv==3.3.1 + - click==8.0.4 + - cnn-finetune==0.6.0 + - colorama==0.4.5 + - commonmark==0.9.1 + - conda==4.3.16 + - contextlib2==21.6.0 + - cycler==0.11.0 + - cython==0.29.22 + - datasets==2.12.0 + - deprecated==1.2.13 + - dill==0.3.6 + - 
distlib==0.3.4 + - docutils==0.17.1 + - efficientnet-pytorch==0.7.1 + - einops==0.4.1 + - fairscale==0.4.6 + - faiss-gpu==1.7.2 + - fastjsonschema==2.15.3 + - filelock==3.7.0 + - flake8==3.8.1 + - flake8-bugbear==22.4.25 + - flake8-comprehensions==3.10.0 + - focal-loss-torch==0.1.2 + - fonttools==4.33.3 + - frozenlist==1.3.3 + - fvcore==0.1.3.post20210317 + - google-auth==2.9.0 + - google-auth-oauthlib==0.4.6 + - grpcio==1.47.0 + - huggingface-hub==0.14.1 + - humanize==4.0.0 + - hydra-core==1.0.7 + - identify==2.5.1 + - imagesize==1.3.0 + - importlib-resources==5.7.1 + - iniconfig==1.1.1 + - iopath==0.1.9 + - ipdb==0.13.9 + - ipywidgets==7.7.1 + - isort==5.7.0 + - joblib==1.1.0 + - jsonschema==4.5.1 + - jupyter-client==7.3.1 + - jupyterlab-widgets==1.1.1 + - kaggle==1.5.13 + - kiwisolver==1.4.2 + - markdown==3.3.7 + - matplotlib==3.5.2 + - mccabe==0.6.1 + - medpy==0.4.0 + - ml-collections==0.1.1 + - mmcv-full==1.6.2 + - mmdet==2.25.2 + - model-index==0.1.11 + - multidict==6.0.4 + - multiprocess==0.70.14 + - munch==2.5.0 + - natsort==8.2.0 + - nbclient==0.6.3 + - nibabel==3.2.2 + - nodeenv==1.6.0 + - numpy==1.21.5 + - oauthlib==3.2.0 + - omegaconf==2.0.6 + - opencv-python==4.5.5.64 + - openmim==0.3.2 + - ordered-set==4.1.0 + - parameterized==0.7.4 + - platformdirs==2.5.2 + - pluggy==1.0.0 + - portalocker==2.4.0 + - pre-commit==2.19.0 + - pretrainedmodels==0.7.4 + - py==1.11.0 + - pyarrow==12.0.0 + - pyasn1==0.4.8 + - pyasn1-modules==0.2.8 + - pycocotools==2.0.4 + - pycodestyle==2.6.0 + - pycosat==0.6.3 + - pyflakes==2.2.0 + - pyrsistent==0.18.1 + - pytest==7.1.2 + - python-slugify==8.0.1 + - pyzmq==23.0.0 + - qudida==0.0.4 + - regex==2023.5.5 + - requests-oauthlib==1.3.1 + - responses==0.18.0 + - rich==12.6.0 + - rsa==4.8 + - ruamel-yaml==0.17.21 + - ruamel-yaml-clib==0.2.7 + - safetensors==0.3.1 + - scikit-learn==0.24.1 + - self-attention-cv==1.2.3 + - setuptools==59.5.0 + - simpleitk==2.1.1 + - sklearn==0.0.post4 + - snowballstemmer==2.2.0 + - sphinx==4.5.0 + - sphinxcontrib-applehelp==1.0.2 + - sphinxcontrib-devhelp==1.0.2 + - sphinxcontrib-htmlhelp==2.0.0 + - sphinxcontrib-jsmath==1.0.1 + - sphinxcontrib-qthelp==1.0.3 + - sphinxcontrib-serializinghtml==1.1.5 + - submitit==1.3.3 + - tabulate==0.8.9 + - tensorboard==2.9.1 + - tensorboard-data-server==0.6.1 + - tensorboard-plugin-wit==1.8.1 + - tensorboardx==2.6 + - termcolor==1.1.0 + - terminaltables==3.1.10 + - text-unidecode==1.3 + - threadpoolctl==3.1.0 + - tim==0.5 + - timm==0.6.5 + - tokenizers==0.13.3 + - toml==0.10.2 + - tomli==2.0.1 + - torchio==0.18.73 + - torchmetrics==0.10.0 + - tqdm==4.62.3 + - traitlets==5.2.1.post0 + - transformers==4.28.1 + - typing-extensions==4.4.0 + - unzip==1.0.0 + - virtualenv==20.14.1 + - werkzeug==2.1.2 + - widgetsnbextension==3.6.1 + - wrapt==1.13.3 + - xxhash==3.2.0 + - yacs==0.1.8 + - yapf==0.32.0 + - yarl==1.9.2 + - monai==1.1.0 + - ensemble-boxes==1.0.8 + \ No newline at end of file diff --git a/lvm_med_weights/README.md b/lvm_med_weights/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a4faed6b82eae9fe05e249970ba5861d50664ef7 --- /dev/null +++ b/lvm_med_weights/README.md @@ -0,0 +1 @@ +## Please place pre-trained models in this folder diff --git a/lvm_med_weights/__init__.py b/lvm_med_weights/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/medsam.py b/medsam.py new file mode 100644 index 0000000000000000000000000000000000000000..f5c961a17df6d6549d3ec1ed340650033915418e --- 
/dev/null +++ b/medsam.py @@ -0,0 +1,27 @@ +from segmentation_2d.MedSAM_2d import medsam_2d +from segmentation_2d.LVMMed_SAM_2d import lvm_medsam_2d + +from segmentation_3d.MedSAM_3d import medsam_3d +from segmentation_3d.LVMMed_SAM_3d import lvm_medsam_3d + +from utils.func import ( + parse_config, + load_config +) + +if __name__=="__main__": + yml_args = parse_config() + cfg = load_config(yml_args.config) + + assert cfg.base.is_2D + cfg.base.is_3D == 1 + + if yml_args.lvm_med_encoder_path != '': + if cfg.base.is_2D: + lvm_medsam_2d(yml_args, cfg) + else: + lvm_medsam_3d(yml_args, cfg) + else: + if cfg.base.is_2D: + medsam_2d(yml_args, cfg) + if cfg.base.is_3D: + medsam_3d(yml_args, cfg) \ No newline at end of file diff --git a/model/__init__.py b/model/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/model/unet_vit.py b/model/unet_vit.py new file mode 100644 index 0000000000000000000000000000000000000000..ccd0ab450a62271e3ac50e974d2c2d0030059533 --- /dev/null +++ b/model/unet_vit.py @@ -0,0 +1,691 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + + +import torch +import torch.nn as nn +from einops import rearrange +from functools import partial +import torch.nn.functional as F +from typing import Optional, Tuple, Type +import timm + + +class vit_timm(nn.Module): + def __init__( + self, + patch_dim, + pretrained = False, + ): + super(vit_timm, self).__init__() + self.patch_dim = patch_dim + self.model = timm.create_model(f'vit_base_patch16_224', num_classes=0) + if pretrained : + # load pretrained imagenet weight + weight = torch.load('/home/caduser/KOTORI/hoang_graph_matching_v1/vit_base_imagenet.pth', map_location = 'cpu') + self.model.load_state_dict(weight, strict = False) + + def forward(self,x): +# x = self.model.patch_embed(x) # remove some layer at this step +# x = self.model.pos_drop(x) + if len(x.shape) == 4 : # 4, 14, 14, 768 + # orignal shape : bs, patch_dim , patch_dim , embedding_dim + x = rearrange(x, 'b p1 p2 d -> b (p1 p2) d') + x = self.model.blocks(x) + x = self.model.norm(x) + x = rearrange(x, 'b (p1 p2) c -> b c p1 p2', p1 = self.patch_dim, p2 = self.patch_dim) + return x + + +class vit_encoder_b(nn.Module): + def __init__(self): + super().__init__() + + prompt_embed_dim = 256 + image_size = 1024 + vit_patch_size = 16 + image_embedding_size = image_size // vit_patch_size + encoder_embed_dim=768 + encoder_depth=12 + encoder_num_heads=12 + encoder_global_attn_indexes=[2, 5, 8, 11] + + self.model =ImageEncoderViT( + depth=encoder_depth, + embed_dim=encoder_embed_dim, + img_size=image_size, + mlp_ratio=4, + norm_layer=partial(torch.nn.LayerNorm, eps=1e-6), + num_heads=encoder_num_heads, + patch_size=vit_patch_size, + qkv_bias=True, + use_rel_pos=True, + use_abs_pos = False, + global_attn_indexes=encoder_global_attn_indexes, + window_size=14, + out_chans=prompt_embed_dim, + ) + def forward(self, x): + return self.model(x) + + +class EncoderBottleneck(nn.Module): + def __init__(self, in_channels, out_channels, stride=1, base_width=64): + super().__init__() + + self.downsample = nn.Sequential( + nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=stride, bias=False), + nn.BatchNorm2d(out_channels) + ) + + width = int(out_channels * (base_width / 64)) + + self.conv1 = nn.Conv2d(in_channels, width, kernel_size=1, stride=1, 
bias=False) + self.norm1 = nn.BatchNorm2d(width) + + self.conv2 = nn.Conv2d(width, width, kernel_size=3, stride=2, groups=1, padding=1, dilation=1, bias=False) + self.norm2 = nn.BatchNorm2d(width) + + self.conv3 = nn.Conv2d(width, out_channels, kernel_size=1, stride=1, bias=False) + self.norm3 = nn.BatchNorm2d(out_channels) + + self.relu = nn.ReLU(inplace=True) + + def forward(self, x): + x_down = self.downsample(x) + + x = self.conv1(x) + x = self.norm1(x) + x = self.relu(x) + + x = self.conv2(x) + x = self.norm2(x) + x = self.relu(x) + + x = self.conv3(x) + x = self.norm3(x) + x = x + x_down + x = self.relu(x) + + return x + + +class DecoderBottleneck(nn.Module): + def __init__(self, in_channels, out_channels, scale_factor=2): + super().__init__() + + self.upsample = nn.Upsample(scale_factor=scale_factor, mode='bilinear', align_corners=True) + self.layer = nn.Sequential( + nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=1, padding=1), + nn.BatchNorm2d(out_channels), + nn.ReLU(inplace=True), + nn.Conv2d(out_channels, out_channels, kernel_size=3, stride=1, padding=1), + nn.BatchNorm2d(out_channels), + nn.ReLU(inplace=True) + ) + + def forward(self, x, x_concat=None): + x = self.upsample(x) + + if x_concat is not None: +# print(x.shape, x_concat.shape) + x = torch.cat([x_concat, x], dim=1) + + x = self.layer(x) + return x + + +def load_weight_for_vit_encoder(pretrained): + + weight = None + if pretrained == 'lvm-med-vit': + path = './checkpoints/lvmmed_vit.torch' + print(f'Pretrained path of LVM-MED-VIT : {path}') + weight = torch.load(path, map_location ='cpu') + print(f'Number of params in original checkpoint : {len(weight)}') + for key in list(weight.keys()): + weight['model.' + key] = weight[key] + del weight[key] + + #print(f'Number of params in final checkpoint : {len(weight)}') + return weight + + + +class Encoder(nn.Module): + def __init__(self, in_channels, out_channels, pretrained, patch_dim): + super().__init__() + + self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=7, stride=2, padding=3, bias=False) + self.norm1 = nn.BatchNorm2d(out_channels) + self.relu = nn.ReLU(inplace=True) + + self.encoder1 = EncoderBottleneck(out_channels, out_channels * 2, stride=2) + self.encoder2 = EncoderBottleneck(out_channels * 2, out_channels * 4, stride=2) + self.encoder3 = EncoderBottleneck(out_channels * 4, out_channels * 8, stride=2) +# self.vit = vit_encoder_b() + + +# if pretrained == 'scratch' : +# print('LOAD VIT RANDOM SUCCESSFULLY') +# self.vit = vit_timm(pretrained = False) +# else : + if pretrained == 'flava': + from flava_model import Flava_encoder + print('LOAD FLAVA MODEL SUCCESSFULLY') + self.vit = Flava_encoder(patch_dim) + elif pretrained == 'default' : + print('LOAD VIT RANDOM SUCCESSFULLY') + self.vit = vit_timm(pretrained = False, patch_dim = patch_dim) + elif pretrained == 'clip' : + from clip_model import Clip_encoder + print('LOAD CLIP MODEL SUCCESSFULLY') + self.vit = Clip_encoder(patch_dim) + elif pretrained == 'imagenet': + print('LOAD VIT IMAGENET SUCCESSFULLY') + self.vit = vit_timm(pretrained = True, patch_dim = patch_dim) + elif pretrained in ['sam' , 'ssl' , 'ssl_large']: + self.vit = vit_encoder_b() + weight = load_weight_for_vit_encoder(pretrained) + self.vit.load_state_dict(weight,strict = False) + + + self.conv2 = nn.Conv2d(out_channels * 8, 384, kernel_size=3, stride=1, padding=1) + self.norm2 = nn.BatchNorm2d(384) + + def forward(self, x): + x = self.conv1(x) + x = self.norm1(x) + x1 = self.relu(x) # 4, 96, 112, 112 + + x2 = self.encoder1(x1) # 4, 192, 56, 56
+ x3 = self.encoder2(x2) # 4, 384, 28, 28 + x = self.encoder3(x3) # 4, 768, 14, 14 +# x = self.conv1x1(x) + + x = x.permute(0, 2, 3, 1) # 4, 14, 14, 768 + + ''' + enter VIT at this step + VIT in TranUNET need input shape of (bs, patch_1, patch_2, dim) ie,: 4, 14, 14, 768 + output shape of (bs, dim, patch_1, patch_2): ie 4, 768, 14, 14 + ''' + x = self.vit(x) # 4, 768, 14, 14 + x = self.conv2(x) # 4, 512, 28, 28 + x = self.norm2(x) + x = self.relu(x) + return x, x1, x2, x3 + + +class Decoder(nn.Module): + def __init__(self, out_channels, class_num): + super().__init__() + + self.decoder1 = DecoderBottleneck(out_channels * 8, out_channels * 2) + self.decoder2 = DecoderBottleneck(out_channels * 4, out_channels) + self.decoder3 = DecoderBottleneck(out_channels * 2, int(out_channels * 1 / 2)) + self.decoder4 = DecoderBottleneck(int(out_channels * 1 / 2), int(out_channels * 1 / 8)) + self.conv1 = nn.Conv2d(int(out_channels * 1 / 8), class_num, kernel_size=1) + + def forward(self, x, x1, x2, x3): + x = self.decoder1(x, x3) + x = self.decoder2(x, x2) + x = self.decoder3(x, x1) + x = self.decoder4(x) + x = self.conv1(x) + + return x + + +class TransUNet(nn.Module): + def __init__(self, in_channels, out_channels, class_num, pretrained, patch_dim): + super().__init__() + + self.encoder = Encoder(in_channels, out_channels, pretrained, patch_dim) + self.decoder = Decoder(out_channels, class_num) + + def forward(self, x): + x, x1, x2, x3 = self.encoder(x) + x = self.decoder(x, x1, x2, x3) + return x + + + +class MLPBlock(nn.Module): + def __init__( + self, + embedding_dim: int, + mlp_dim: int, + act: Type[nn.Module] = nn.GELU, + ) -> None: + super().__init__() + self.lin1 = nn.Linear(embedding_dim, mlp_dim) + self.lin2 = nn.Linear(mlp_dim, embedding_dim) + self.act = act() + + def forward(self, x: torch.Tensor) -> torch.Tensor: + return self.lin2(self.act(self.lin1(x))) + +class LayerNorm2d(nn.Module): + def __init__(self, num_channels: int, eps: float = 1e-6) -> None: + super().__init__() + self.weight = nn.Parameter(torch.ones(num_channels)) + self.bias = nn.Parameter(torch.zeros(num_channels)) + self.eps = eps + + def forward(self, x: torch.Tensor) -> torch.Tensor: + u = x.mean(1, keepdim=True) + s = (x - u).pow(2).mean(1, keepdim=True) + x = (x - u) / torch.sqrt(s + self.eps) + x = self.weight[:, None, None] * x + self.bias[:, None, None] + return x + + + + + +# This class and its supporting functions below lightly adapted from the ViTDet backbone available at: https://github.com/facebookresearch/detectron2/blob/main/detectron2/modeling/backbone/vit.py # noqa +class ImageEncoderViT(nn.Module): + def __init__( + self, + img_size: int = 1024, + patch_size: int = 16, + in_chans: int = 3, + embed_dim: int = 768, + depth: int = 12, + num_heads: int = 12, + mlp_ratio: float = 4.0, + out_chans: int = 256, + qkv_bias: bool = True, + norm_layer: Type[nn.Module] = nn.LayerNorm, + act_layer: Type[nn.Module] = nn.GELU, + include_neck: bool = False, + use_abs_pos: bool = True, + use_rel_pos: bool = False, + rel_pos_zero_init: bool = True, + window_size: int = 0, + global_attn_indexes: Tuple[int, ...] = (), + ) -> None: + """ + Args: + img_size (int): Input image size. + patch_size (int): Patch size. + in_chans (int): Number of input image channels. + embed_dim (int): Patch embedding dimension. + depth (int): Depth of ViT. + num_heads (int): Number of attention heads in each ViT block. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. 
+ qkv_bias (bool): If True, add a learnable bias to query, key, value. + norm_layer (nn.Module): Normalization layer. + act_layer (nn.Module): Activation layer. + use_abs_pos (bool): If True, use absolute positional embeddings. + use_rel_pos (bool): If True, add relative positional embeddings to the attention map. + rel_pos_zero_init (bool): If True, zero initialize relative positional parameters. + window_size (int): Window size for window attention blocks. + global_attn_indexes (list): Indexes for blocks using global attention. + """ + super().__init__() + self.img_size = img_size + + self.patch_embed = PatchEmbed( + kernel_size=(patch_size, patch_size), + stride=(patch_size, patch_size), + in_chans=in_chans, + embed_dim=embed_dim, + ) + + self.pos_embed: Optional[nn.Parameter] = None + if use_abs_pos: + # Initialize absolute positional embedding with pretrain image size. + self.pos_embed = nn.Parameter( + torch.zeros(1, img_size // patch_size, img_size // patch_size, embed_dim) + ) + + self.blocks = nn.ModuleList() + for i in range(depth): + block = Block( + dim=embed_dim, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + norm_layer=norm_layer, + act_layer=act_layer, + use_rel_pos=use_rel_pos, + rel_pos_zero_init=rel_pos_zero_init, + window_size=window_size if i not in global_attn_indexes else 0, + input_size=(img_size // patch_size, img_size // patch_size), + ) + self.blocks.append(block) + +# if self.include_neck : + self.neck = nn.Sequential( + nn.Conv2d( + embed_dim, + out_chans, + kernel_size=1, + bias=False, + ), + LayerNorm2d(out_chans), + nn.Conv2d( + out_chans, + out_chans, + kernel_size=3, + padding=1, + bias=False, + ), + LayerNorm2d(out_chans), + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: +# x = self.patch_embed(x) +# if self.pos_embed is not None: +# x = x + self.pos_embed + + for blk in self.blocks: + x = blk(x) +# print(x.grad_fn) + +# if self.include_neck : +# x = self.neck(x.permute(0, 3, 1, 2)) +# else : +# x = x.permute(0, 3, 1, 2) + +# x = self.neck(x.permute(0, 3, 1, 2)) + x = x.permute(0, 3, 1, 2) + return x + + +class Block(nn.Module): + """Transformer blocks with support of window attention and residual propagation blocks""" + + def __init__( + self, + dim: int, + num_heads: int, + mlp_ratio: float = 4.0, + qkv_bias: bool = True, + norm_layer: Type[nn.Module] = nn.LayerNorm, + act_layer: Type[nn.Module] = nn.GELU, + use_rel_pos: bool = False, + rel_pos_zero_init: bool = True, + window_size: int = 0, + input_size: Optional[Tuple[int, int]] = None, + ) -> None: + """ + Args: + dim (int): Number of input channels. + num_heads (int): Number of attention heads in each ViT block. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool): If True, add a learnable bias to query, key, value. + norm_layer (nn.Module): Normalization layer. + act_layer (nn.Module): Activation layer. + use_rel_pos (bool): If True, add relative positional embeddings to the attention map. + rel_pos_zero_init (bool): If True, zero initialize relative positional parameters. + window_size (int): Window size for window attention blocks. If it equals 0, then + use global attention. + input_size (int or None): Input resolution for calculating the relative positional + parameter size. 
+ """ + super().__init__() + self.norm1 = norm_layer(dim) + self.attn = Attention( + dim, + num_heads=num_heads, + qkv_bias=qkv_bias, + use_rel_pos=use_rel_pos, + rel_pos_zero_init=rel_pos_zero_init, + input_size=input_size if window_size == 0 else (window_size, window_size), + ) + + self.norm2 = norm_layer(dim) + self.mlp = MLPBlock(embedding_dim=dim, mlp_dim=int(dim * mlp_ratio), act=act_layer) + + self.window_size = window_size + + def forward(self, x: torch.Tensor) -> torch.Tensor: + shortcut = x + x = self.norm1(x) + # Window partition + if self.window_size > 0: + H, W = x.shape[1], x.shape[2] + x, pad_hw = window_partition(x, self.window_size) + + x = self.attn(x) + # Reverse window partition + if self.window_size > 0: + x = window_unpartition(x, self.window_size, pad_hw, (H, W)) + + x = shortcut + x + x = x + self.mlp(self.norm2(x)) + + return x + + +class Attention(nn.Module): + """Multi-head Attention block with relative position embeddings.""" + + def __init__( + self, + dim: int, + num_heads: int = 8, + qkv_bias: bool = True, + use_rel_pos: bool = False, + rel_pos_zero_init: bool = True, + input_size: Optional[Tuple[int, int]] = None, + ) -> None: + """ + Args: + dim (int): Number of input channels. + num_heads (int): Number of attention heads. + qkv_bias (bool: If True, add a learnable bias to query, key, value. + rel_pos (bool): If True, add relative positional embeddings to the attention map. + rel_pos_zero_init (bool): If True, zero initialize relative positional parameters. + input_size (int or None): Input resolution for calculating the relative positional + parameter size. + """ + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = head_dim**-0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.proj = nn.Linear(dim, dim) + + self.use_rel_pos = use_rel_pos + if self.use_rel_pos: + assert ( + input_size is not None + ), "Input size must be provided if using relative positional encoding." + # initialize relative positional embeddings + self.rel_pos_h = nn.Parameter(torch.zeros(2 * input_size[0] - 1, head_dim)) + self.rel_pos_w = nn.Parameter(torch.zeros(2 * input_size[1] - 1, head_dim)) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + B, H, W, _ = x.shape + # qkv with shape (3, B, nHead, H * W, C) + qkv = self.qkv(x).reshape(B, H * W, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4) + # q, k, v with shape (B * nHead, H * W, C) + q, k, v = qkv.reshape(3, B * self.num_heads, H * W, -1).unbind(0) + + attn = (q * self.scale) @ k.transpose(-2, -1) + + if self.use_rel_pos: + attn = add_decomposed_rel_pos(attn, q, self.rel_pos_h, self.rel_pos_w, (H, W), (H, W)) + + attn = attn.softmax(dim=-1) + x = (attn @ v).view(B, self.num_heads, H, W, -1).permute(0, 2, 3, 1, 4).reshape(B, H, W, -1) + x = self.proj(x) + + return x + + +def window_partition(x: torch.Tensor, window_size: int) -> Tuple[torch.Tensor, Tuple[int, int]]: + """ + Partition into non-overlapping windows with padding if needed. + Args: + x (tensor): input tokens with [B, H, W, C]. + window_size (int): window size. + + Returns: + windows: windows after partition with [B * num_windows, window_size, window_size, C]. 
+ (Hp, Wp): padded height and width before partition + """ + B, H, W, C = x.shape + + pad_h = (window_size - H % window_size) % window_size + pad_w = (window_size - W % window_size) % window_size + if pad_h > 0 or pad_w > 0: + x = F.pad(x, (0, 0, 0, pad_w, 0, pad_h)) + Hp, Wp = H + pad_h, W + pad_w + + x = x.view(B, Hp // window_size, window_size, Wp // window_size, window_size, C) + windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C) + return windows, (Hp, Wp) + + +def window_unpartition( + windows: torch.Tensor, window_size: int, pad_hw: Tuple[int, int], hw: Tuple[int, int] +) -> torch.Tensor: + """ + Window unpartition into original sequences and removing padding. + Args: + x (tensor): input tokens with [B * num_windows, window_size, window_size, C]. + window_size (int): window size. + pad_hw (Tuple): padded height and width (Hp, Wp). + hw (Tuple): original height and width (H, W) before padding. + + Returns: + x: unpartitioned sequences with [B, H, W, C]. + """ + Hp, Wp = pad_hw + H, W = hw + B = windows.shape[0] // (Hp * Wp // window_size // window_size) + x = windows.view(B, Hp // window_size, Wp // window_size, window_size, window_size, -1) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, Hp, Wp, -1) + + if Hp > H or Wp > W: + x = x[:, :H, :W, :].contiguous() + return x + + +def get_rel_pos(q_size: int, k_size: int, rel_pos: torch.Tensor) -> torch.Tensor: +# """ +# Get relative positional embeddings according to the relative positions of +# query and key sizes. +# Args: +# q_size (int): size of query q. +# k_size (int): size of key k. +# rel_pos (Tensor): relative position embeddings (L, C). + +# Returns: +# Extracted positional embeddings according to relative positions. +# """ + max_rel_dist = int(2 * max(q_size, k_size) - 1) + # Interpolate rel pos if needed. + if rel_pos.shape[0] != max_rel_dist: + # Interpolate rel pos. + rel_pos_resized = F.interpolate( + rel_pos.reshape(1, rel_pos.shape[0], -1).permute(0, 2, 1), + size=max_rel_dist, + mode="linear", + ) + rel_pos_resized = rel_pos_resized.reshape(-1, max_rel_dist).permute(1, 0) + else: + rel_pos_resized = rel_pos + + # Scale the coords with short length if shapes for q and k are different. + q_coords = torch.arange(q_size)[:, None] * max(k_size / q_size, 1.0) + k_coords = torch.arange(k_size)[None, :] * max(q_size / k_size, 1.0) + relative_coords = (q_coords - k_coords) + (k_size - 1) * max(q_size / k_size, 1.0) + + return rel_pos_resized[relative_coords.long()] + + +def add_decomposed_rel_pos( + attn: torch.Tensor, + q: torch.Tensor, + rel_pos_h: torch.Tensor, + rel_pos_w: torch.Tensor, + q_size: Tuple[int, int], + k_size: Tuple[int, int], +) -> torch.Tensor: + """ + Calculate decomposed Relative Positional Embeddings from :paper:`mvitv2`. + https://github.com/facebookresearch/mvit/blob/19786631e330df9f3622e5402b4a419a263a2c80/mvit/models/attention.py # noqa B950 + Args: + attn (Tensor): attention map. + q (Tensor): query q in the attention layer with shape (B, q_h * q_w, C). + rel_pos_h (Tensor): relative position embeddings (Lh, C) for height axis. + rel_pos_w (Tensor): relative position embeddings (Lw, C) for width axis. + q_size (Tuple): spatial sequence size of query q with (q_h, q_w). + k_size (Tuple): spatial sequence size of key k with (k_h, k_w). + + Returns: + attn (Tensor): attention map with added relative positional embeddings. 
+ """ + q_h, q_w = q_size + k_h, k_w = k_size + Rh = get_rel_pos(q_h, k_h, rel_pos_h) + Rw = get_rel_pos(q_w, k_w, rel_pos_w) + + B, _, dim = q.shape + r_q = q.reshape(B, q_h, q_w, dim) + rel_h = torch.einsum("bhwc,hkc->bhwk", r_q, Rh) + rel_w = torch.einsum("bhwc,wkc->bhwk", r_q, Rw) + + attn = ( + attn.view(B, q_h, q_w, k_h, k_w) + rel_h[:, :, :, :, None] + rel_w[:, :, :, None, :] + ).view(B, q_h * q_w, k_h * k_w) + + return attn + + +class PatchEmbed(nn.Module): + """ + Image to Patch Embedding. + """ + + def __init__( + self, + kernel_size: Tuple[int, int] = (16, 16), + stride: Tuple[int, int] = (16, 16), + padding: Tuple[int, int] = (0, 0), + in_chans: int = 3, + embed_dim: int = 768, + ) -> None: + """ + Args: + kernel_size (Tuple): kernel size of the projection layer. + stride (Tuple): stride of the projection layer. + padding (Tuple): padding size of the projection layer. + in_chans (int): Number of input image channels. + embed_dim (int): embed_dim (int): Patch embedding dimension. + """ + super().__init__() + + self.proj = nn.Conv2d( + in_chans, embed_dim, kernel_size=kernel_size, stride=stride, padding=padding + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + x = self.proj(x) + # B C H W -> B H W C + x = x.permute(0, 2, 3, 1) + return x + + + +if __name__ == '__main__': + import torch + transunet = TransUNet( + in_channels=1, + out_channels=96, + pretrained ='ssl', + patch_dim = 16, + class_num=8) + + print(sum(p.numel() for p in transunet.parameters())) + print(transunet(torch.randn(4, 1, 224, 224)).shape) diff --git a/model/unet_vit_classification.py b/model/unet_vit_classification.py new file mode 100644 index 0000000000000000000000000000000000000000..d195d2a9189f4de7724d15898dd8b2a91fc89351 --- /dev/null +++ b/model/unet_vit_classification.py @@ -0,0 +1,501 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + + +import torch +import torch.nn as nn +from einops import rearrange +from functools import partial +import torch.nn.functional as F + +from typing import Optional, Tuple, Type + +class vit_encoder_b(nn.Module): + def __init__(self, num_classes = 4): + super().__init__() + + prompt_embed_dim = 256 + image_size = 1024 + vit_patch_size = 16 + image_embedding_size = image_size // vit_patch_size + encoder_embed_dim=768 + encoder_depth=12 + encoder_num_heads=12 + encoder_global_attn_indexes=[2, 5, 8, 11] + + self.model =ImageEncoderViT( + depth=encoder_depth, + embed_dim=encoder_embed_dim, + img_size=image_size, + mlp_ratio=4, + norm_layer=partial(torch.nn.LayerNorm, eps=1e-6), + num_heads=encoder_num_heads, + patch_size=vit_patch_size, + qkv_bias=True, + use_rel_pos=True, + use_abs_pos = False, + global_attn_indexes=encoder_global_attn_indexes, + window_size=14, + out_chans=prompt_embed_dim, + ) + + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + self.fc = nn.Linear(768,1000) + def forward(self, x): + x = self.model(x) + x = self.avgpool(x) + x = torch.squeeze(x) + x = self.fc(x) + return x + +def load_weight_for_vit_encoder(pretrained, settings): + weight = None + if pretrained == 'lvm-med-vit': + path = settings['vit'][pretrained] + print(f'Pretrained path : {path}') + weight = torch.load(path, map_location = 'cpu') + print(f'Number of params in original checkpoint : {len(weight)}') + for key in list(weight.keys()): + weight['model.' 
+ key] = weight[key] + del weight[key] + print(f'Number of params in final checkpoint : {len(weight)}') + return weight + + + +class MLPBlock(nn.Module): + def __init__( + self, + embedding_dim: int, + mlp_dim: int, + act: Type[nn.Module] = nn.GELU, + ) -> None: + super().__init__() + self.lin1 = nn.Linear(embedding_dim, mlp_dim) + self.lin2 = nn.Linear(mlp_dim, embedding_dim) + self.act = act() + + def forward(self, x: torch.Tensor) -> torch.Tensor: + return self.lin2(self.act(self.lin1(x))) + +class LayerNorm2d(nn.Module): + def __init__(self, num_channels: int, eps: float = 1e-6) -> None: + super().__init__() + self.weight = nn.Parameter(torch.ones(num_channels)) + self.bias = nn.Parameter(torch.zeros(num_channels)) + self.eps = eps + + def forward(self, x: torch.Tensor) -> torch.Tensor: + u = x.mean(1, keepdim=True) + s = (x - u).pow(2).mean(1, keepdim=True) + x = (x - u) / torch.sqrt(s + self.eps) + x = self.weight[:, None, None] * x + self.bias[:, None, None] + return x + + + + + +# This class and its supporting functions below lightly adapted from the ViTDet backbone available at: https://github.com/facebookresearch/detectron2/blob/main/detectron2/modeling/backbone/vit.py # noqa +class ImageEncoderViT(nn.Module): + def __init__( + self, + img_size: int = 1024, + patch_size: int = 16, + in_chans: int = 3, + embed_dim: int = 768, + depth: int = 12, + num_heads: int = 12, + mlp_ratio: float = 4.0, + out_chans: int = 256, + qkv_bias: bool = True, + norm_layer: Type[nn.Module] = nn.LayerNorm, + act_layer: Type[nn.Module] = nn.GELU, + include_neck: bool = False, + use_abs_pos: bool = True, + use_rel_pos: bool = False, + rel_pos_zero_init: bool = True, + window_size: int = 0, + global_attn_indexes: Tuple[int, ...] = (), + ) -> None: + """ + Args: + img_size (int): Input image size. + patch_size (int): Patch size. + in_chans (int): Number of input image channels. + embed_dim (int): Patch embedding dimension. + depth (int): Depth of ViT. + num_heads (int): Number of attention heads in each ViT block. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool): If True, add a learnable bias to query, key, value. + norm_layer (nn.Module): Normalization layer. + act_layer (nn.Module): Activation layer. + use_abs_pos (bool): If True, use absolute positional embeddings. + use_rel_pos (bool): If True, add relative positional embeddings to the attention map. + rel_pos_zero_init (bool): If True, zero initialize relative positional parameters. + window_size (int): Window size for window attention blocks. + global_attn_indexes (list): Indexes for blocks using global attention. + """ + super().__init__() + self.img_size = img_size + + self.patch_embed = PatchEmbed( + kernel_size=(patch_size, patch_size), + stride=(patch_size, patch_size), + in_chans=in_chans, + embed_dim=embed_dim, + ) + + self.pos_embed: Optional[nn.Parameter] = None + if use_abs_pos: + # Initialize absolute positional embedding with pretrain image size. 
+ self.pos_embed = nn.Parameter( + torch.zeros(1, img_size // patch_size, img_size // patch_size, embed_dim) + ) + + self.blocks = nn.ModuleList() + for i in range(depth): + block = Block( + dim=embed_dim, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + norm_layer=norm_layer, + act_layer=act_layer, + use_rel_pos=use_rel_pos, + rel_pos_zero_init=rel_pos_zero_init, + window_size=window_size if i not in global_attn_indexes else 0, + input_size=(img_size // patch_size, img_size // patch_size), + ) + self.blocks.append(block) + +# if self.include_neck : + self.neck = nn.Sequential( + nn.Conv2d( + embed_dim, + out_chans, + kernel_size=1, + bias=False, + ), + LayerNorm2d(out_chans), + nn.Conv2d( + out_chans, + out_chans, + kernel_size=3, + padding=1, + bias=False, + ), + LayerNorm2d(out_chans), + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + x = self.patch_embed(x) + if self.pos_embed is not None: + x = x + self.pos_embed + + for blk in self.blocks: + x = blk(x) +# print(x.grad_fn) + +# if self.include_neck : +# x = self.neck(x.permute(0, 3, 1, 2)) +# else : +# x = x.permute(0, 3, 1, 2) + +# x = self.neck(x.permute(0, 3, 1, 2)) + x = x.permute(0, 3, 1, 2) + return x + + +class Block(nn.Module): + """Transformer blocks with support of window attention and residual propagation blocks""" + + def __init__( + self, + dim: int, + num_heads: int, + mlp_ratio: float = 4.0, + qkv_bias: bool = True, + norm_layer: Type[nn.Module] = nn.LayerNorm, + act_layer: Type[nn.Module] = nn.GELU, + use_rel_pos: bool = False, + rel_pos_zero_init: bool = True, + window_size: int = 0, + input_size: Optional[Tuple[int, int]] = None, + ) -> None: + """ + Args: + dim (int): Number of input channels. + num_heads (int): Number of attention heads in each ViT block. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool): If True, add a learnable bias to query, key, value. + norm_layer (nn.Module): Normalization layer. + act_layer (nn.Module): Activation layer. + use_rel_pos (bool): If True, add relative positional embeddings to the attention map. + rel_pos_zero_init (bool): If True, zero initialize relative positional parameters. + window_size (int): Window size for window attention blocks. If it equals 0, then + use global attention. + input_size (int or None): Input resolution for calculating the relative positional + parameter size. 
+ """ + super().__init__() + self.norm1 = norm_layer(dim) + self.attn = Attention( + dim, + num_heads=num_heads, + qkv_bias=qkv_bias, + use_rel_pos=use_rel_pos, + rel_pos_zero_init=rel_pos_zero_init, + input_size=input_size if window_size == 0 else (window_size, window_size), + ) + + self.norm2 = norm_layer(dim) + self.mlp = MLPBlock(embedding_dim=dim, mlp_dim=int(dim * mlp_ratio), act=act_layer) + + self.window_size = window_size + + def forward(self, x: torch.Tensor) -> torch.Tensor: + shortcut = x + x = self.norm1(x) + # Window partition + if self.window_size > 0: + H, W = x.shape[1], x.shape[2] + x, pad_hw = window_partition(x, self.window_size) + + x = self.attn(x) + # Reverse window partition + if self.window_size > 0: + x = window_unpartition(x, self.window_size, pad_hw, (H, W)) + + x = shortcut + x + x = x + self.mlp(self.norm2(x)) + + return x + + +class Attention(nn.Module): + """Multi-head Attention block with relative position embeddings.""" + + def __init__( + self, + dim: int, + num_heads: int = 8, + qkv_bias: bool = True, + use_rel_pos: bool = False, + rel_pos_zero_init: bool = True, + input_size: Optional[Tuple[int, int]] = None, + ) -> None: + """ + Args: + dim (int): Number of input channels. + num_heads (int): Number of attention heads. + qkv_bias (bool: If True, add a learnable bias to query, key, value. + rel_pos (bool): If True, add relative positional embeddings to the attention map. + rel_pos_zero_init (bool): If True, zero initialize relative positional parameters. + input_size (int or None): Input resolution for calculating the relative positional + parameter size. + """ + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = head_dim**-0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.proj = nn.Linear(dim, dim) + + self.use_rel_pos = use_rel_pos + if self.use_rel_pos: + assert ( + input_size is not None + ), "Input size must be provided if using relative positional encoding." + # initialize relative positional embeddings + self.rel_pos_h = nn.Parameter(torch.zeros(2 * input_size[0] - 1, head_dim)) + self.rel_pos_w = nn.Parameter(torch.zeros(2 * input_size[1] - 1, head_dim)) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + B, H, W, _ = x.shape + # qkv with shape (3, B, nHead, H * W, C) + qkv = self.qkv(x).reshape(B, H * W, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4) + # q, k, v with shape (B * nHead, H * W, C) + q, k, v = qkv.reshape(3, B * self.num_heads, H * W, -1).unbind(0) + + attn = (q * self.scale) @ k.transpose(-2, -1) + + if self.use_rel_pos: + attn = add_decomposed_rel_pos(attn, q, self.rel_pos_h, self.rel_pos_w, (H, W), (H, W)) + + attn = attn.softmax(dim=-1) + x = (attn @ v).view(B, self.num_heads, H, W, -1).permute(0, 2, 3, 1, 4).reshape(B, H, W, -1) + x = self.proj(x) + + return x + + +def window_partition(x: torch.Tensor, window_size: int) -> Tuple[torch.Tensor, Tuple[int, int]]: + """ + Partition into non-overlapping windows with padding if needed. + Args: + x (tensor): input tokens with [B, H, W, C]. + window_size (int): window size. + + Returns: + windows: windows after partition with [B * num_windows, window_size, window_size, C]. 
+ (Hp, Wp): padded height and width before partition + """ + B, H, W, C = x.shape + + pad_h = (window_size - H % window_size) % window_size + pad_w = (window_size - W % window_size) % window_size + if pad_h > 0 or pad_w > 0: + x = F.pad(x, (0, 0, 0, pad_w, 0, pad_h)) + Hp, Wp = H + pad_h, W + pad_w + + x = x.view(B, Hp // window_size, window_size, Wp // window_size, window_size, C) + windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C) + return windows, (Hp, Wp) + + +def window_unpartition( + windows: torch.Tensor, window_size: int, pad_hw: Tuple[int, int], hw: Tuple[int, int] +) -> torch.Tensor: + """ + Window unpartition into original sequences and removing padding. + Args: + x (tensor): input tokens with [B * num_windows, window_size, window_size, C]. + window_size (int): window size. + pad_hw (Tuple): padded height and width (Hp, Wp). + hw (Tuple): original height and width (H, W) before padding. + + Returns: + x: unpartitioned sequences with [B, H, W, C]. + """ + Hp, Wp = pad_hw + H, W = hw + B = windows.shape[0] // (Hp * Wp // window_size // window_size) + x = windows.view(B, Hp // window_size, Wp // window_size, window_size, window_size, -1) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, Hp, Wp, -1) + + if Hp > H or Wp > W: + x = x[:, :H, :W, :].contiguous() + return x + + +def get_rel_pos(q_size: int, k_size: int, rel_pos: torch.Tensor) -> torch.Tensor: +# """ +# Get relative positional embeddings according to the relative positions of +# query and key sizes. +# Args: +# q_size (int): size of query q. +# k_size (int): size of key k. +# rel_pos (Tensor): relative position embeddings (L, C). + +# Returns: +# Extracted positional embeddings according to relative positions. +# """ + max_rel_dist = int(2 * max(q_size, k_size) - 1) + # Interpolate rel pos if needed. + if rel_pos.shape[0] != max_rel_dist: + # Interpolate rel pos. + rel_pos_resized = F.interpolate( + rel_pos.reshape(1, rel_pos.shape[0], -1).permute(0, 2, 1), + size=max_rel_dist, + mode="linear", + ) + rel_pos_resized = rel_pos_resized.reshape(-1, max_rel_dist).permute(1, 0) + else: + rel_pos_resized = rel_pos + + # Scale the coords with short length if shapes for q and k are different. + q_coords = torch.arange(q_size)[:, None] * max(k_size / q_size, 1.0) + k_coords = torch.arange(k_size)[None, :] * max(q_size / k_size, 1.0) + relative_coords = (q_coords - k_coords) + (k_size - 1) * max(q_size / k_size, 1.0) + + return rel_pos_resized[relative_coords.long()] + + +def add_decomposed_rel_pos( + attn: torch.Tensor, + q: torch.Tensor, + rel_pos_h: torch.Tensor, + rel_pos_w: torch.Tensor, + q_size: Tuple[int, int], + k_size: Tuple[int, int], +) -> torch.Tensor: + """ + Calculate decomposed Relative Positional Embeddings from :paper:`mvitv2`. + https://github.com/facebookresearch/mvit/blob/19786631e330df9f3622e5402b4a419a263a2c80/mvit/models/attention.py # noqa B950 + Args: + attn (Tensor): attention map. + q (Tensor): query q in the attention layer with shape (B, q_h * q_w, C). + rel_pos_h (Tensor): relative position embeddings (Lh, C) for height axis. + rel_pos_w (Tensor): relative position embeddings (Lw, C) for width axis. + q_size (Tuple): spatial sequence size of query q with (q_h, q_w). + k_size (Tuple): spatial sequence size of key k with (k_h, k_w). + + Returns: + attn (Tensor): attention map with added relative positional embeddings. 
+ """ + q_h, q_w = q_size + k_h, k_w = k_size + Rh = get_rel_pos(q_h, k_h, rel_pos_h) + Rw = get_rel_pos(q_w, k_w, rel_pos_w) + + B, _, dim = q.shape + r_q = q.reshape(B, q_h, q_w, dim) + rel_h = torch.einsum("bhwc,hkc->bhwk", r_q, Rh) + rel_w = torch.einsum("bhwc,wkc->bhwk", r_q, Rw) + + attn = ( + attn.view(B, q_h, q_w, k_h, k_w) + rel_h[:, :, :, :, None] + rel_w[:, :, :, None, :] + ).view(B, q_h * q_w, k_h * k_w) + + return attn + + +class PatchEmbed(nn.Module): + """ + Image to Patch Embedding. + """ + + def __init__( + self, + kernel_size: Tuple[int, int] = (16, 16), + stride: Tuple[int, int] = (16, 16), + padding: Tuple[int, int] = (0, 0), + in_chans: int = 3, + embed_dim: int = 768, + ) -> None: + """ + Args: + kernel_size (Tuple): kernel size of the projection layer. + stride (Tuple): stride of the projection layer. + padding (Tuple): padding size of the projection layer. + in_chans (int): Number of input image channels. + embed_dim (int): embed_dim (int): Patch embedding dimension. + """ + super().__init__() + + self.proj = nn.Conv2d( + in_chans, embed_dim, kernel_size=kernel_size, stride=stride, padding=padding + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + x = self.proj(x) + # B C H W -> B H W C + x = x.permute(0, 2, 3, 1) + return x + + + +if __name__ == '__main__': + import torch + transunet = TransUNet( + in_channels=1, + out_channels=96, + class_num=8) + + print(sum(p.numel() for p in transunet.parameters())) + print(transunet(torch.randn(4, 1, 224, 224)).shape) diff --git a/notebook/Prompt_Demo.ipynb b/notebook/Prompt_Demo.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..e1d3deabf713cb9718d34dce3aa832bd741f2e74 --- /dev/null +++ b/notebook/Prompt_Demo.ipynb @@ -0,0 +1,454 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 8, + "id": "18b63e44", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "/home/nghiemtd\n", + "/home/nghiemtd/LVM-Med\n" + ] + } + ], + "source": [ + "## Notebook demo bouding box-based prompt segmentation\n", + "# import library\n", + "%cd ..\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "import os \n", + "import json\n", + "import gc\n", + "from tqdm import tqdm\n", + "from torch.utils.data.dataloader import default_collate\n", + "import torch\n", + "from torch.nn import functional as F\n", + "from torch.utils.data import Dataset, DataLoader\n", + "from dataloader import ResizeLongestSide\n", + "import matplotlib.pyplot as plt\n", + "from segment_anything import sam_model_registry, SamPredictor\n", + "import shutil\n", + "import cv2\n", + "from utils import compute_dice_coefficient\n", + "from segment_anything import our_vit\n", + "from skimage.transform import resize\n", + "import random\n", + "%cd LVM-Med\n", + "seed = 2023\n", + "torch.manual_seed(seed)\n", + "np.random.seed(seed)\n", + "random.seed(seed)\n", + "cuda_string = 'cuda:' + \"2\"\n", + "device = torch.device(cuda_string if torch.cuda.is_available() else 'cpu')" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "8fdfbee1", + "metadata": {}, + "outputs": [], + "source": [ + "## Define functions for loading and preprocessing images\n", + "def preprocess(name_dataset, img, img_size, is_mask):\n", + " # preprocess image and mask\n", + " original_shape = img.shape\n", + " img = np.asarray(img)\n", + " if not is_mask:\n", + " img = resize(img, (img_size, img_size), order=0, preserve_range=True, anti_aliasing=False).astype('uint8')\n", + " img = ((img 
- img.min()) * (1/(img.max() - img.min()) * 255)).astype('uint8')\n", + " if img.shape[-1]>3 and len(img.shape)==3:\n", + " img = img[:,:,:3]\n", + " if len(img.shape)==2:\n", + " img = np.repeat(img[:,:,None], 3, axis=-1)\n", + " else:\n", + " img = resize(img, (256, 256), order=0, preserve_range=True, anti_aliasing=False).astype('uint8')\n", + " if len(img.shape) == 3:\n", + " img = img[:, :, 0]\n", + " return img, original_shape\n", + "\n", + "def load(filename, is_mask=False):\n", + " # load image and mask\n", + " if is_mask:\n", + " mask = cv2.imread(filename, 0)\n", + " return mask\n", + " else:\n", + " image = cv2.imread(filename)\n", + " return image" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "820b2190", + "metadata": {}, + "outputs": [], + "source": [ + "## Define functions for drawing\n", + "def draw_bbox(image, box, label, color, thickness=3):\n", + " # draw box for prompt\n", + " alpha = 0.1\n", + " alpha_box = 0.4\n", + " overlay_bbox = image.copy()\n", + " overlay_text = image.copy()\n", + " output = image.copy()\n", + "\n", + " text_width, text_height = cv2.getTextSize(label.upper(), cv2.FONT_HERSHEY_SIMPLEX, 0.6, 1)[0]\n", + " cv2.rectangle(overlay_bbox, (int(box[0]), int(box[1])), (int(box[2]), int(box[3])),\n", + " color, thickness)\n", + " return overlay_bbox\n", + "\n", + "def plot_img(img, size=(10, 10), is_rgb=True):\n", + " plt.figure(figsize=size)\n", + " plt.imshow(img)\n", + " plt.show()\n", + "\n", + "def mask_color_img(img, mask, alpha, color):\n", + " # using overlay_image function below to draw masks into images.\n", + " mask = np.where(mask>0)\n", + " out = img.copy()\n", + " img_layer = img.copy()\n", + " img_layer[mask] = color\n", + " out = cv2.addWeighted(img_layer, alpha, out, 1 - alpha, 0)\n", + " return(out)\n", + "\n", + "def overlay_image(img_slice, mask_pred, color):\n", + " # This function is used to draw contours and masks into images. It returns images which have masks and contours on them\n", + " np_mask = mask_pred.copy()\n", + " np_img = img_slice.copy()\n", + " np_mask[np_mask > 0] = 1\n", + " contours, hierachy = cv2.findContours(np_mask, cv2.RETR_LIST, cv2.CHAIN_APPROX_NONE)\n", + " contour = [] \n", + " if len(contours) != 0:\n", + " for i in range(0, len(contours)):\n", + " if len(contours[i]) > len(contour):\n", + " contour = contours[i]\n", + " else:\n", + " contour = contours\n", + " color_tuple = tuple([int(x) for x in color])\n", + " cv2.drawContours(np_img, contour, -1, color_tuple, 2);\n", + " r = mask_color_img(np_img, np_mask, 0.25, color)\n", + " return r\n", + "\n", + "def draw_demo(out, image_demo, boxes):\n", + " ''' mask is asigned to out variable. 
Through out, boxes, and image_demo, \n", + " this function return output which masks, contours, and boxxes are drawn on images'''\n", + " out = resize(out, (256, 256), order=0, preserve_range=True, anti_aliasing=False).astype('uint8')\n", + " image_demo = resize(image_demo, (256, 256), order=0, preserve_range=True, anti_aliasing=False).astype('uint8')\n", + " image_with_masks = overlay_image(image_demo, out, color=(255,0,0))\n", + " box = boxes.cpu().numpy()\n", + " box[0] = int(box[0])\n", + " box[1] = int(box[1])\n", + " box[2] = int(box[2])\n", + " box[3] = int(box[3])\n", + "\n", + " output = draw_bbox(image_with_masks, box, \"test\", (255,0,0), thickness=1)\n", + " return output\n", + "\n", + "def grid_display(list_of_images, list_of_titles=[], no_of_columns=2, figsize=(10,10)):\n", + " # draw 4 images in a row\n", + " \n", + " fig = plt.figure(figsize=figsize)\n", + " column = 0\n", + " for i in range(len(list_of_images)):\n", + " column += 1\n", + " # check for end of column and create a new figure\n", + " if column == no_of_columns+1:\n", + " fig = plt.figure(figsize=figsize)\n", + " column = 1\n", + " fig.add_subplot(1, no_of_columns, column)\n", + " plt.imshow(list_of_images[i])\n", + " plt.axis('off')\n", + " if len(list_of_titles) >= len(list_of_images):\n", + " plt.title(list_of_titles[i], fontdict={'fontsize': 20})\n", + " plt.show()\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "64fefd18", + "metadata": {}, + "outputs": [], + "source": [ + "## Define functions for extracting predicted masks\n", + "def get_bbox_from_mask(mask):\n", + " # Returns a bounding box from a mask\n", + " \n", + " y_indices, x_indices = np.where(mask > 0)\n", + " x_min, x_max = np.min(x_indices), np.max(x_indices)\n", + " y_min, y_max = np.min(y_indices), np.max(y_indices)\n", + " # add perturbation to bounding box coordinates\n", + " H, W = mask.shape\n", + " x_min = max(0, x_min - 9)\n", + " x_max = min(W, x_max + 5)\n", + " y_min = max(0, y_min - 9)\n", + " y_max = min(H, y_max + 5)\n", + "\n", + " return np.array([x_min, y_min, x_max, y_max])\n", + "\n", + "def process(back, sam_model, image, boxes):\n", + " # this function is used to get the mask output of each image after using prompt to predict mask.\n", + " with torch.no_grad():\n", + " sam_model = sam_model.to(device)\n", + " encoder = back.to(device)\n", + " encoder = encoder.to(\"cuda:2\")\n", + " image_embedding = sam_model.preprocess(image[:, :, :])\n", + " image_embedding = image_embedding.to(\"cuda:2\")\n", + "\n", + " image_embedding = encoder(image_embedding)\n", + " image_embedding = image_embedding.cpu().numpy()[0]\n", + " image_embedding = torch.tensor(image_embedding).float()\n", + "\n", + " box_np = boxes.numpy()\n", + " sam_trans = ResizeLongestSide(sam_model.image_encoder.img_size)\n", + " box = sam_trans.apply_boxes(box_np, (image_embedding.shape[0], image_embedding.shape[1]))\n", + " box_torch = torch.as_tensor(box, dtype=torch.float32, device=device)\n", + " if len(box_torch.shape) == 2:\n", + " box_torch = box_torch[:, None, :] # (B, 1, 4)\n", + "\n", + " \"\"\"\n", + " Prompt encoder component\n", + " \"\"\"\n", + " prompt_encoder = sam_model.prompt_encoder.to(device)\n", + " prompt_encoder = prompt_encoder.to(device)\n", + " box_torch = box_torch.to(device)\n", + " sparse_embeddings, dense_embeddings = prompt_encoder(\n", + " points=None,\n", + " boxes=box_torch,\n", + " masks=None,\n", + " )\n", + "\n", + " \"\"\"\n", + " Mask decoder component\n", + " \"\"\"\n", + " sam_model = 
sam_model.to(device)\n", + " mask_segmentation, iou_predictions = sam_model.mask_decoder(\n", + " image_embeddings=image_embedding.to(device), # (B, 256, 64, 64)\n", + " image_pe=sam_model.prompt_encoder.get_dense_pe(), # (1, 256, 64, 64)\n", + " sparse_prompt_embeddings=sparse_embeddings, # (B, 2, 256)\n", + " dense_prompt_embeddings=dense_embeddings, # (B, 256, 64, 64)\n", + " multimask_output=False,\n", + " )\n", + " \n", + " out = torch.sigmoid(mask_segmentation[0, 0, :, :]).cpu().numpy().squeeze()\n", + " out[out <= 0.5] = 0\n", + " out[out > 0.5] = 1\n", + " return out\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "7cac8512", + "metadata": {}, + "outputs": [], + "source": [ + "def demo(dataset_name):\n", + " # this function draw three outputs of three models and groundtruth mask\n", + " file = \"./notebook/image_demo\"\n", + " dir_image = os.path.join(os.path.join(file, dataset_name), \"test\")\n", + " dir_label = os.path.join(os.path.join(file, dataset_name), \"test_labels\")\n", + " gc.collect()\n", + " torch.cuda.empty_cache()\n", + " torch.cuda.reset_peak_memory_stats()\n", + "\n", + " arch = 'vit_encoder_b'\n", + " backbone = our_vit.__dict__[arch]()\n", + " weight = torch.load('./lvm_med_weights/lvmmed_vit.pth', map_location ='cpu')\n", + " backbone.load_state_dict(weight)\n", + " \n", + " sam_model_our = sam_model_registry['vit_b'](checkpoint=\"./working_dir/checkpoint/sam_model_best_large_ssl_\" + dataset_name + \".pth\")\n", + " sam_model_our.eval()\n", + " backbone.eval()\n", + "\n", + " sam_model_finetune = sam_model_registry['vit_b'](checkpoint=\"./working_dir/checkpoint/sam_model_best_original_\" + dataset_name + \".pth\")\n", + " sam_model_finetune.eval()\n", + "\n", + " sam_model_nofinetune = sam_model_registry['vit_b'](checkpoint=\"./working_dir/sam_vit_b_01ec64.pth\")\n", + " sam_model_nofinetune.eval()\n", + " for i in os.listdir(dir_image):\n", + " image_file = os.path.join(dir_image, i)\n", + " image = load(image_file, False)\n", + " image_demo = image.copy()\n", + " if dataset_name == \"isiconlytrain\": \n", + " a = i.replace(\".jpg\", \"_segmentation.png\")\n", + " else:\n", + " a = i\n", + " label_file = os.path.join(dir_label, a)\n", + " label = load(label_file, True)\n", + " label[label < 50] = 0\n", + " label[label > 200] = 1\n", + " label_demo = label.copy()\n", + " \n", + " image, original_image_shape = preprocess(dataset_name, image, 1024, is_mask=False) # (1, 224, 224)\n", + " label, original_mask_shape = preprocess(dataset_name, label, 1024, is_mask=True)\n", + "\n", + " boxes = get_bbox_from_mask(label)\n", + " boxes = torch.tensor(boxes).float()\n", + " image = torch.as_tensor(image.copy()).permute(2, 0, 1).float().contiguous()\n", + " image = image[None, :, :, :]\n", + " label = label[None, :, :]\n", + " label = torch.tensor(label[None, :,:]).long()\n", + " label = label.to(\"cuda:2\", dtype=torch.float32)\n", + " image = image.to(\"cuda:2\")\n", + "\n", + " out_our = process(backbone, sam_model_our, image, boxes)\n", + " out_finetune = process(sam_model_finetune.image_encoder, sam_model_finetune, image, boxes)\n", + " out_nofinetune = process(sam_model_nofinetune.image_encoder, sam_model_nofinetune, image, boxes)\n", + "\n", + " a_our = compute_dice_coefficient(label.cpu()>0, out_our>0).item()*100\n", + " a_finetune = compute_dice_coefficient(label.cpu()>0, out_finetune>0).item()*100\n", + " a_nofinetune = compute_dice_coefficient(label.cpu()>0, out_nofinetune>0).item()*100\n", + "\n", + " output_our = draw_demo(out_our, 
image_demo, boxes)\n", + " output_finetune = draw_demo(out_finetune, image_demo, boxes)\n", + " output_nofinetune = draw_demo(out_nofinetune, image_demo, boxes)\n", + "\n", + " image_demo = resize(image_demo, (256, 256), order=0, preserve_range=True, anti_aliasing=False).astype('uint8')\n", + " label_demo = resize(label_demo, (256, 256), order=0, preserve_range=True, anti_aliasing=False).astype('uint8')\n", + " groundtruth = overlay_image(image_demo, label_demo, color=(255,0,0))\n", + " \n", + " print(i)\n", + " titles = [\"groundtruth\", \"LVM-Med (SAM’s ViT): dice = \" + str(round(a_our, 2)), \n", + " \"SAM (fine-tuning): dice = \" + str(round(a_finetune, 2)), \"SAM (no fine-tuning): dice = \" + str(round(a_nofinetune, 2))] \n", + " images = [cv2.cvtColor(groundtruth, cv2.COLOR_BGR2RGB), cv2.cvtColor(output_our, cv2.COLOR_BGR2RGB),\n", + " cv2.cvtColor(output_finetune, cv2.COLOR_BGR2RGB), cv2.cvtColor(output_nofinetune, cv2.COLOR_BGR2RGB)] \n", + " grid_display(images, titles, 4, (30,30))" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "b386fb28", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ISIC_0001191.jpg\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAABpgAAAGPCAYAAABI09P1AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOy9d7wsyVnf/X2qunvmnHPTJmm1q7BkESyEyCJIIL0gGzAYAwbZGGEkYzDJGJtshAEDRtiYYGwJw2JAgG1AMsYgQLBkIYQkMjIIlqy04YZzzsx0Vz3vH09Vd8+cmRPuvbv33t363c/cOdOxwlPPr7qfUKKqFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQcF+5aF6CgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKDgxkIxMBUUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBScCMXAVFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUHAiFANTQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBwYlQDEwFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFJ0IxMBUUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBScCMXAVFCQICIvFBEVkWde67IcBhG5K5Xz7mtdloKCgoKCgoKCgoIbASLyziKyEJF/vWbf+4jIz4jI29I8+/Vp+93p910Pd3mvd4jI81LbPO8a3PsLRaQVkSc/TPd7ZqrrC1e23yMi+nCUoaCg4JGFwzjpWkJEzojIt4rIvSLSJd331E16sMCQ2uaea3DfO0VkX0S+9mG8570icu/Ktms2Jyi4PlAMTAUFR+BaEMW1IqeCgoKCq4XL1WMi8r0i8hYR2XkIivWoxdXgFRF573Sd51+lYh3nngfKfaM4hBQU3OgQES8iLxCRXxCR+9ML/beIyG+LyHeJyN894vx/mMaqishHHHLcM0fH/amIyIbjTonIhdGxd52wSv8BuA/49pXrngF+Ang/4IeArwb+ywmv/bCiGDb4TuCtwIuudUEeLRCRiYj8cxF5dTLEXhKRP0gvop90zPN/N43dvzzBfUVEniMi3yYirxeRB0RkJiJvEJFvEZHHbjjvX4nI/00vQi8l3fE7IvIfROTxJ6l7wfWBRwsnXQf498DnAr8DfD3GiW+6piU6BI92w4aq/hU2Z/lCEXnCtS7PowUi8sEi8vLEMTMR+fPEOc/ZcLwe8nnVCe77wiOupSLyxiOuIWJOVfn46qT1X4diYCooKCgoKGAg/Q37fiXt/zvHuM4b0rHvlX6PJwH//ZDznjE67t4Tlv3e0bkffshx3zM67oUnucfDARF5X+BTgW9Q1d2VfVMR+SIR+XUROS/m8fc3IvKbIvLtIvKMI6795aO6v8shxz1vdNwvHnLcXSISN8nNqN9feGTFjwkR+YF0zc8+xrE/nY79e2v2jet4rA+Aqv4m8DLga0Tk1NWqV8FmpAeAFyS5vyQiuyLyGhH5ZyJyYB4vIufEXqr9gIj8vgyep8++jHuf6FqprCd+CVhwfUJEPPB/gBcDTwH+L/DNwPcDfwM8FzjK6/qfAjr6+yh0wF3A/7dh/ycDp9NxJ4KIPB34KODbVHVvZff7AY8BvlFVP0dVX6iq2cD0pcC7An910ns+CvBjWNv82MN9Y1XdB74F+KjUt9cK/xhrg0c00sunV2Ivwk8DP4i90HwL9iL6t0Tk3Y64zL8DjjRErcEE+ElMh7wV+G+YgXEGfH669zutOe8zgTuAXwD+czrvPuBfAL+X5+kFNwYeZZx0rfHRwP9T1Y9R1a9MnPgm4NWYvrveDGLXC94V44RrgW8CGuArr9H94RrOCR5uiMhnAb8EPCt9/0eMa54B/KSIfPmGU/8MM9iufr7rBLe/Z8M1vhp4bTrmJ4+4xucAH4bx6FXDVbFSFRQUFBQUPMLxEuDpwPOxB5q1SEaOdwZeo6qvW9ndAZ8gIp+nqg+uOf0F6Zgr4eYulfHn1pTtDPBJV+EeDyW+DriAvTjokYwZvwA8DfOg+5H0fQp4T+wh8Vw65gBERLB2UUCwtv6iI8rSAR8iIu+iqm9Ys//56Vqb2jNv2z/iPifBS7AH6OdjL0vWQsyD8tnYA/ePp83vCuQH2Ndjk9Ax7gI+DZv43n1IGb4e+HXg87CXRdcC345FGfz5Nbr/w4nvx/r8LdgLvT3sRcd3Yjpp9UH2LszzFOAvgbcBl2vYOem18kvABfCLwM8CHvhw7CXgJ4vIh6jqH11meQoeXnwK8Bzgt4BnqOr58U4R2Qbef9PJyZ
D/oZgc3AT8XRF5rKq++ZB7/iz2wPsC4KfX7H8Bptf+/LB7b8A/ByKwztHjjvT916s7VPVv0j0LVpBk4vyRBz50+H6Mkz4b+NVrUQBVfTTwEMDfAz4IMzJ9hKrGvENEvhr4N9i86p+sO1ks4vhfYH31neuOOQQB+ArgP6vqA6NrOmwu9JlYJMjHrJz3Hqp64OWZiLwAM1J8HXCk41jBdYNHEydda9yBzeOWkAxhf/jwF+fGgKpes7ZR1b8WkZ8Bnisi/2p1fDxMZbjWc4KHBSJSY3OPGfDe4/cEIvLvgNcBXy4iL1LV+crp96rqC6/k/qp6D2ZkWi2XBz4j/XzxIeV/F+AbsQjwT+byHD82Fq58ygdVBXtR9vnA72OD5a+wlzhngXuxwZCPfR72ou55GNHfgykTHR1zFht4b0jXewB4BfDsNffur7ehbArcs7LthWn7M4FPwDwq9oD7sRdPd2641nsDPwVcxF5k/izwgePrrZRp3eeF6Zi70u+7sZfKP4y9BIqj6yy13aY6XOY970p1fVtq49cAH32tZal8yudG/OSxtmHfFvAg9uL0sYdc4/vSdV4w2pbH+Y+l73++5rybMEPEj6Zj7j1h2e8d3WMG3LLmmM9Kx+R7vPBhaM97TnD8Oyfd+eI1+74iXe8VQLOh/Z5+yLU/Mp3/PdiD4FvXXScdm/Vw7q9vWnOMxzjy1diL9wNygxkiF8Djr3K7viGV62mHHPM16ZivO8F1n3ncPgP+ADNEuYdShi5Hjh5JH+yFngJ/Atw62t5ghkMFPn7lnJswb7qb0++703EH5l7HuP+JrgXUwJcDN61sd5inuwI/fq3btXyO3f//OfXZF1zm+S9K5z8X85RU4Is3HJv1z/cDL02687aVY56S9Rrwy+nvu45ZljPAHPille13sXne/bx0zN2r9+IK5uLYS9Kfx+YUs6RPvwKYHLMuh5X5ntFxG3Xn1aoTG57fSM8+wA7mVf3nqf3/GPhiQNZc69jPoSvn/Xw6/sxVkvvHYpEub8bmZa/HnC+yjL5w5fh72Dx3/AhMV78l1f8vgJez/ln4I7F5w9vSsW9MbXfuoRrjJ2yXL071/xdr9j2NQ/Q7Nv7uBX5mJJt/eZXKdUe63sUTnHM2nfNH17pdy+dEff2I56TR/nvS9Srgy4A/GumQb2TzM8yzsPdc96fj/x/wDcDZY5Yr33cttxylBy+jvE/GOOcvUhu/ObX3u5ygXzeVue8P1nDemr6+4jqN22q07YVc3jvL98WMmoe+s1w559PSvs+8SmNOsLHyexyDlznknS7weOBbUzvup/q/GvjKDcd+O/b8M8ciT/838L5Xo15XoV0em+r5Wxv2/3baf8vK9o3zsqtUro9J9/i1Q46pUrv/LuYYeG+W86tRhpIir2CM78BSDZzFLJ4/iE2MfwZ7abAOn4CFKl/EXh78MFhaFcyT7Esww9O3YB7nHwj8tIh85lUs92dj5H9vqsPvAv8A+FkRmYwPTKHIv4R5dv8kprgWGIGsep28nsHDezWU8Z6VY98B8+i+C/gBrP0uXEZdTnLPJ2HK4S7spfYPA+8BvFxEPuwy7l1QULABaqlYvh/Thc9bd0zSe58AXML05yp+CjNGrFu/5lOBKRahciV4CTZZ+NQ1+16ATYp/atPJIrItIl+a0lvtppRcvyYin7Lh+EZEvlJE3igic7E85V+7qnuPiX+CTWR/eM2+nP7mO1V1sbpTVR9Q1cO8l1+Qvl+C6ehbsZf3h+H3gF8DPi15Ko3xUdhLjbX9lbxqPxD4IVX9y5V9TxGRHxRLazgXkbeKyGtTCrFNXDtGvucL1u1M3kufjk0Wv2u0/Wqu7fdDwBPZnDLkRDipHMkhazCJyJNF5LtH7fsWEfmllMpg3bF3i8hfiKVcfLOIvFQOSaH4MCPL6Der6tvyxjQGcgqMzxmfkMbCK1X1/iu9+Umvpaqtqn6djjzM0/YI/Nv085lXWq6Chw33pe93PumJItJgLzrOY8b6/ILu+Smi9DC8BOPaT1vZ/gJMr/23k5YH81pvsJeAYzyIzbFfnn6/nGHe/fpjXPdEc3ER+W6sLd4Rey76Duwly9cAPyXHy4Gfy/xn6ff4WeHuY5x/FK7W80WNOYX8feyZ67swZ51vwKJdVnE5z6EAv4LNez50vFEsje2JUg6LyK3Y8+s/wV7MfgsmB/8Fi745NlJUzyswnfcKLJXXK7Fo4n+0cuxXYXOz98fWAvtWzBj3RcCvpAj0a43fS99/Ww6mZ/3o9P2zG879Vsxh4TM27L8StOn7JCnKcqTTb1/lshQ8tHg0cNIqXoqloPwlLPJvH0sD+F9XD0zv134GizR8GZay637MOPyr6Tn1KNzN+ndRdx/j3JOW9zlYOq9/CPwGpm9fCXw88GoRedox73k36zn8qzG+vFIcu05H4CTvLD803e/DMMeDb0/3/Xkspe8m/Er6PvB8JmndxnXPTofgW4Bvw/T3i7Hnv+dgur457kVE5H2wyMPPxSLFvxV7Hr+IGczGxz4N493Pxpwqvw1z1PhQ4JflGMsVPAx4C+as+s6r6VlF5J2BdwJer6r3rTn3nIj8ExH5MrE1DT/gKpYrp/3cGL2EOTS9F2YEXI2uunJca+tf+VwfH+BDMIJ8AyNPKUxx/CIrHvUM1ukIPGfN9f5r2v9fGXmpYYPtPGaJvmvN9Z63oXyHeQNcAP7Wyr6Xpn2fNNomWEivAh+7cvznM3g6PPOoe4/23TU6799tOOZejhnBdBn3/KqVfdlL//9ea5kqn/K50T55XB2y/z3TMf9vw/7sDffile15nD8fm+wq8D4rx/wO5qVTr+rbY5b93nReDfwp8Lsr+9877X8hQ6q4F64ccw6b6Cvwm9iE7juwlxwKfO3K8YI9wGg65pvTOX+NTfBP5KWDeUh3wM6afTky7F9fRr8+FnuIfEP6/R7pWq/ccPzzcn0ZDDWfsHLMj2OT4lOsiWBi8Gp8yprt+5jn2g9hUb7fgb18WgCnjlGf2zAOfRDYXrP/o9O9f3qNfG/sD04WwfTsdOyLDpHFu47ZPyeWIzZz50eltg3YS7qvxx4GfxX405Vjn5OObbGovn+PzR1m2DxlY4TYw/XBPBcV+Ntr9p1J+1o2eIam4+7mMiOYrua1ktwq8MC1btfyOXafvVfSSxHTwR8PPOmY535y6u//Otr2v9K2Z605Puuf70864Y+APxzt38IyIeQIiJN6i39DOv7vb9j/PDZ73d69ei8uYy4+usePAlsr+7JO+/wT9M89HD5nOexZ4mrX6Xkr2+/Nx4/riq1z9WD61KPtJ3oOXbnXx6b9/35l+12HnbfhWi9O5/zHle3vg+nadXOnA/2AGcYUm9cd8E5nFNmMvUBUjKfOrRz3vHXlOaT8dyVZOsnnuGNIMKOoYsam/4RFWP0cpie+lTUe0AyRuJ+xIptXK4IpR1b94CHHPD/V9UXYfCskGX2nq1GG8nl4Pjy6OOkehuexm0fbd7C5cgBuH21/EvZscAF48sq1cuTXgQwRh5RvLX9wdLTPcct7U2q/twHvtnKt9
8CcNV97gvI+j8PfJd69qX+uVp02tRsnf2fpkrwdmP8D/4wN7yxHxzwAvOUQmVp73prjn87wbDau/xRzwDzAr+v6AePxP03bn7vmPmM+rNL9ZlgazPFxd2ARVH/D8SO+v4CT8eHHnUDmPpFhzH0v9sz537H3A68B3nHDuFr3ef2qbJz0g0V9dWx4P5COeV9sLvM1o233pjJclQimK75A+TwyPphXmQL/eM2+D1pVICPl8WNrjm+A3TS4bl6zP6fu+Tdrrve8DeU7TFl/7Zrj82T9RWvq8QtrjvcML1GfedS9R/vuSvvftEnR8dAZmO4F/Jr9fwa87VrLVPmUz432ySR/xDG/vm7Mpn2vZ73xKI/z52MPAIHlB5wPSPu/HJtYHZiwHaPs/eSAIZ3cB472/5d03yey2cB0N2uMONhE8qewB7qnjrY/Nx3/a8B0tP1mLLXLRj22pvw72KTodzbsz0aTOfag9FHA44557S9J537paNtrUn3WTf6ex2Bg2sGMDa8Y7b8zlfUl6ffaFHkbyvLN6dofu2bfTRwz5RzmUb6WMxmMMqtGsUP7g5MZmM6mY199iCzedcy6nFiOWMOdWFTaeezlwzPW3Gf8AHO1H2xfeMLPAf2x4br5wfOz1+x7KsODyZMPucbdXB8GpiNfApbP9ffB1u37m5GsKeZF/mPAxxxy3is5yENZj//wmuOz/vn+FXn50PT7Uxm9hOHkL/PyWFqbSpXLNzDdyzHn4lhO/pY1Kc+w55C3sUanHlKne3hoDEwnqdPadmPggXUc+71p33uMtp3oOXRl//un/T+0sr3G0i+9wzHbs8aeXy+wJp3UqM1eeFQ/MKQw/XvHuO+PpWPffcP+17HmZeGGY/M4OsnnmSeQOcE4rFu5xs8CH7Dm+MdiXt6rhknlKhiYsJdle6nPNvYz8KqV8r56nWyWz/X/4dHDSfewYb7F4Kz40aNtX562HXB4xua8FzAHt+O+mF/LHxxtjDlueT8/bTuQNj7t/49p/7sds7zP4/B3iXdv6p+rVadN7cbJ31l+cNr2c2uOdwyp0p+5oa5/kPZPV7Y/EePEtcaHNdd5SbrOpx/SZvce1Q9YFLMCLz/GPT82HXsgPf2K3PydY9bhXk7Gh3cf57qj638QlgJ4fI03YeurHXimx94DPB17Zj2FOa/8z3TeW9mQLvGYZfmqdJ1v37B/Cwu2eD3LDj65ja6Kgel6XeS74OHHe6XvdWG6r2Jz2Pmr12x7F2Ab+BVdn1bl5xhC864GXrNm21+k75tG23KY7S+sHqyqQUR+GUt1dzn4LX0oQgwPx+tVNazZ/hdYaqaCgoKrjxdjoenPZ5S2UkTeF4twep2qrtNJAKjqn4nITwOfIiJfqKq7WIqFgK0PdDXwPdhk9gXAr4nIDvYS/xWq+ufrMkGIyC1YypbXqOq/XynzTES+GPNgfi5D2qBPT99fpqNFlFX1fhH5mhPW507sBdvahdRV9f+IyOdjabY+K30QkTdhnPJfVfXAYrQp7cXzObiI7t1YVNcLsIfGtVDVXRF5KfCZInKXqt6Lpc7xXFk6w/0193rgBOe/GHvIfj6jtBUi8jhsweq3MKSLuOpQ1fMiMsMeVlbxLOxF3V8d83JXS44+DYvq+VZVXcfz41SF/xiL2PscVf39leN+V0ReAnyBiLzb6v4N+KpjlnGMe45xzE9g67V8oYj8UJ5TpVSKXz067qZ1J18vSPrxqzDHo6+4xsUpOAFU9X+IyI9hL0E+GJu7fzDwccDHich/x14kaD5HRN4xHf8GVf210eV+Cnvw/jgRuVVHaR/X4G7MIe0FWATLP8UMMC+7zKrckr5PomePg2PNxcUWn39PrA5fsCEj0xxLn5bP+QJMT43xMlV9/RWV+GhcreeL86r6xxuuA8t663KfQ8FSQIG9sOmhqi0nW4z+ydjz6y/p+sXR7+FgiqxNyI5DG1MSj/CBmOHxE0XkE9fsb4DbROQWXZ9up4fawt9Hpfu6LIjIFJtH/W3s5dnLMePOB2HRS78oIp+oquO5x0swx6d1qaGvtDzvjBnyauCTVfWNm45V1Q9I59yCvQ/4OuA3ReSTVPUVV7tsBQ8dHoWcdNL3XD+3erCqPiAir8NSjD0ZS1X2UOG45c1c8p4i8sI15+Q0iO8K/H5K6/bMlWPuVdW7L6eQJ8Rx63S1rrORD1U1isivcniayDEn9s8+qvrnxy8qcMi701S2dfOEdcgp4H7yGMdmuXjSBrnI6ejeFYuQPhSqetcx7nlZEJF/hHHcj2K64c8wR+KvxFIaPgN7Vh+X51+uXOY1GPf/L8wQ90WcMB1vKotjSEG7KXXjvwfeHlvHqt1wzBWjGJgKMs6m7zev7kjGl00T2jcdcq21LwpH288du3SH48E12/KDiB9t21jHhHV1OS6u5NzLxYMbtndQ1lcrKHiI8EOYV9XfF5HPHRkFxmv8HIWXYOm5PllE/ieWf/knVPWv162/kHJmf8Ga63yLqj64ulFV/0pE/i/wScko80nA6SPK9r6YvtQNE7q8/sG7jrY9DTPcrHshdM8h91qHIx+0VPVbReS7sLzST8cm4E/HjF7PFZGvUdXVdR0+HHMceIWqjg0eL8W8iJ4nIl9xxETrJVhKgs9I6yR8BvDbqrrOweIo/DDmffWyNJn8WcwZY+OLkQ34OSy654NE5F1V9Q/S9k/H5nZ3P5STx4T7Me/kJVxGXa6WHF3OA8yxHmyPupiqPiQv9DB986mYcff3ReTlWNqIZwOPw7zmnoi133WJk7wELLg+kXTJT6dPXuft7wPfjRlrf4zll2wvwF5y371ynU5EfgD4l5iX64sOueebReTHMa79z9gLxG/WNWvwHRPZqD+9zPM34cEN21fn4jdhbXIbxzdIfwH2smKMezne2lBXggc3bD/p88Vh14FjPqMd8RwK5pULaxw3Toir+Zx4DksHepwy3YLx9lFycYphDZprgS/BUgJ9vqqOX2D9pIh8AiaX/4nk3CIi/xhb6+jTVPWvr2ZBEq/8PBbp/Mmq+r+Pc14y0P2MiPwGZnz8PhF50jH7qeA6waOJk9Y953G4Dn243r+txQnKm5/71q4nO8Kp9P1MDurIX+DqrDt4KE5Qp6Nw3OscxUWbtmc85JyYxs5hBtkxzqXv4zgeZrlY52wxxqkj9j+kSBz03dg6fp+qttYswB+KyKdiARefKCLPTI4fR+G/YDrsQ486cAP+NvAE4FWq+jtryvsMzDHkhar6UBqYi4GpoMeF9P1YLF90j0Tat7BeKeiabdnr6/YN93rcynEwvBzZ9HL1aiDf78ALsYRN5T0O1rVDRmTzInjnruCeBQUFDzPGES1YxM+3pQihT8ZSq/zAMS7z49hk7fnYS9cdDjf+nGP9i4e72fwC5yXYg/1zMaPDm9J9NyFP6N43fTZhPKE7C9y/wZBxUqP7cR+09rCXF/kFRoM9nPwn4CtF5EdXvLvzYpd3r1zn/vygiIXj/69D7vlaEXkt1o6vwl74fe6xanXwWq8WkQ/BUll8AmY8QETeAHy1qv7gMa+jydj29Zgc
/csUrfUZGB9dSXTVcbHFlT+8wNWTo3Pp+yQPMMd9sL0mSC9WPwb4QkzffBpmYLoHk90st2+5JgU8Apf7ErDg+kaKbvkfIvK3sIi0Dye9zEvRdc9Lh369iHz9hsu8gENe5iW8GFtj43+k31ei1/IYueXQox465GeQ16nqsRYuvwqet8rmZ/1zV3jtq43LfQ6FoU+vVA9ezefEB4FbRGTrGMaL81gqnZtPcP21EJG7GMbfcXF3is4+Ch+dvn9+dYeq/paIPIB5nedIqyzn3ysi37vmeneKSH5+vmnDS9wDEJF3xdKd3QKsRkwdC6r6oIj8Ghb18u6sjywouEFQOAlYfv/2e2v2r3v/di2Ry/GeqvrbRx2sqi/EsnNcLja+a+T65sN12LQ94xbMcLUuk9RJMObEVV6uWImQOgQPpu87T3DPj70azwwbIsEPw+tV9WXHOO4jsHc4vzAyLgF9lNkvYplS3pvjOUu+NX3vHL+oS8jvOzZFL70XZmT/ahH56g3HtCm6/r2uJFK+GJgKMl7HEGL8Jyv7PoCTycobsJD59xSRc2smjB+Wvl872pa91p+w5nrvc4J7H4Z8v2es7kgPLx+84bzIybwTVvEA8BQRqde8QNtUtyu9Z0FBwUOHF2MGpucD34YZl04D362qFw47EczzTkS+B/MGfTw2OdsYdZEe/E8aIfF/sZcxX5Hu8fWqeliKmTyh+4+q+oXHvMd54OYNuu2kBvvLetBKnoPfISIfgL2A/3CSd7eI3Ia9PAD4QRHZZLz5pxxiYEp4MeZd9F8wo8r3n6ScK2X+NeCjRWSCTTyfgxmsXioib1XVnz3mpb4HSxn4j0XkS7FF0t8ey9m9Li3RVUMKxT+HLdp6pbhacvRg+r4TOOC9teaecMwH26OwIQrqMNxzTI+27Kn7jekzvucUSxXxNlW9Gv1wVXE1XgIWXPe4mL7H/PSxwGOwZ4F1UYlgzwHvLCLP0DXpLEf4GYaUI7+oqm+4grLmcf5kLHL0YYWqXhKR3wPeXURu1vUpxE+KAPYMsyGl3QOsea5KzzxPvQr3v5q4kufQJ6fv119hGf4Qe359qoicXZMm75knuNarMIPMc7BoiqOO/SgReXdVXfdi+CS4i5OnbL0Hi4w7CpP0fdvqjjSfOZ1+5oiOX2Ozk8ZnYG2d52XHSjOfDAg/izmmfLyq/sRxztuA/LLzsLlxwY2FRzMnvQ4zfj0Tm3v1SM7aT8UclP6A6wOvwhylPoShLa4EmQM3vT8bv2tcfUa6Wu8arxZel74PvJtMz19P33SiiJzCdNtvjdNEXiZeizkKPIODvPzBHP9d5avS99/GnqOPc+yHAFfDKe0LOBgJfhi+l+OlvdzIhyvbjxvhmLNwrLbzkRCRO7C1qc9jmVLW4XeB/7Zh3z/AuPq7Gda0u2yUNFoFGXltii8XkRwOmb3D/91JLpRe+P0ANtH8mvE+EXkH4POwXNPfN9qVF1x/rlie8nz8zVi+yKuBX8UmFx8qIh+7su9z2Lz+0n2sN3wdF6/GHow+fbxRRJ6H5a1+KO5ZUFDwEEFVXwv8JmY4zusxgRkhjou8oPXjMcPUcfMYH7eMAZsoPD7d57uOOOXVmA7+kBPc5rXYPGKdcf6ZJ7gOWOqGt2Ih5ZeDdQ+Vn4ZFj/4mNqla93kr8GwRebsjrv9SLELt8cD/PK6n7WFQ1bmq/mpK6/d5afMqNx12/puxyfetmCHtcuTwcvEuWFu//ipc62rJ0fgB5rjHnkTeD8NXnfDzzKtwz0/G5PtYUW8PJ9JLwHuwyKWPL8alGxMi8iki8v+lFxqr+25niAAcr3+XvSj/jao+f92H4bnin3IIklfoxwN/76hjj4F70vcHHHbQQ4z/gI3Z716XnUFEbhKRY0U3JeSXAOvWwgPj9SeKyEesbP8KTvbC5eHAlTyH5j5diqwRkVpEnpyePY9EMubn59cXrlzrfYB/eJzrJHxb+v5mETngtb2y7T+m75ekF0Wrx+4kJ5ojoar3qKqc8HPPMev0S+n7y5JBaYwXYs+6v6GqF1NZfvgQHQCWQjBv66O8ROSJqd+2xzcQkadifXwa824/1LiUrrPW019EPhOL1v8LjnZIKbhOUDjpUHw/9n7tc8XWnBrja7A1Sr9fH/41wzfhezDHsK9Kz9JLEBEntu7ScXEcPoSVzAVpvvr5J7jPw4FfwdKgf5iIrD7T/FMOX38pp7w/EGm6SbcegrvT95end7L5OlMsg8Zx8eOYE8PfFZFPWVOux49+vhyr+z8Xkb+z7mIi8oHHrYOq3nVCPnzeMeuU+fATROQpK+V7KpalRBmtiSYiT0kRlav1eQq2LiCsOLCKyHbqs01yDeaw4YHv2xQxrao/e4j+y2PnM9O2v1h3jeOiRDAVAKCqvyAiL8aU1u+JyI9gJPUxmDX0rzlZjv8vwV7cfI7Y4s4/j70Ey2uBfM7Y41ZV/0YsB+6nAq8XkZ/AiPDvYJOE9+IKoaoqIp+BeZ/8iIj8KObB8FRsUfKfwjzNVvFKbK2UH8dehLWY18qBBeU34Nsw49J3isizsMnsU7E1IP4PQ8qBq3nPgoKChxYvwaJPXoQ9HPy2qv76cU9W1TeKyHNIC0o/NEXkWzH9sauqh3rEqOpbsg4Wka8E/t2q0Su9pIkj3f092FowXyciH66qs3TczdgLrGMj6edfxHKbv+NqBI6I/DMsbP1Vq+eKyJMZcjWPdWR+iPhs3bBekoh8TSrr87G0dZvKdzH1162YweqyICJPx1IkrU4A80uQvRNe8iWYB+C/ZFhA/ihv6auBtS/0oJeTGnjjmoikdbhacvS9wL8BPktEfmSVL0Xk8aqaUzl8D9bfXyUiv7EqH+nlxYeeIMrooVqDCRE5oyuRkenh5Zswj8xvuAr3uBWT7bfp4YtcH+daT8W8cbexl4BlAfUbF++PvXh5k4j8MkPE4tth3pJb2MuA/wWQDPXP5uiFz38Y+BaGtQw3RvMkh47Xbtp/XKjq74qlIn3WIRE/DylU9btF5L2BzwbeKCKvwNZRuxlr0w/FdNM/O+YlX4lx34+Krbu4D/yZqmYHvhdh67e9XER+GEuX8/R0r3u4Oobuq4LLfQ5NuvrZwBtU9XdXdt+Jeev/GRbZcxx8GfZM+AXJqPTLWGqpf4BFhv/dY9bnp0XkazEO+wMReRn2/PdYzJniVaS0Xar6ShH5EuyF3R+lvvxTzKP4SZj3+C+z/hn14cTXYf3xLGydiZ/CZO6DgPdLf1+NF7X/Havzh5FewovITZi835y+P1BEPnDNueO1SZ8G/E+xVHh/jKWmvgWbv/wt4BK2dsbDrgsKLhuFkzZf716xdGDfAbxWRP4H5kT3DOyd0x8CX3yFxb5qUNX7xNZu+zHgVSLySiy1n2JO1h+Ijdfjrpv4a9gz1BeIyC0MKba/TS0a9eXAHwGfkgwav44Zoz427fukq1KxqwC1FGvPx95N/u/Eh28EnoKtQ/yTmDPdunez2aHkR9bsO6BbjyjHr4jIt2FZNn5XbO3gFmuzB9i83tfqdRYi8onYmmkvTQb+V2F9+64
Yp1Tp2FZEPh54BfATIvKrmDPjHiYX74tl63gcJ39mvmpQS3n/Pdg73t8QkR9jmGt8HOZM9C26HJX8hcDHiMgvYfOBORbB+BzMQPQSDjoNvh/2rP0LrJmzpTnQZ6SfD4eD6ZEoBqaCMT4LI5/PxB5u7sOU/pdhKZyOvTCz2voWHwh8Kebp8YXYxPPVwDep6k+vOe0F2OTvU7BFyP4ce0H6TVwlpZ8U5Ydgk+TsEfDr2ID9SNZP3j8fI7tnYQYvB3w1yy8yD7vn74vIszHvmI/BQvF/CSPOj2e9gemK7llQUHD5EJG7D9n92WrrAL0Ue3mTIyBOnId7gx68akgvil92glM+B0u59W8xQ9MvYzr5DmwC+L6Yfs4PdD+IvXT5u9jE8+WYYeETgN9gc1ToJvwIZiz5SA6mL3gOZqS/F/Ps+gssPP2d0vE18K2q+hsAyevtnYHf2WRcSvhvmKHh00Xkq/SQNIKquim1xknwr4EPT5PLP8VecLw7xkcPcPLJ4U9jXmHZ++/b9fIXHD4JPgJLR7EuMuWV2Euxt+N4aXeuihyp6ttE5LnYi4WfF5GfxNJunMEeyp6QyvRQPNg+lPgZEdnH0htcxMbiR2Fzqo/RNYuni8iLMIMRDJFh/0pE/lH6+2W6nGP8c7DIqq/moOf+sa91mS8BC65ffDP2QubZ2Bj6SGxM3Ie9nHgp8FLVPg3L87HIxu87TA+ppYv7QWze/2kMERwPNb4Te4n4ERySlvahhKr+86Sb/hnWrucww8+fY887J0m/+l2Yrv1kjFsq7CXE96V7vVJEPg4zvOd1In8G07ebcvBfS1zOc+izsTnKv7gaBUg88kEMz2zvg2W/+CySB/YJrvWVybjxediz3g6WDvg1DBFb+dhvFJFfScd+MPYC7zyW6vjF2Fi7plDVvxKLsPtijIM+HXs+/RvM0/0bVfUPH6Lbn8V4BezZ+FkbjrubIV3ua7H1OT8EK+/NWIqwP8F023+6Uk/tgocdhZMOgar+ZxH5Y+CLsOepbex56Zswx8EHr/QeVxOJo56ClfcjsbG6wBwKfo71RpJN13pARP4+Npd9HsNaNt8PnFfVWXL0fhFmpHlfbF79XIyDrxsDE1g0qog8A/haTH+BvbP8MIZo2lXnM4eli/8ttXTsVwOfD/w/7N3sZ7LMy7913Iuo6muSA9qXYM+8T8eeaf4Ym6OMj/1tEXlP7P3xR2NcEzGueR3Wx1fkDHeV8BnYu9nnYfJ7GuuTXwZeoqo/tHL8yxieST+cQXf9ZDr+clICfiQ2D3yVql4X0biiV5yaseCRDhF5J0yx/JCqHghrLCgoKHgkQIbFhg9DvxCxiLwEe3jZB+7YNHEXW5/lq4AXqOqhqerEFs1sMS/ku05Q9nuxCUZ9mJEkHft8zCD21WqLpo73NZgH8XMxw8cUMzL9ERbi/n1qizePj/8SbHJ1Jzb5+wHMSDXDFr985jHr0GAPQveq6vuv7Htn7GXPszGj0uOwl2lvxl7WfLeq/p/R8T+Q6vD5qvqtR9z3p7GHjY9X1R8TS1/6PcDXqeqRETQi8pfAnceJYhFLVfQpmBfmnakOf4l5an2zqv7ZUddYc80vxx5AAJ6sG3LCJ/ne2B/JKPfzhx2TjjuLeQa+QlU/bs3+e0kGJj3ewuEnlqPRmPqw1SgjEXl37AXYs7Ac2A9gLy1/QFVfvHLsXQwPtk9geLD9DeBH9HgLvT6kEJF/hb0cfgfMO/evsIeRrx9FZK2ecy+Hp8BaGvuj9lynE459rdSex1kP6tiyUVBwtSAiZzAjxa+q6rHTkRZcWxz2HJo8u58BvIMeXDOpoKCg4LpF4aSCy0FyRnh/4Kyq7o62fwyWOv1TVfWy1wouKLgSFANTQQ+x3LVvUcsvm7dtA/8Ti6L5B6r6P65V+QoKCgoKHtkQkS/FPIefpqqvO+r4gocfIvK5WHTxh1ylqK6CgoKChwUi8tlYCqH3UdXLTndacPVx0udQEXkvLGXt56nqtz/c5S0oKCi4UhROKliHxH3NqvPqyAnyJ1X174y2C8aHAXg/LS/5C64RioGpoIeIfAPmWX0P5j18O+YB/HjMW/ajirIqKCgoKHioILZw6BuwNa0+5lqXp2AZIrLF4G35Cde6PAUFBQUngYh4LMLxt1T1J651eQoGnPQ5VGzx86dhqdkOjdwuKCgouB5ROKlgHcTWF34dltb2j7GMF++FpVB9EHi6qv7B6PjHYSnsXqaqr3+4y1tQkFEMTAU9Ul7SLwKeiuUp7rCUBC/F8uUfZ7HugoKCgoKCy4aIfCiWY/pF49D/gmsPEXlXbP2Ou0uKs4KCgoKCq4XyHFpQUFBQUNCvafpNWArY27F1h98E/CyWwn3dmoQFBdccxcBUUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQcCK4a12AgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCghsL1WE7d87erCIg5E+EqGiMeIG6qjh75gyf9zmfTT2Z4pzHe48XwYngkvlK0vU0doQQQMBXVdonEAEFJxEnSiWRixfuB5TTp08j1OzPZuzP9rnttttwrgIRVAEREIc4hwISI2i0+6AI4L0SY8fe3i7nH3yQW265ibpucK5ivuggtAjQ1J7GO9q2ZT5fEIB5FxBXcdvttxPxiDhcqpiqoqo4Z9tEBFQRFEJAu5bzFy+ys7PDzs4OiCOqoppLNpTfGtjORyFGW9/USzpOR+2Y6re3f4nJZEJVeby3MsVo9QZwXlBVukVL21pWAeccOzs7fdlVlRgjMUa6LjBptvq6qCqk6zmEqIGubZnt7XP63FnwNRGHtjO8c3Tdggfe9jZuvvlmqrpBnSPiEOcR8fhUdjT21xVVNAbe8ua/YdrU1HWFhkjdVIgTQlC6GNnfnzGfLzh707lR/ysQrY8F9vf3aeqapq7xXohdIISORbtgd3cXX1VsbW0z2TlDXdc4Z32d2xpgb2+Puq6ZTCYs2hZfT5CqAiJtO0dVEVH29mY09ZSmmeJcjfgqjZAhIlBETCZGyNtUlS6Evk+yPB04V8Su6IaRJFkeRv2nMVq/qV2niy0QkzxFRCOVCE7gvre9la1pg3PC/fffD+KpKs900jDfu0RoW0IXwFXMWkV8za2335HkX/DilsooIkttGGO047z1e5azEEJf9xACW1tbdrxGYhoLMrr2uE3yNTIUQNT+0kCMgRha9vf3iSFy87mbETwxCGEREazu4hUItO0+i3bGxYsXOH36NDFGdnd3OX32ZqSegK9xArGdI6pMmpqu65jNF1za32e6dZqqNhndv3QJnMdVFXWzBVITo/WLAO1sn3Y+Z7+b4U+fRpyHNuLV9J+osGhbXvPbv80b/+xP+ZO//AsC4MRTOZPTruvouo7FYkFd11RVRVUN6ltV+ZM3/uGysBUcQOG0wmmF0wqnFU4rnPZIQeG0wmmF0wqnFU4rnPZIQOGzwmeFzwqfFT678fnsUAOTNbb2wiXYb1Vla3uLxz3mdp70pCdaJ6sd75zLp6CAps7NwhBjBBH8uvshSdfFpc7uug5VE2RVRd
FUFhOOoedBUUjCX9cVzgka2yVh8d4nITShc+LwQv/be890OmFvNmfSNLi6AcClgbdu8C7hkLSDuU3zIbkeOq4EiSxSmfu69d+xH0xZKeff/TlACIqqKXBVUyhVVaVBacfGqIQQE2ENg7JXNM4hqjgR5rMZMQTquk7lUsSRlG4khkywG6B9D1ndR4oqNU4qd6CWihiVtp0jvk670uCRiOCHuofAfD5fUjAxdnQxEGIkRmVraxtVZbFYMNnRJcWRFVCWiXEb5oYPMQA2eRkruHXK/DAste+ozlcCWZFLydvUZmmCQ7BxE2NSjgk2SfE4cYQQCF1AEZyv6KLaRKLZoqlrujhogf5eK9+5bmCy5UcDfSyjm9osj7/898Y69//HNHmyMaGqxKQrYjASCTEgKuAdFcPM1TnHqVOn2N/fB2BrazuRrpiGEU33GPWZE5zzVFWF91Xffb7yeF+ZjmJE6CGg0dq+ch7vPDhHFJvkGFfZ5Pnxd9xBVOX//cmfoOLAQax833b5k68dQqBpmp44C45G4bTCaYXTKJxWOK1w2iMEhdMKpxVOo3Ba4bTCaY8AFD4rfFb4jMJnhc9ueD471MBkitXM/AJWqFT1ylfcduutvNM7vGMacCxZ03NnxpXOUu1t6Ac7Lwn/oGhIFepQBe/HimhZx6sqKv2PXgk6J0QdDsyWf1Pq+bekj+stsr6qkEVL3TRUzaQfTOOBuWmwm3hsFlJYPl5kPPSULIuDZh/aDkwxxRh6DwYwQch/52Oz8GdF5pzDe1OgfduNFG1VeVMHoz7rFRLQtR2qyiQpehn1s44EcT0GcssDJ/cV6JI13SywghJp25bGV0m5jBWlIuLwTohdR9u2S32R620TC8ekrmm7jm6xOMDDY4IYt9+gvBjJnAxiIKMBvaHWB1ohXVdVD8jS5SCPt548jT0SiZDFCbB7agj9ONDReECgbVtCMDly3qGhY9JMmGyZd4lXGd1nVN5R5ZfJa5lUxvVf9p6QlTFwTIzGRbbc26QztTGKZou+micBmE7L951OpzzwwAN47zl16oxNWka6x26zPDFwzmTFeddPKivGRCtJPiWRkMl67T2SiMRKIn0TCXDLTTczWyzwIrQxQvLMiitjy3vfexc4544YdwVjFE4rnFY4rXBa4bTCaY8UFE4rnFY4rXBa4bTCaY8EFD4rfFb4rPBZ4bMbn88OXYPJOYcXl6z+Fn5qlVEuXLhAM2l4whOewM7ODnVdDxUdKb6xxT5fU9z624qYpW5VGLou4J1je3sbsIFp3gXjPs8NaudubW31Az43hkIK8aoJyfoM4Hy2DA5Egip1XTOdTplubR1OTmuU/WFYVfLWuuuhMQlZjP2na1u6LvShajEOgpg/IQS6rjPLcKpLDs/MbRhC6EM1x8r9QBlUkxKw8NKqrrIOSYrC938fBpPpoX2zenLizDshRhaLBSEEnDgEUz6i5hUynU7RYOF/STeZsoqR2HVsTSY4kVRWZbHoCBFOnz1HdB7fTDh15mw/MLKSG5NhltU8cOLob5sgDLKbZXSsEI4DVV2aAI3HzeVAV34MJNrfEFBisFBjk5+OxXzO/W99Gxo70Mj+/j5BFXGeqpngqglb26fYmm6x2J8Rug6i4sWZkopK7OyaGiyMXRAq581qbm4mS+Mjy2cea2D96DF/h/Exx2mTXglnL5ao5jUAEBU0Ik5RDYTYslgsWCwWzOdzFouFhZ/OZrRtx2QyQcSnEg3jOctDPh7Mq0hjZNG27O7usr+/319TGfSXqnmh5DHoI7igVBGcggc8ju1mSly0aNvxhMc+jq1mwqntHW6//XZEpA9Tdc4xmUxSWW37fD5nNptdluw82lA4rXAaFE4rnFY4rXDaIwOF0wqnQeG0wmmF0wqn3fgofFb4DAqfFT4rfHaj89mhBiYv2Yo8COKkabjp3M18yNM/iCfc+XguXbzIbH/WW2PtQ3+8S4pe0u/VHH7rYNbfQTjm8zldCEshbJkMViHO4b0nhNAfYwrfhN2scKYMZrOZlcdXOLFOyULQti1VVeG870NUszJbVYxL5ThCMFfJUPo2jggRQRGN9AliRUEDmrbFGMxCHgKV88QuoDEmzwOS5kuKNJhCc+KpK8v9alHADiceJx5UqHxNVTUIFrI4ro+mUL/5fD6Qr9KTta54JRwFXTEuR420ySKa27dpakSUGJNi6loEpak9IhY2a+UEDR0aA87B9vYUEYwsVGm2pky2tohAM52iIlza21vqv6yoN5H1sgV5lCOWbK8/nnJeJWKOqciOQq5L/kSNZEu5RoVc16i9lX8ymVjwavaaEcuhGtuOnZ3TRBXOX9hlZ+cUIjKQIIom+VtVxONxkT1csryMf+dzVuvdb1MIXdfLtHceScNgkHH7CNgwSRMd0jkuj6l0TOW8TQzSParKM5lMqOuavb19Tp06zc7OKUIX8Slsd7nzhrZGbfLqq6qvw9bWFmfOnOH0mdP9BJMRadn52ueJrsThnafynspX+JR/uakbbrvlVt7nae/NzWfPEtqWN73pTahqfw9JE5k8ITpz5gxnzpxhOp1ekRw9WlA4rXBa4bTCaYXThrYunHZjo3Ba4bTCaYXTCqcNbV047cZF4bPCZ4XPCp8VPhva+kbls0MNTLmhrNG0/1uAO++4g3PnzqXjhk43gRg6Ukbbx52+0ob9cOmVxWiAwThMcPn6mv/XZeW5qnRC6NCoS94C2aIpSZEbIYTe66BrW7Pes3k4HxDYftuKwsif8WBnRTlmLZjbWsBJOiYN3hi6XpiBfjCLAjEvrgYxpAEZNOVsdL1iEnKYrpXYpRyN4wE49IOVK8aIrPQdat4A4/DAcbjnqEWWWmLwIRjasGmadG+1BffUvCecDOfldulbUyNdZ54Q45C+EAIKOF/Zoo7iEFeBeEJcziuZw1lX5WVVITuXreJZznKoZVySy1Usk6Ye2LdKMAcUf2pj69Zllhwfm2V+VGTyuMgykcdIVVXJq6BjOpmY4pjPkrW6pesiqo66nljuzlzGFLauub3SZzzWWbM91z3rgLy967rlCSJi4c+RQbmP9U7+rTmH6HIds14QEfMisB5LHlD0n5xX2WRGqXzdh3CDW+mDob2zR0ld1+l3ClNNXkiD19Tyeb3uE4dL9XK5LUbt4b1ja7rFHbffzrSZolHZ29vDe997AmXiDaOJb13XnD59moJjoHBa4bTCaUDhtMJphdMeESicVjitcBpQOK1wWuG0Gx6FzwqfFT4DCp8VPrux+exQk34IAe8EJ56ubRFnC3Lt7e1x7tw5zp49y7RplkIgRQRxuWFNKEQE8a5XXtYMMgiLOJwIUSMSIy4p4xyCWde1WfVT6JfzyyGDdhk1jwBM8dmxyaKKMpvN0Bhpmoa2be342jpIiIhYfteoSuUclfc8eP4BpG7wzaQX9jyIVoV2LBZjz4Ol/ToKURTPEpGNKzOiPxGhjW0v6KHtrJ7Ok7QWOFMxIQScEwSlW1hoqUtCkgfUaqjqmIDXQqw0IQQm04YqCXNURcWK2wUL3/Pes5XyZmY4EXLkIGNFl
AawE4dUwqSp6NoFi848OELXoSEyqRtq7+lCpAsdTdOABmJaW2w+n+OASdOwv7dHFyJRwTtTxuI8ztUEQF2Fqye9cg8h9OGH2btlTMDa1yEr+tgrynx81wW8t3pYPUeLB65gbH1fmuyMtq87XhVwiiR7sKI9qfSKrZ9AGOna8nqSjh1PuBxSVVy4dIn5fMZN527i/gfuYz5fEGJkd/9tbG+f4tTps/iqoUoW86xc0Ghjb+TB4EblFmySIc5ZPHGS+Rwynuutqsznc5qmsT4FJJoCd1ieUHOiiThSTuV+HmQz0DweNdW0qqreqyB2AacOTbIqSdkjmryETDFXVUW76PI8AlVrLcuhHJfkoOs6fF0znW4RguXfzaGrWZ66IMTkrSNpkua8Q1xN5T1t0GECnXSEAlVTE7tAJcLN525i2jQ4GRbUAxuDebHMPDm6ePEip0+f5vbbb18rcwXLKJxWOK1wWuG0wmmkvi6cdqOjcFrhtMJphdMKp5H6unDajYzCZ4XPCp8VPit8RurrG5fPDjUwaYhoCl101kacOX2aJz7+CZw+dZqt6dQGYl2j486WLMxJ2Q0XXBLgLKjZGEgKs/OjfdlCmq3UbduakhiNCbtfHjhDDkIRU/ZmKaYXDsuBKhYqVlU4DUgSWkkdure3h6+qA2Fr6yy+q9b3mBXwGozJom8WVUSSF8Cw0b4A70g5USOL+T5b26dtYby8aFhUdDSAwEJud3Z2aJqJWdFZVSQC5BDCnP9xTDJD2bLV3Ttb5CvGiK9rzIkhUiVFI056oczX0TSCNNVpbJ3PZTKBbolJAZtSEKxKkdB27M9m7M9n3HLLrQgKGglRWcz28d4xaTyz+Rzna6q6ZjrdBgQVRxAQ8dYWOPYu3t8r67que2XfdV0f5rxYLPBVhff0MghJRgRTqALOSb9d5aCiXvUcWdqf2uSA98Bq+5Pye8qg4I+GTbJMIUcLCW4XLOYzBCNN0qSGqGxNp9xy22OZtZG6ntJMpjhfpwlZumRWohJ6zw2w/MLj0HCTyWheAVVtktaPySHX7Hw+77fbuD68bkvtqAoqiRxsm0NS/lXzEOpiJIZIu+hwdVLcnemgkGYKTdMQAzhX4X09LI6XfVYysaQ62CTLceHipUQgdg1xpsdEwDlPCJaveWdnh/neHqFre0I2UjDvF8ShDmbzBdPpNiLCxUuX+OCnfxB/+MY38suv+Q0uXLiwNKnM5LtYLDh79iyLxYJ77733mHLx6EbhtMJphdMKpxVOK5z2SEHhtMJphdMKpxVOK5z2SEDhs8Jnhc8KnxU+u/H57PAUeSYvkCyXzjvOnbuJpzzlKUwmFsbmveUOzDk0rTAjJZi/UyFjjL01PR8rqUWHjlxRNqMGH+4xdH5W7nbNleKrpgHs+nynMYWsZgv7qF8ttBFb5M051+u/8QKBq2UYysGyst6A8alW55Xzx22h2VKryVPApfayYyVdI3S2QJ0J9oKmboYw3AP3sk8mz02KRlVT3tWYwuqGvtWs3JJV0/7WEenlugz3W83bOSaWEJLi9BZCmNux8p7FYk5IIbrjCsTQmeeESPJ68VSV7weCiusVVVRAXMphOZDi2ANm7GXRpfysskSgQ7vEHAopLhX1IEFmhZbPXb0GK9vHimypnfJpIznJ7X8UdHStdrFg79Ju741S1zW7u5doavt7Pp9T100f3mv2eYOTnNM49W+MvYeC8a4tBBmD5Qu1sFF3gNTH3+Mw4bG8pwYY2jBYvl/S/VyaHDBuH5Vl/TMmWXKo+9B+bdv1+Y7NS8mjSprMeHJot8igw0wxQ4whjRuISYaEseeQ9mUVkSTTNn4kySsiNjFwAt7TbE0R71An1E3DmXNnOXP2bL/A6Gw249KlSwdkaD6f97mbC46JwmmF0wqnFU4rnFY47ZGCwmmF0wqnFU4rnFY47ZGAwmeFzwqfFT4rfHZD89kxDEwpfMyZ5f3cuXO8x3u8B03T4JwpCrP6umUFOBLasaU9W4qHfdYkveLQgwLcq5as6Bmu11ckN3YvJ4MCHQat5TpFx4o+mLU7oUr5BbuuGxQCQ2eOFcKqB4Ga9uyFah2yIC7XU/tBrGD5TUd1dyJoNIE3Yk2CHJOQRyV0nYX7hkC7aJlOJr2VdlXZjNvtQO7SVH6wgdh1gRBDrzxFRsojXVdE+n5dIpbRZZVUjqWyDAQeYwAkKeuKHHJZ1TXz+czql8OPATQSQ9vLXxcCdVJYvaJnmCTEqCi2+KPzrm+Hsdzmtsh1qaoqkawkq3nuu9zXNi7G/ZiJd0nRrPswtOOqDK1V4JpbcVBuhyl6N4ySXnl2bcve7i4aI3VVUVcVFy9eZDJpaOqKS5cuIWLWbYuoFFQFVJZy5dpCfWnyJy4peSWGSAw2vmTUNuvG9VhmhtBeXW67GHvvkrw9j7884cnXtXHiBhmNmrxvlDxRHaNrWxaLlqqqqaoa5zwxaPIeGa4z1vR53Icujwe3NFnouyrVM2/PeVctdDpNHpwgXsA7xDum29vgbMHJZjpha2uLU6d2OHvmDM45I+m9vQNtOJvNWCwWG+WgYA0KpxVOK5xWOK1wWuG0RwoKpxVOK5xWOK1wWuG0RwIKnxU+K3xW+Kzw2Q3NZ4camJyAEEEDAtS+IrQtf/2Xf4WDPmwLESRVxioalxT6+GOW6dAXtG9IjRAVSUpuLMKTrS3qpukbzK4zhILaZQaPhOG+AQ0doe3MEqkR0Wj5XZOCzNom5zJUVeaLlgu7ezhf4aRC1KErkaebrO9goWrijrbdrcWoI/v2ipGubZOlt0aJhJR/VETT78D+/i4hdEn5JeFyrs9juZqjMw/ccV0krXKmBLqwYLEwK+UkEYcdBKGLxKDWhlETKXSjAd0z7rA4GllVpYuM0HVdX84QAm3bWV7aquLBB88zm82onU/9FgldYG93N5favEOqCucrRDziLM+t8x7nKsR5wBExhZC9Bbz3TCYT84zxntlsBsB0a4vQRUK0ME9rG4dLluasAPshFBVJzSsj4V1VxhsV+QjLyg98L9ssD4w1yLlEIYVu9iou4hzUjWd7e0qMoa/r9vYOW1vbPelVvqapJ/Skhk1gsoIdK7AcppoVcG7X0EVipM+Dukpo+V553PnkdZB/t23LYrHow1nHk6o+LytYmKqO20lQdcSoS/dSIl23YD7fN2JwJhsA3legsLe3Z54pACFYaG4I5AU4Y0yLSqIs5nMq79nZspBoUbHcrUrKj6rUVUW3mOFQ6sobGXmPSyHSVV3TL9LnJOVMtbpuTSfc+bjH8T5PexpNVXH2zBke+5jHUNcVoKlMAVgO2y84HIXTCqcVTiucVjitcNojBYXTCqcVTiucVjitcNojAYXPCp8VPit8VvjsxuezQ9dgci5SVx4vjtgGnvae78mTnvQkTp/aSZ4EaYExcZarURkWHZOYCiGopjC2LuLFIc6ER8AaM1mgc4Gds05fLFouXLjAZGsHcRBzLlWykrIwThsEYaQ6HBAgBogddAviYh+wha8m9XayFprngDMJQjUizjPd2kLqimZriqtq1PlkWaW3
Xo4V5AHLoYkPkkipt3Ymy75h3DuuJ6mRjdnyz4pjNttHY6CpfF/PPPBVA4JSeUfbtUjy7kiVIofb2TUtLLdt2yRgljcyK2bViIaAE8spGdrW2reqwNkiYooQU85KUcVF0BD7xf0m9XTwushVVfPqiGp8Lmb2BVVCDLSxI2jEVzV4z958wXzR4p3gq4a6aphUDU1V0zjPfL5g0S7oFnMmk4lZgqsG5ye9JRzELNuAuJQXV6wQVeXpuq5fdDErqq7rekKrqgrFoc5bK4pDo01KRGwCQMq7ae1r8q6dWh7Q1MWSXS9SH2S5tYYYFNj4e5l4c1vn34P0SB4LA6f240OFRFAR55XFvuVA3d6aEjVyafcS+/szbrnlFh68dAlxjp1Tp6irKilMk0Urj0OdQ6rG+jtCXpQuRvC+TjIeiVFNwUfFR+3Dw1E1z4vsCSRQO8ekqpjUFe28Q9S8FExnqC2+qQIRk6fsG5FyJqsoXQrRtAmgS6PPFly0GkSUwHy+b4SDgvO4Kk1y/ARkjkpAJBK6BeqcHRdtbBGVdjGn8onkNDKZNDahEyEuFkSC6T8niA0UIhbWGkOLA06fPm3poGOEOAoTT14vOCOMGKwdp5Xn1tOn0cXcJqh1ze58RlSTC1+ZV1GeWBccjcJphdMKpxVOK5xWOO2RgsJphdMKpxVOK5xWOO2RgMJnhc8KnxU+K3x24/PZoQamsUnUoTz+8XfwhMffydZ0YqFavbV8WRgtvM8aPEuehZ5pr2BjMrvlkDTzSKAXBhuAHSF0TLZ3oBdcu18mhp4gdAglzPshIhoQDcR2gYjg/ISq8slqZx4QkgRANeUUdY7p1ha+qhHvUkcMSn2dF8FglU/tIENIazpx+BNWriGgqb2SUnR9q6a8rEBVedBICha09tCIpDZbxIjzVVLgw93yrUyhRyyXo+tJ2PZFFPO0sLsP/ZIFOo9X1bECsuNC1xHT8TLccKnePd8wKKmoSogWfuq8Q3C07cIWSlPBtx2TZsKkaahT6GjoOtrFghwa631FVU9wvk6aMFvA6WVMnPRW56qqe88FTdZrsNDcqqp6T4yoEE1b91ZyktLPuTLH7ZzZLC/+lr1bTLayku6nC/3Zq/KUQ4xXPQ7y6cKywrd7s6Tpe0eH1OehWyAode0JIS1eOJvRTKc8cOkCla85e24b79KkIymrXFfEWWhlIqlxP+cFCrPyz7IWo/aL0JmMa098gln+vRMq50h+Mcskl5X7SN5Uh7qqmleSKWzf9zu5zZOiR5JXEUb4IUac9+ZBkCYGkgauxo5Imixo7O8VQkc1mQBJR9RV3weohelKrn9qcxuegdB1eEkkoCnEF6u7ta0RI2Olr5HKCTvTCdO6YhE6RJS6rmmDedlokrnsNVRwDBROK5xWOK1wWuG0wmmPFBROK5xWOK1wWuG0wmmPBBQ+K3xW+KzwWeGzG57PDjUwqSrz2QyJMK1r6qqmqWuqtMhellRbBG68cBn9SJak5AVbyM6J9XKfqTL1TIgRDylkLfbhYevyDDpxB7bn7k3SDZDC1UwQF4sFdV0ny3OFcxW4qlc2MZiyuu/++6jqmu3TpwahStWhV+bLA3Ppt5qQCRw4VqOZ0YVhETNW1EWGWVGNHEPocFVF5Su60PYKO4S2v15M165SjtfxuB+3XQih9wIZb9dU+KzQQmuKu64b6rru6zlWPiJi4yG1X9S4LHAmsUnf54bM/W5tauv3OfAeYkQ14LyR2mK24K3nL3DbTTcbMQFdjLRdRxcivm6o6pqqaqiqJoUeDsrGO28LmonvCRQnTLe2iDH0i63ldlksFkwmEwC6LpjFtjKiG/fzuvBTkaFP7e+VY1gjQ2smDJeLvkxJAQiC5SKFdtEuhY3v7+8z299nNpuxv7+PiKNuJmxtbdMulBg7YppqSGr3vl465Lwdy9DqmLCQ1Za6qXq5EaEvwzj8M6QcvyHoAZkVMdnOVnfLUTzUeawj8jHj0FgjD9jZ3iHEyKINnL94kXPnbmY63QLow49X5RtsHObFQReLBaoQVJGgdp44mxapojH09cvtYXU0D6j5fI66CkHwzhb/BOMTZ5UF54giKaRaaHzFO73DO/LXb34T5/cu8Y5v9/Y8cP5BLu3usntply5Ezu6c5rbbbjupyDwqUTitcFrhtMJphdMKpz1SUDitcFrhtMJphdMKpz0SUPis8Fnhs8Jnhc9ufD471MDkxeEbz9nTZ/iIZz07XWyoAGmgDzlQFScpvC01ctAO1HJhei+YYTApwiSUGiMSI642BR66BRosrHVre7u3jKuaJc1C5fSAsrKymVW49sL+rCN0HdPp1Bo8naMqhGhWQ5Vk3ROoqoq6qvGVLdjm3Sj3ozue54npNlNmAr3FVfv2ycq/Hz65Ar2QGMHYAnrz2ZzJZAoaLSfptCF0gdCFXij7wVLbAmBZx8TsXSHLluk+RG7Udpr6wnuS0muXjstlzqfZQM+KL+W+TYpek1JVIC8cqKTBrlnRS28JRSLE0Pdh0zTM9/dpuwXzxQxfeSrv0RiZzedEVXzlaba3qCcTxHtCtDBjBRDFpYDh3NKB5HHB4IEClv8yL1aW81LOZjP29vY4dfrsksV8VQmMt+XvvADbWPEPGJT8OkW/7vpHYTwZWUXXdYR2QVgs2NreZn9vj0sXLiIi3HrbbaBw6dIl6mbCpMm5T2NKA2peKWMYASiI4PPii5BCQFMNR3UbFyuTRFbOIXRIWoyv6zoLcQ3Z40KSdTyPe8ur6lyWP+3JOXt95AnK0A95AVBr97bdZ9F2tG3g1M5pnLNwZVRsoqdKCDosslkN492UeKSua1uMr27wzZTFoqNruyTDWaulBTtHk7zsTxJjpIsdmiaqQ2hvJISIyLKOqauas6fP8KxnPYtfftWv8prXv44//dM/Zd52KEpVmS6s6obp1vZJRedRicJphdMKpxVOyyicVjjtRkfhtMJphdMKp2UUTiucdiOj8Fnhs8Jnhc8yCp/duHy2zhw/dGywSlSV55abb6aubKEnUGIIaAzQKy9G9vHc2HlbUnxiVm8n41BMTQIQkmU5dWbKd0gfnprKFO33ktfC0CVLSsvumYXPJQ8EnwajKaHxIA4hJEXBSFh1aQCuegcsDVy70LgJxkUbriPj8wZl1B+aBm+uiwn4qmKOff3HQm+WVE3KfVkRZQU0rvNQt9xuJO8FC9tcZ13tyyLZWm1hs2ML6tAIA3Elhl76mLKnP8YJS3LmnWM+m1neUkj5Xh110zCZTsG5NOFwvTDkvh3qpz35OhFCiP2Cbfv7+0uThtyWNvDkYP9uwFjhDqS4ct74z34ytNoPl4NxH9vPLnTmDRHVlGFStG3b0rYtdVWztbWFiGPSTKnrKao2KRgG1Vgyc2j0EBrpnFtq9zTY0gKH7kCbZSU/TDiq0WRNl47r+62vW0xeA7lNtfcwMM+gQdEv91te1I+UYzQynVq9Q8o5ulzG1abVlUmYbctjpBstNJg/VV0te9SMx1zO/XzgHqYrZXRP0gR0azqlqWucCPv7+ywWC0L2ZPAVIcLe/vwIGSm
AwmmF0wqnFU4rnFY47ZGDwmmF0wqnFU7L9yicVjjtRkbhs8Jnhc8Kn+V7FD67cfnsUAPTYjEnxg7RSNcu0NiBBlPCobPQrBhwLl9IESKSCo2Q8k9G8iBzAqLSK/ms0GNarMw5oXKCplDMdjHvrdFgeUFhZKEfK9B0TdVg5U4NHGOk8jVVVSfh8gimxJ1zoBZq2S4WicCWrfoaNy9mtXRcUpRjHEdJrEPO+5ot8yK2uJYN3kAmMfPMiHRdl46FLgTyIM3tk8ufFfcqAYx/5xDCqjJL8qa6CywL8JKi14HzEjFJ6mPJZSdiq6Ll8kUEpakqnIB3wvZ0woMPPsje3h4xRuaLBa7yTLa2mW5vY1cVfN0MSkdT+HGqR54wCEYkbbtgsVgwm83Y3d21wZlCVtu27UMx88TkuFjt/aUJAWsmBce55hEkkJXPmLCjRhaLBV3XoapMJhOiKl0X6LrOQibV+nc63WJn5zTT6TYaBe8sR+iqt0nU0HsMZI+Ldcp8TAI5p+tqeUnjrq6X7+PcYP0f+m5ZLod7mhxXVdXnr131JJA0EelJPH2m02lPDHZsyvdK1rmjCQMslSGk9pvNZsxmMxbzeV/nKpFOUzf92AGbvPaEpuZV1S8smG7iJf8GoqIhEtuObjHnwQceIHTBvD3UwpAFh7iKqpqw6CL33f/g4YJUABROK5xWOK1wmqFwWuG0RwIKpxVOg8Jpx0XhtMJphdOuXxQ+K3wGhc+Oi8Jnhc+uVz47NEXeqZ1t3uHt3p63v+vtuPW2W5jWDbWvaBpbrMzWlrK8hhqtADgIsTOvANLgB6TPgZmIArVxHgLdYkblPW27QGNL5aD2lYWQjuRExMIYhVXr9tjyafdpW+uQbjG3hfacUFUN3tcs5q1ZTJ2j6zqciAlKVXHbbbehIqhPwpe8D/phpJsHc94X1UIv6bpeCL23ENKNgzwp4Cx8i7ktjjadTlksZsO5AjHaImPee9rWjt/anlp5XRJaFQQPooRo1s48QFfrYEIoRIXZbA+NFhJsCm95MGeraVVVdPOWxaLFOQtvDdHyiY69JBRBZLAMx9ClsljO27adMZ/PaCoLHexUQTsEZdJUSFPRLhbsz/Ys9FJhEQKElgahnkwRqVL/CGkmAapo0DThSIGrKVSwrmuapqHrOi5evEjTNHjvmU6ndF3X/w4hoC6kxSMPLoK3SQEP+5e9Lg7sJ4USH4FMPJsUaybZdCME6T1LfJKrvb09W7TROS5evIgtatixc/oM3teAI0aS3JiCFOcs1y2BGIaJAk7odMitqgKksolg4ycqQZWqL7OS5wuqptSzks4ki9rEJARQjTjnqeuqJ3nVPPFJ5A1LbbK1tUWMsSdr6y8bL6aMK1Rd6hdP5SV9OyunquVvrSOVcyxiYDFfELqWSVMbKTmHisPXFTqb9xNPaxuHryrazpYOrKqKECqihtFkcQi/bdu2b0PBJXLT3sulbVvm8zmndrZ4z6f8LR5zx+N42f/5SeYx0ky2eMztdzBppkymU7a2to6Uo4LCaVA4rXBa4bTCaYXTHikonFY4rXBa4bTCaYXTHgkofFb4rPBZ4bPCZzc+nx1qYBIRbrnlFu688w68OOqUK7TK+S7tIFDzEkAEJxYWZt4BOVTULGekCpsFH2btnNDZQlTepUW9olLVPln0I3Vd9VbJsSW290XQ1TKbQumiKZ7c4d55clil956o2cpuOV2z9XVrMkUEwpJSNmVxFPLxYgy4Mrg3D2kFGA3m2WyWdFa+pxgviaPt5kasRDPCR8sBOlguzcqYOse4VXWp/aw8iZR662zyMoimfAZLsRsdv1wDcUaCzsV+kOfwQdVcFlI76MgjREFtETJVs+5X3oNGQjRrt6/s3qENfUilc6YUXF3h69p8ElK7OnEDz+eq98IR+8mBaiSKyUDTNEwmkyXlOViYbWG32leoWw3BPVzJay6ULu1YkoeTYJ33QZ5AqaYw4aRFNdqikV3X0UwbKueJnXlMoJFTp05ZvzJY4u23KXohhXf2njopfymxL0NURfL9ej0wTALs+IMeKEvjV8CJWf/n83lS9qmdotpwy5NDURTTGyEOCwfWdd1b6DMZ5snS4CFjYcmz+RzB0TRNH4bsnMdCVdN4VSNI6TrouuF6qkPdUhtrtLbTybiCB/vKO4e4ITdq1g2iYHlQLZzYJkOg0apd+YrJTsNNZ8/w4N4u21vb3HzuZra2tgmzOSC0bcdk4ogKi7a7LNl6tKFwWuG0wmmF0wqnFU57pKBwWuG0wmmF0wqnFU57JKDwWeGzwmeFzwqf3fh8dqSB6czp09xy082IkMLYnFnenSN3owDiJCl7rHMUFMU7U8qCWmirKtk9YLFYoFHx4myg59yqTohdi2qgaepeSfXK6nCViSaLdQyJRLwnppA4VcW7Co2D1Z4kHIvFgu3JFBWxFh9ddXWgrWur4e/0nwxhoTYcNl9Dc72j0LYtTeVxPud7lX6Rvq5r0RSqqqmNndCTlyBW/kyEK+VTXc7/2lsy0/WVIbQuK+zBeq5L5zkRpEqhtM4h6g7cY9RC48qSc1vaJMFRVT4JfZes4Dbw5l3HZDpFMAtuPZmgvkK8BzEhz/K3rF11aKM8EcEGl4qR0WQysQXXWM6x2yu0mBZoZFmxb1LyY8Vuh+iweUk+UltvuMz4uLW3yUo+/Z1l2TlnFujOrNuWA1VpY2R/f59J07B1aoumaZjtz1A1a7cTj6ZJgSkkZxMbYSS7y/dfrXY/yXEORiQw/s5j2PrFZKZLYcNnz55NSnrIX+xSyLpN0CRZ2bteqVdV3Y/p8WdczryY3WK+oGkmSdEHnM/ndoxEmxggdNG8gMTG15B32NooakQ0pjDbgxNAnwkhT7C8B5V+HOtQQOu7EHHmWJPmA0LtPNNpw6lTO5zf36OpG3a2dzhz5gxRdglqOX0VUm7fxeHCVAAUThtftXBa4bRxX63vxKUqDu1lHTC0ROG0wmlWwMJpDzMKpw1XLZxWOG3cV+s7camKQ3tZBwwtUTitcJoVsHDaw4jCZ8NVC58VPhv31fpOXKri0F7WAUNLFD4rfGYFfNj47PAUedtT2sWMC5fOc+5xdxK6BaKBarqFz4ZbsZBUnwTEQlbNU0DEW6elHJgabdDEqMzbBbO9PSpXMdnaYjKd0i32Cd2CEJwtqibQNA06WmwuE8UmZR+T5TDngnQok8m2Nab4ZGV3WMisNbnzHjoLC2tunRA10i3mvZWy72QOGeQjSPKKEPwgfNaDm88Ry9kaglnSt6cTvHfELiQCiIQuMJ/PqSvpLd45/DQrOUUtvyJGuiZcjvHiX+M6LCu1Dp+8A4aF++LoeO2VU1aezlfEGKmrKvX5mFAwxTGqp4ZgA0U8hCTZRLx35PC92d5Fzpw5Rew6Ll26QH3WgyTSEcFNLC+seIeXCsWjuOE+YhMNcWLXj4o6C+8NXYd4k6umaSyfZZKVcZtkZTPU5fj9P4au9PuqpfkoDKQwKM3x7xBskUrvLdR3kfL5Pvaxj+XShQeZzfbo5nvJc8AmM23b4n1F00zYmu7QhT
RJyEre7rREROOJgGLyvFSPfnID4o30nfpR26V8vskLKYQO58xbY39/n5tuuomQJmYDIVh9s8dACCEpb1O8FgoaDkxWMlnbsRbyKSJUdc1kssW8jcQQiCp0XUS0g+QpMZ1OoDLL//7+Pov5AtWwRGSN97TRwkpVbXLgqgrFsVgsmEy3UVW65I3gfNI7yz2L95LaxK4TR/MUL47dS7u86c1vYrKzg4qF9j7taU/j/KU99uYte/MF3lfEmMip4EgUTiucVjitcNpSOSicVjjtxkXhtMJphdMKpy2Vg8JphdNuTBQ+K3xW+Kzw2VI5KHx2I/LZoQamD3y/D+DOOx7HLWfP4p0NDieOGDsUQQVyrkxrByWEtBie99SVJ7TtUHrVZGmExWxOUzc0dW2LX4mYRVOqFErm8N4q1yoQk7kvhZtqHzq3PGgEIx4BnBe8OKbTKbPZ3PJkOit/TPlYq6QAYzBra9e2JrBKst4Pgn4sJS8WhjYeoIeep6kGQj8Id7amqEZClxaIc44QLYwTJIWHOvM4qJshvFJ8uqRmvQ+ifahvHiS5bHlQWA7GSNt11L7uQ2SNn2Ky7q5RUALZLuq87+/V32OlX6xdIxoDIShOzNPEucrynHYtIeWH3N/fJ4Suv7aKRyP4lM/WuxpJFnBJtGoW+yF/bq+ciSiOoJEutNSuIYZI1Mje3h5An49z6K+UDzTJtXXh8L3UgeNf4/4etZeR/VhOLDR3E2T13OWbYLrW6M28CWxhNgggysXz53sPk/35gqap0RjZ3d1lf3/GTeduoW6mLLoAmCIysk23iErQiPMWBqxuIJohz2hS4lmAsWEqIhY+7QeruTCMiayoVeknN3nbWC6BpXuNj835ePNEJMscJA8KByEMHiuTyZRJM7EJTKd0IaLRVEIzaRBVWpT5PFB5wdWOiw/MEaCpGna2T9EmeVQcoe3SuLCcymrK0HLodsk7IeY+l76/QggokupQjcYmxDB4vqBQTxrOVWeZq+LUPLluOncTb73vAdpFx513PoHpdJvzFy7y5re8daMsFQwonFY4rXBaatHCaX1ZCqcVTrtRUTitcFrhtNSihdP6shROK5x2I6LwWeGzwmepRQuf9WUpfHbj8dmhBqa3e8ITOX1qm6byhK5FqgpxFv6FuJGSF1MuWKMP4X7ZomdWTBHL2xhiIHaBumpo6gl1VQ8NYi0JmFXTOYfL40EViWqhmDLosnyKiDWuxkjXtjhxlks1hTXm8yyXaLJ062g86vKgykTSD7mx/szbR+cfpJ3x9aQ/UHRFPShoWkBPY6Sa1L0ACyCJ3EII6RJ2LRn1Qd8gVoklJdIrDdWUO3UIgRURSAMlhoir67QtK3pJbTW6hybS7YVSqauKGGW5H0eQdP8cjyeScmpKWjAOs7x2nS0I13Vt8lCobTCJIC4tGOg8TpwpdJFE6zIo+VT+3MFRLXQ5xJgWiBxCCQE0mmV53FeSrpsHnnP5+GS1Jv89Dtge/tJc3zxBkCQxG9rnMCSpSddhkItcfo3E0KExeRU4CF0HzsLDRbBQ4ABdp7RtAPH4akLEGVnYlYbxZJoKG8N2/xgj2Ydn7F1Brm9fr+SR4MTEUO26Lg2B3I8wCgEd33f0W6zh0w8hpr8ltWdu09w3w8QMTB9Zzt1m2mAhr7GfFuRxXKXw2eDMmwUNSIx07cLa03t8VdMpvfeARFO+dh/Xt4tIWpAxhw+L6+tvshpM7klh/G4k22ILXsacw1kdztW2SKIqlRPOntqhch4nga3pFFfV1JMp26fOnEimHq0onFY4rXBa4bTCaYXTHikonFY4rXBa4bTCaYXTHgkofFb4rPBZ4bPCZzc+nx1qYLr97Dm60LLY3WUhIATqpsZVNZVPSt558I6glqPTpdBJUUfolKpu6No5MQR8BYvFjLbtEBzTyZSmnuBdTQgtIZjV2jtQEaxLPBXJUmc9k4QlK8AkXy4p8TbQzedcePBBbrnlFqbTbToFdeb5EDQr+TSYQ0C9wzlhZ2vLFicTkDAMcCcCIfZinPNq9hRgWiMJnwlcTArQQulMAEwo3dKQMH0ZWcz3zapZeTRECwU2bQgxELuObmHkFRWIQjOxkDhV+v6wxcMC2nWIt5yWDo+GQEgf3zT94HFAGwIabMgrlt9RXBr8Dhy+Hz1ZYUeNFmqavBC2t7YAC5nz3qe6WlnyObpYWDs6qOua3X0LB258BQKzrmVvMeems2fYv3QBJ8KpM2fTxEJN1pxDnOXvlA6kSnk7c3+IQ4ngLWw6ppyR8xR6XE8miK/7fjx79izn77+P2LUQA+JtYqAI80VL7TpEFHFVUgKJLEURYp/f1zln5JPIUBVCGvA2kAE1xWeeHnGQ5SxPMvI86ZV4UnIqiErKg5uGQgq31i7QtQtctLyhznvquubBB95G6Bac2p4yn0XcZIL3NbuziFRbiN/C+4mRRM5NC31ZMpmIGql2XQRx+MoN5RKQpOiMW4e/zaUmGMGqKcZKPJX3TCcT9me7VN4zacwbpu06QrpuF02O6rpmPp+nYHf7VE2Ddz6FZ1p/aIj4yvXjTmz2ZO2lAechaMd8vgdUVOLQ5G3gJJMHtIuZLSQ4V+JiH181KIKtCVrb2A4dlat7Ig/qcWkRPaeWJ7qLHaHr2NraYrFYEGKgqmucLnpd4Zva5CRGfFUhOJzCoo2AWDht2+JFbKCK8JizZ7n9lpu57/wlds9f5C0P/jWnz93EXe/wToep8oKEwmmF0wqnFU4rnFY47ZGCwmmF0wqnFU4rnFY47ZGAwmeFzwqfFT4rfHbj89mhBqYIFnImYlbJ1IiVpHBE53CqaNsRJY46fhDgtm1B1XIgRsvlGUJke/sUW1tbCEOopC3kVxFpqeu6z4+puGSZJWlI+8M7G1g5bNajLLqO+XxOVVXUdU1VVUsDxnu/lD/R+4rQBVA4depUsqJfhTy5Sclb3QIxBpzzZMuzcy4Z1RVllAPSJws5ERScExbtHFUT/K2tCW3oUtlTrkkGReGcWTtDsIUKvfdEtPdMANJidpbrtOu6PgfldDoFt5yzcRyiO4R+gqj0bLW6P5+TFYcTQRQLGK28EW6f99KuMZvN+9yRp7a3me1esrp5T+VrvK9xvkalMqIVGcmCDs2efg9eAmY5976iqkxxZMuzpDovEkm1bYuEiPMV3nsWi11Emj5scuja5fbs6yrYoNRsp87HRzRa7stcyhgUV5lyOA7y2AshJE8d294uWmIc+i/EQAgde3t7fT9vb2/zwAP3sZi3aBRuvvlWdnZ2LJdozCp0uc9zuGiuW158cVze7GGwsgGQwZmD5K2S7hFjJABd6hsRMaXddgN5jOqc5bxtW7OoO4d3Dt/LZRz1R/ZQsL6MMdJ1HZcuXWLn9DaCyV1V1UTNYcewWLSI2L2m04ZuscvubN/Iqc81DKELdCmfbp1DvBWadX0FffvlNq28p+0ckpcpzZOAPK5U+8UjcYJTD04JBPpuEuGJT3wizX0P8Fu//wZO33QblXdcOP/AYeJTkFA47QpQOK1wWn984bTCaYXTrgcUTrsCFE4rn
NYfXzitcFrhtGuNwmdXgMJnhc/64wufFT67tnx2qIHJOY+FYdlCbLlBo6YQttS3FsI2hB2Oqtx3sKo1vuUfdNRNDolUlBTeCqiDxWKOr8ziOFzBBre47MEwhFuOWpiutUXztra2+g5aHagZWZjnizkxdL3yW5H7y4KO/s9bxoKchTimULoc3pstuGOZ7zpr+6oyoeuieSdkRa/KgUGSr8Mwxvrrj5EVbh7I6yhu9Zz+2gwCOlby4/yUVnPpwyqd86gMCsss8MJiPkOAyvt+4buoaosLTpzlP608QQVyeK4MilNyAlRNql41eaYYYdZ1jTjztPCSJhDRwgVVkzVatZ+gSCqX5GtGOx4U71wS7RQa2yv8iOZ9457vZyljMlzT0KP2zfKR2zdNn5b6YnxMPif38WKxSH1qx3dtx2w2o+sitzzmjn5sjAsjLgV/uhRqulwoHM4s+rm/RZbqkeUty6+NLwEcxoOxL6flBNU+p6nVI4XgjiYXGnNoqSH3iY62Lbe19sep2vUnkwmSwpNjNIKx8PmsvyycVzUiaQHA2f6Mpm7SIpK20F9MYa7ee1wv5/HA2Fvtk3E/kf7O40RMaVp5+3Ey1DFqTP1jukI7yy09nUy443G3c2nWMZ/tM1+0a1qjYBWF0y4fhdMKpw19Td8JhdMKpxVOu3YonHb5KJxWOG3oa/pOKJxWOK1w2rVB4bPLR+GzwmdDX9N3QuGzwmfXgs/Wa8CEqq6pmpqqtnyUefCGru0Vu3dC1GDFkpzXEkx9xyQIVrnZbA4Idd1Q1bUpudTaVVVRNTW+toXXfFXh68oqmcLAcKaYfeVxPocnWliaaEQ1sFjMmc1mnDp1arC0jxTcuBPArte2HYtkpUTGYnUFEKVf9S1vSm2R2yNqJARbZC4voLd8Z2vvxcIWoKvruj9GJOVo9D4NpvF9hsXKTKiHNrDjXS+E2Xo/JprcTuPzxopcUmXGdx23q+U07XoPkRACIQbE2YJuIkKIZuWuqgpxjvn+DCeOpq7Z3d1NykK5dOkS83lLjGqhoM6lsGQH5DyZJmv20UFOU2iuc46mmeDEE9rQTzAyES0WbW95ds7uIYCvktdCVGIXIEZEoXLeBmTUXtlriH3+y9WBP24ja0uX+vGgnK2S8fha435C6b0X8vW6RNo+kaWvKqoqLWQYOtrFgtlslsbzoGiM2NIkyjuct/Fl21Y+477u/y2Xn9GEpc9nnMudzu/SgnTee5qm6a/nJHsIgfQEmxSsSK/AV8f1+NpRYy//3nvOnj2b9JcdE0LqNxnGpGrs9dh8sWBvb4+t7e005qSfbDnnmDRNT5TryLYnGzd4SeWPjuTGJs32t3d+qEuqm6rSxTiQVAjMZ/vM9veoK8e7v9u7oTGwe/EiFy+cXytzBcsonHYFKJxWOG0FhdMKpxVOu7YonHYFKJxWOG0FhdMKpxVOu3YofHYFKHxW+GwFhc8Kn10rPjs0gkl8RehaoirT7S329nZ7xVDhCIDGwKULl5jubDOZTIBkmcMGZggtGiOh62jbjptuOsdk0lheRSxMrfIVMXa9wOzNZ3gHHk8XlbpueqIBSeFc2Xptil5V0dixvTWlcjdbGZMVcD6f942fG9sGTSIO70CtoWMKL71SCHkAaf+BxFvJshxjoOsCIbRMp01a8G2gmRgj7XyBCDRNw3S6xcWL5/F1ReOrXpHZ/Yb2yXXPdc2kmAcc0CvDGGMf0quJdJfqkQZTb/VcrmQvkKvf+dxxezhvIcVE0A7quiGEjtlsn7ppiG2LhpZ5u2B7ZxsfI/OLuzZ5EJdy4zoENypntpEOBJrDgY3YLGTZQqFNqXRti2JhwFVVIQIhWFhjg/TeMlVVgQ5tlNszk1hGru86BS8Cla/63LLDec7a71ApWsVwn9B1LOYzmqoGSeGk6fox5WedTiY4iVw6/wAg3HTTTWxvn0oKKBCj0IWOuqmSXB6sx5joc/1NTJKStSFo6n5VPoA+x+po/MUQ6BYLfOXwbpiUrK2xKvv7+wBLk5SBYiTJtfSyl9v44sWLqCrnzp1FSCGuzhEDeF/1kxLnkmeJBtq2ZX9vj929Xe46dYr5ItCGSNvNqeoJecVAW/gyhcau9PtY/scTjq7rTL+oJq+MgMO8Htq2petMH8wXCzufIdw1Ex7A1mTCPCj3v+0tbE8sr/F+WzzjjoPCaVfQdoXTCqdROK1wWuG06wmF066g7QqnFU6jcFrhtMJp1wsKn11B2xU+K3xG4bPCZ9cHnx1qYOqCKWMRT4yRpml6QXfO46IJ76RpmE62qJsJaEAkDzQbfF3X0i5aptNpssA5cIoky7B1tMc5swibB4BZGnvr/kjgTIFmpWlJAp0o5y9dIgZb3K2uaxsQqUNWrXtZYBaLRb8vhMBYXnPux6g6IpZjIh06DgW1xdpkSQBE1HIzpuPywNFkKe66jiYtRpav5RM52cJ6A2KyOmaLs+XIXB68WRmPrey98rcDUt9trmtUxUvO56rs7e0xnU4tz+iojVUtj2W+kqKEGAkh9mXJFu66qohqvgChbVksWlzluemmmxCpCNEGQN1spQ4SSBZWoF8wDWLfbkuKQcyC30OHEMKqqvu+EjGZ1hjx4ox8wfJ2pg5SjQhingPBQmFFlttME2PHCE4iqDOSUz1wnOpxZEt7mbBcvjYe+7BbjTiE2XxG2y4sz/Con631Dd67VA+W+j6ScrmK9KHgeZ+qoiKoaO+dMtQlK/30Ufp7KWkywOg8MBIVk5EQA7PZjGbSUFU1MtzUvIfyYooiw74kf12Xx5fvCU5Vmc1mqa4e57zJimY9sKwPcu0z2VV1zdZ0qw9PzeWtq7qXp0wmzpnnxdiLIPdnHoc5H3MIoc8fHWPEi7dpS+qnfmKWZVCVGJUqhRsrWLi6eHyETmD30kUevHiRvdn8CPkpgMJphdPWo3Ba4bTCaYXTbkQUTiuctg6F0wqnFU4rnHajofBZ4bN1KHxW+Kzw2Y3FZ4emyOtiRMmLbpnlt66rJGwdIVlUtyYTmmZiA0Zzx2MWTo19qNx0OrVGh5TTdMhp6dygzK3xxwI3DOy8P5+bO141sL+/T9e2ZEs6cGAQjcPmRMyCZ52erLG5TFkI0uBcZx3PWLYgp/qTCcOuPSalLAx5kbSx1T9fPcZAXhQwh6hmRZTbbriv9OUYlzMPwnH5sqU1K4A+/yu5qsvkcbB+uU7DPebz+ZJlPW/vBd45nHeoE0JMSsk5U/zJwuqd63O9CilvrsLOzilboE+VtmutPgxKZbVkeaHB7PmQFVmWl1wV01NZ6Xh8UhrSt6ZZeFEGK67Sy5vYzdAQTOEd6IPl79U2FBnus9p369rflPxQNzTld10aM9C1LYvFYuTVE/sJAEBIMp7v7r3v21FVk7TSt2+e9cSVslmZBq+csbwt1XPlr3y89fOQF3U2m6VFL7VX8plMnAg+fVBWosAHWewVLWoLJ8o4NNv1hy/rA5tZWYizjc+68kymEwtPjUP+06qulseLyPJESZdDlfP+HFKelbjJ1TDWBUFDTCGsjI4ZPE+ybrAFIyuapmJr
OqHrFuxdusj5B+5bKzsFyyicVjjtYP1ynQqnFU4rnFY47cZC4bTCaQfrl+tUOK1wWuG0wmk3DgqfFT47WL9cp8Jnhc8Kn90ofHZoBFNWrjjwVUNdezQGQui4dPEiTT1ha2uHmx9zMx2eLsRkjY9JRoRLly4gCE3TsLW13V/WYZVSoNMIMSCkhee8Rxz4yoOAkgRTISbFK6pEOipvuUwvXrwAMdJMp2xvby8pz7EwWucOyjxmy7YqtU95G00aDrTGOmWftx1QsOLweCaTCTFG9vf3qesGJaRBC97LQHZZAdnF2NvbQ2Nk2kysLsHIVfNASEqyz8kYDyqJbCXVlfItFoteEWZCzMeHrLjW1H+5LQbllK+VB8H4d76nDQhP27Ygwvb2Dot23l/DZ1kTK4N6T1CYL1rqyYQQlRgFcUMOyixjIhYKvJh3fc7KbEnPxJt0hv0n0tfByueIqiwWM7oQcL4aFAM6HLjMEkuNMSjprCoHEspkl36wqgzXt++yAgshEIONjyrlaFVVutDhUj9eOn8BxcKau66jqV0/sdramqIqfbhjJoy6qfv7rSObg9tMMXZdlwhc8XV1gHCHw+XgREnNe0DT4nXOWTj5pJlQ+woVC2ftJyohDu0XtV8UcFyeXFYRMQ+QuKxozeFgpFolTUYRnIfYReaLGTDkh23bFl81OF/1Hgki1ob9ZNGNFi08BvK168ryS6P0Y07joOyH9ocQQZynqqD2FXuLBYsQiVG483GPZT6b8aa/+ZsTlePRi8JpYxROG7dF4bRxYxRO24DCaSvNUTjt2qJw2hiF08ZtUTht3BiF0zagcNpKcxROu3YofDZG4bNxWxQ+GzdG4bMNKHy20hzXhs8ONTBVVYP3gnOKc2nwVp7t7W26dm55DCUJobje6wAxgWy7BSKOadMwnUyts5MVUySmMFWPdxVER9fOUz7ADhElqoU2Wh8LgmLNmqzWmhV1R2w76toPi7mlsM6siLquA+i/c4M755hMGrMWCn14WrYOjjtoaPzlnJ/j/dl6LKI4J0ynDW3bMZvNTNGrJqU8eExIIpY8qLqFWdGd970XQc7RmnOW5hA9szAeZskVxA2k1ra2sFxd19R13Vs48/5NdRzXz7gm9gSSFfuYSMckkK3OJsd2XoyR+XxOSIprNptRe8tVuTPdovMOEjHU0yl1VaFi/WvEn0IPJSmoEIixw3JirixAqGKfvkXCUhs5Z5bbTFJIJBPEqEGWlNmBemuyDI9OseYYcndqap/DOHTVGj0O+wX6/L4iELoO7zyq1q+LdsF0MqGZNEmOByt9DB3TyZTt7R1CtL5QHK7yBLVwXJzlqlW0D9Hs+1tzPw6yJm4Uttl7/KyvU9Zeg4x4Qow9CeR65vHpve9lJIedL4duj9tykM082VBiL9+rsPLnyV7svVnMsyfivaPyFbPZPqdOT6gqTxcCPgaEnCtX8T1ZHB/L629qUvAwnpT2x7rliZhNIJXKW47o+Wyf7a0pt9x8jjvveNzJCvIoReG0AYXTCqcVTiucVjjtxkbhtAGF0wqnFU4rnFY47cZF4bMBhc8KnxU+K3x2o/LZoQYmcT4bd3HObiwiVE6oqrq3Zi4Wc9SDiselgRdDx2I+p648VWWWuHXWRtWsrLT/Z2SRFE1UNEeM5uNlGIYxWVitk13f0VlJ9UK5oqjzt4WnWt18Cp/sG3VteXXp2qvH9ooecN7aabHozIKerMzp7hjh2d/5mhojXdfinFDlMDsyCUR81RCxv50fwngFt1IOU6hZ++S6jvOfishSCGIe1FYnluqY65nrl/sib8+fHE6bkcnDxnToZSDGSJfapK4qYgj4qqISIXQ1eFtcT9P1TSZ8P1EYDQdTTEmxee/xvbdBPmI4tu+v1e7NMhMj6iKQ84UebmHviVBBRQZlPyJeGSuEY2iGMYloUtbocB0RQUayHTtSuK/Hp7GWw5ytDx1dCChKVXlU0zVGixYK0ssEMrTvMDKllyfJmTWXjpUDTZp7J9eJUZmdCDGa/MU4TKzG4yvEQNt1VHXdX9tCbxXnNHVw1gxLXWleEnWN5UhVUNfvW9YJOQ9psOtKT0nEaG3nxAMWHpzz2Q73OhgufRSEQX5w2usyQdL9lWgjGvFCCF1fx5z3FrXpTuWEyrs+X2rB4Sictq68hdMKpxVO6+tSOK1w2g2Ewmnryls4rXBa4bS+LoXTCqfdICh8tq68hc8KnxU+6+tS+OyG4LPDU+S5PMjMqu28FUSCMplMk4KNXLhwgXqyTdU01I0JVTdv2d/d46Zbb8GLT52TFFPUpCoCkUiUAGqha845ptOpVVUcoYvYuM2Dc9D03nvmszmx63DOUdeWr3CwtlrlY0wLk6WchDnHYVSlXcx7pVo3TRJ8MYuful6GJAn8GFkox5ZzwELOkkXbFnLb7y2kNvBz/aUXGufFQhFDR9subPG6qrJF9dQW3VKFuqmZty2K4quKGEwYK1+DZit0JOdZRcyCbflozWJ65syZXpl0XUdVVaYwekcC7cs6CKcsCXhqAFR1yWI7n8/Z2trqLcF5oTdI+SmdsXbbtnRtS11XTKZTqGu26hoPzNt58ggx75RcBxGSpTuVMFn8Y1S6tkMQvKtompq2ze2tSVyyQutr0w9oFbOKZy8NjWoTnHx+n/Ozr/ZBxY8p+zQ8TQHIEKp7kFlWpWlAtqrnz2KxoK7q3qJuHZWszFFpo+U/3dnZQSQt6gY9WZw6dYq9vV26oJgmsXZCKghpwuBsEcdVIjKZMGWk2Qyuw2JzIIhfr0Yy6QkRdDlXrhMxD5eoI6VsythXnkXbmsxGIyjvvQ19tTBZBepmMuqTwRIfgrK/v49zFc5BiDoIzqj9RQCnhEWXlGlEI4QAXVAmTY2IR5zDu2FCp6n8WcecVNFbGWOv3PHO2sMrUc1PSnBomtSE0PZjLYTAfDani8rOdMKFixeZ7+3y4AP3n7AUj1IUTiucNpS6cFrhtFT0wmmF025QFE4rnDaUunBa4bRU9MJphdNuQBQ+K3w2lLrwWeGzVPTCZzcanx1qYPIVSFZIOMAn5dDi3MQUsEb29i5SSUclNZPpGS5cuI8QlZ3tLTwe5yoQn66BVUPVFv2ycYwTx/5sn7adM500hNARQmS2N6OqFPF2tpNs47WG39/fJ3SteSpEUHWoeIJKki3Lm9q1XdIQSiVJUGNgUgmz+RwNnunWFOfNaikKqtIP2qwgFO0VPLD0PVjaPRqVdhaJnRK7iHYh5XvNoaU+DZ/U+W1HN5/Rti1NXTGpK7yvECcs5nMUxVWeNgY0EV+XlL2kQashWOhmIlEboMlCGzoEaJqKGAOaFBzi0rfHV340gFOdslpNHgSiKW8pAAFxkVtvvRlJZN40W6CeGLJXiJhCVoUumG7RyN7+bt9e8/kcp0psW9BIp4HZ/h6Io55sMdufU08czcRCfZFoA6Ny7M/2MF6tqWoj6kWn1iZoP7gVhdw2IYDzOElhr1LjKvC1sru7m0I6txCJBGfEvVjsM5lMgewdkbwu1OF9RSSrwyTnmgRbFY1igruq62NI3Km
9Bd/I32RTU/ji1qSx+vZW5FSnIHSLlth1eFLIt0aiBjQG9vYuMV/MmO3vIb7GVRNUGrzziZCAYB4n4ky2xYGIM0UDeDFpippbUcB7XK85hGwLX6EIVCPStQhQ5bGh5i3TJi8A52rqpqL3tnBKPwC9IFREJwSSF4WvsJykRqIhRhBFvCPEDleZl0xUaJot6npCjJq8UiJKxHshkghEhI5AdEo9qdnb26fe2mayfZoHHzzPtOvwleWL9RLJ+WRVkzdNIsfBa0OWdEEODe/JDEcIkdAGGi+gwZKdemelEyOSSJ78Kl07t5D+yhGC0jQTapQY4fSk4eZTWzzu3GkKjkbhtMJphdMKpxVOK5z2SEHhtMJphdMKpxVOK5z2SEDhs8Jnhc8KnxU+u/H57PAUecKg6ET68R5V8c5b+Ko4vKuIXcs8tijQLRa4qmY6maaBLr0Mg1nSJYXemaJPZJIayyyKnhiiWYSjIlkA8qBV6LpA6GywVFXV5z9NkkI/IqBX8qpqtl7JC3HVycItSUAsTHFZkctw2dEle8viSPGP72c5T02wzSocBoHOSlisXG276MvfJMt8zveYLcq5EFn9giLe2+9s2U5tnA23qpGoZlV3zuFTqGJiyjQ8x4M013v4aQZkGe0d90M00koX7cNEe0IcmqyqKlxVIbFDxMJ4bX8OT4xo8j4QEXDmTYDk4rqlMoRglmbBlO1q/tr+xNWOYejXGBUkLwYI+21HrMPQR1nmNKZzUzmVZVno/x/dMLWbAqJqhLoiK2nTyDNB+4Xw8lgYxl+eGiSSwUK1UevbzGuqVr+QwjqbyYTKVzTNpG8nU/SOoOaJ0ss7yzKRS5X/Ng8JOTDBGeo8ahR0VK+8W/tPzukKEKMnzdTSRMTaqa7rQU+QQ6yNpKIOMpYLqRoJQRnGbSq59EXq/4gaaedt73UjzhFipKqFytf9eBSx6dLq5G4YP/Qh3yLDIpPZ8p/bwHsPIQ9RIWg0PZjKHjGvF3VpeqWD7vDe40QIox7x3rG9vcUtN99M6D2VCg5D4bTCaYXThnYsnFY4jdF34bQbD4XTCqcVThvasXBa4TRG34XTbiwUPit8VvhsaMfCZ4XPGH3fSHx2hIFpsKSvdpRLuUPFQV03dLM57XzO7myOOE/tPZNmQhgrDpcaWqUflEtDIynBrIRFch7ACMSsE5HcSe0cy6M4hPeNB1omp1UlnK8PFoZb13U/2NvWBrlbs0hXf82Vv1eF2QbcMChN4E2B+ayYUawJLB9k27Z9ntLJZLJ07TzordxJUfeK1azWMXRL6tqKlBYs1GiWSOfS4nqDf0heMM0GyUo9joBGy0FqStj3facjoR9fsq5rxDs0WOihhYZaHkpxDkkhzF3X4ZsaV9XJm8IPCs+5fhC0XdsrjBxuu3zv9fUZ+lB7UsmKPg9MkSEcc/z3an/n8cCKjC03FDaAVVfKtTxzyNfquq4/Ni+GqL2Sp+9/1WFRvLFygeE+zgmTyRYxRCbNxNoJ82QBB2FYMHBpnKwqxjTp0xX5z591bZ37yckwEsbt6HJorCreuT6ncQjmReGcp2oaO3YUAixp8hhjTONsKMdAkL6fbORJCGlMZHKIMbK/v2/t5QCVtM/17W7lFFSXJzqmG0eXHdUpn5snaFlGc/h21jUxmn+DQyDGXtGT2jnGSBwRhU100sKLYvfa3t7Ge8/Ozs6B9i84iMJp69tk9e/CaYXTCqcVTst1Kpx2/aJw2vo2Wf27cFrhtMJphdNynQqnXZ8ofLa+TVb/LnxW+KzwWeGzXKfrkc8OX4MJzMKeOir2FTeLnqJoVM6ePcuuC+xd6rj/vvu56663ZzrdsgFbNWYRTop1LEBZweSmqrwHmuHeItSVAwlWeR3KEUPHpQsX2N6eoqrs7e5y9uxZXFUfVMaaBhEWEuuSAgqh4+LFS3aflCu1bc2KubYtGMn9ysDOA3O0BcVyu4oTZNXIDUn5m7dE13XUdd1bTnPuVBGhbVuqqqJpGlOOEew/3w8UG9z5upkgzO7ofEWTwjgtL+yULgnhMMAPUVQbEGKgazuaepkUbRDkMGRPSPku5/M5D5w/Txtaqtqzvx+ZTBq2phM80HWW0/P8+fM85nG3M93epqqndMEIjtTGXWftNe9aTp85Y4M6rC/jWoj0C0WG1vLoinfU1TJhDOTqUh7Z5YUKra21v+aqTKzDQTkZSCkrBaDP55vvvXKVvnzeV7hECLO9ferGcgHP9uf4lEt2MZvbPeo49I1dxn57k9Fcj9W2ykylohaSm+TmoOfGGjghpokPYHk/nVuqWwjBZFMjoevMU6kayH2VRPrJZyIfGDyDsjyfPj0xYlILEW3bFkmeCiI2cdQusH9p13I8O7PM33rTrdT1BMFx5syZvoxV1fT913/S77wwaO7XruuWiCcrecjeAtZfXSa90cDVqJA8OzSNGdNVw4TEC3Qhsre3R5V0xrlz547uiwKgcNpSW1A4bYzCaYXTjkThtMJp1xkKp43agsJpYxROK5x2JAqnFU67jlD4bNQWFD4bo/BZ4bMjUfjsuuCzY0UwWUNqb73MljGXFFjXdSwWLSFEzp07x/5shopw6tSZ3p4umPUrW/Gk3z4ghEgIHdXWpG8sJ7kEpsyyNZC0uJbls7Rv7zy2QNtIESch6UnKm00yBGvkvICZE9fXa7PSkzXjYBDEQSAtrNGJ3cM5GdpLSIJh1Qgh0C7mvRLPC75lS+Niseh/V1XFYrGwhfycT14FadEul9tl4M7sWFElIbMFvfygOGRsQV62+h8HGo1McpsJjBSkyW+MJryK0jQNZ86cIcTOlEYMVCl3pXeO2LVAsqCmxQWd83gEcZWRRojJ2g/T6TSVXQ/02ToFcbA3hwXpvLfF60KMtK0RTtM0S/KeQxp7q/1oQnC4is9yy0iRZuvwCmGk62clGmNM5B57BdtLWYxUzvcTsX5BuqhUzrO9tcViLpy/eB7vre0s7JHeau98lZTmqAb92Bm1rRxdy1UIgHOp7MuTRFm5n688oRvaoqqq3pvHlOowxlQVUcV5j0bzAMmXs7yjli+08ibXQ7hobjnrD+eE7e0d6pQfeD6fAaZgsxyMPUvMc2PQgTFGJMaU9lb6PulDS0fE0/dzahjxDgmmX13yjpE4tLCMxpNL4d9Rlaapie1imXBSWQqORuG0Ay1SOG2EwmmF0w5D4bTCadcbCqcdaJHCaSMUTiucdhgKpxVOu55Q+OxAixQ+G6HwWeGzw1D47Prhs0NNgb21ToaBkwVSnJjSdC41SATn2Nk5NSj+mJcg015ZryrKbJGjL/AwAGRpUChCzkeZFvJK8pfVTLYwLjdsul4+ZqUA3nsLH3WHK4bB62H0ew0GoRwEc7ByDnWBnPOyI2rslXwe4Lm9e8JzDueHcNwhf2X2WMhtmBYCU6uvE+sjawBZ6sfNw3bo54Fbtf9b1eoVYliymmYitwGR87cOf4uYMq2bhrquqSpre5fyPObr1HVNjIMsmPeH5cAMXTQninRcLlpu68PQ90om5r5/6Ns4htB7KuS+Gw/i4R564NrjMo
x/a27DAzKzPFgzxhb6XIbhWqkf8sRltM+8ZhQNEVGok2znEOixsooaLczV5QHO8MllHRc37Rt2y9L3Uq1yG2S9MSK1dV4YfdvHIQ+uG5Hdav3H1xvubwW0a3Sj/Tk8NusOQwiB0EXqlLO5qRvAMsTGqL2Hw7hfVhXrar+Nj1kNU83KHJE0UUs5XRNL5Umtk6zJRk0/mrQ6l4l+RD4xHqnoCwyF0w62xdLvNSicVjht9XfhtMJphdOuDxROO9gWS7/XoHBa4bTV34XTCqcVTrv2KHx2sC2Wfq9B4bPCZ6u/C58VPrvWfHZkirw8uJ2TXvjr2gNilmQiF3cv4b1nqzlF3TR0UWi7jre+7a3ccsstNG6CF2c5OW1UoUqvCHIYp/cOqFJopnW0dXwgi1jlhK6zHIGTusZh1urYJYsfQwhdTBZMs1hXQ17JvkOy0rTGDCGkhfc8ldQjYXFDDs6RsKwK+bjTJe1v2w5QqsqhGrB8hibUe3t7iDim0+mSossKPgv91tZW72HQ1I0txtULf0wtI0QNtF1H6AIOaJqJWWPdMFDMi2AY0WNFYm2hvSK39hwTgvZjRVWZz+bs7e5y8023pjppqi+pX02Jhi7QtgsWYcGl3UtEjUy3p8TYsbU1ZTqdQNcRg5H3bbfdRhstP2zddYhUeLEyt22Hrzy+MhmMMU8ilhVP13VLCjMV2ogw/50t82mwtG1r4dXevEpmsxlN0yAi6e9J34Zd19HURs5AateI8ykXZr8NG9SsV4yrg3QsA/mzWCwgEXOW0+xhMJYXzQotRrr5DIgs2jmx67j55pupqgbF0YWAiEecJ3HmEnodLxBlIPdcvrGsrmIgOvuOedoj4CufQtZZUqLiHSEGLl66xGI245Zbb03EbhMVAA2DtTzvC6FLE9HcttkLwzOZTEZEgfV1/jsoly5cpG0XTKdTmqbGiWNST2iahi6Fjw/9Mngj5HaIMeJUyeNotQ9jjGnCax4pTdMQYsRVzsZXGHmI6OAppEAfLK8Hdc1sf0bUiIijrm2SjQwL/BUcA4XTCqetCgOF0wqnFU4rnHaDonBa4bRVYaBwWuG0wmmF025AFD4rfLYqDBQ+K3xW+OxG4rOj12AadWgXAoLifYN3jkU7ZzbbByyXaNPU1HWFimOxWDCfz9nd3SVuxRRWmAR+0Be5nQhdZwWuqiXBc04QjYiGxDjC/t4u7aLlzKlTveVX5KAFztrK7pRzK6paeN68m7FYzKnrqrdu9qFljDwR1NR7rwrXCHcebOmO/f/m7WDKJQ/OTF57e5cQqXuhcEmhZEWSBWfsXYBa6GYkLzhnRKAx0oYF7XzRW9m3p6fo5VDURprk0h2sw6g2jHrnwD4Ru+h8viBGIy6rgw2CqqqsP5xQVZ75fB/SIO/allOnTiNe+tDauvaICPO2ZTafMd/fJ8SWnTPnqKsmlTstRrho8b6imUzMQk7IpVpb5GUS6zurH0BZGVeVtW/XdZw9exZVU/qnTp3qCdcmCXYJWzRtsCJrUsL5em5kHZY0sRmXKX/bQoPjhRTdWvmyYptsxxiJ6XjvPcTBqm3EomjomO3tIZUQY6BpmpE124E4EA9iOWAR+qhUWWlKHSn6vmgbxGOpbgzjRlbqn+uax23GdDKl9p6trS3qpiYidCmkPBOHc65fbE9V8S5P1Ez2TCGbEo2WhDW1XSqTKrP9GSHYvXe2txOJBESF0A3E2ySPF+/dkjdGP3z6yRAjUhkU/9jboNcPmZS8Q2PyrAqRbrGwyWQi8kXs6DQO/ZauEaJ5bGUxdmkBv4P5cgs2oXBa4bTxvsJphdMKp/WVKZx2A6JwWuG08b7CaYXTCqf1lSmcdoOh8Fnhs/G+wmeFzwqf9ZW5YfjsUAOTqg6KOARQTYvvWSe1bWvW3rqmqptkzXJUvkJreuFrFy3OOaaTSd84MUrfSEhaoMopvU5LCtYqmS3+QuhaYgygEe8d3WJhinI0gFfLL4jlSXXDPa0M2lvgxpbesWXyYFscpiSX7m73cuPRYUo/JsU/SQNwPIh7C74MFuW1VtukxK0ega5rjVictwXYnE9lVhvEWIibjP4+PnS4nzWGtY9zVFW6D1mxDfUh1yndLYSAdwJqQp6VfAyBrm17zxJVu5/mT1RCVEKESVMl67IVRpwRsx6nQonEswz236n3FcuvuljMadvFKAw79HKYlYCqhXsu90/+jG8pSYkOVugDg5/1E4h1iL1FfJiIZMWpzvK1KuZRE9qASmQ6mQ5KyeWypgSidjGrferfpaLkIkovAgcU9yoGbwL7X+lPTKSh/fZeB6hSNzVV8uKIUftQ17EMbWqndaQuDCQbNSarfWSxmCMMYeo5f6rgUB1kad3Ci6hRxjryWh2n4z4aSHClHdPHFtnTvo9DyAtlLoctqybHIBk8oHIbFRyNwmnr2qJwWuG0wmn97sJphdNuIBROW9cWhdMKpxVO63cXTiucdoOg8Nm6tih8Vvis8Fm/u/DZDcFnh0cwpQ4BW8TKpYEtTti7tEfbzgkhcPbsWVwOh4yK97VZfJuaixcvsljMiSEwbSYokagQ4qBcUbt+VduCdH1HM1gHnbMQwP29PSt4XeOd5XnUZKHXAwPYurvPdzrqtNwRk4kt7IeMFJU7OHBNaR70VDi86WIKq7NKioMQO0JIeUFrj3MVwhC+l70Ncmhq27Z92Ydraz8gYwyE0NJ1Ld45mrqmriZGYs6sxIHQE16ugOgwhkel3vA3SXkkBZf6o65rJJGW97V5OYQ0CGSUDxIbZIvFAkKX8ugKVbWVjrF6kvrb+SHUz/mIOMxDQsG7Co0pv6a4IRfoutqkfh5CVpcrnSciTVMZIQeSPLS9ldtkMfaWb5ChXnGZlE0+XP/3amsOgz0t1CaDku77VYfJxxhZMeSyuDy4VXEiqS+g6xSCMGkaLu5eRBycPn2GEG1hOO9NxnOAr02hVtoua6J1WBoDqxOrsZJP4xCGiUYe7qn++XfUSAwdzWSCUwvNDTFaPtSqXiJGGbVVTxbRJlCrkyJxSTGrEkJHDOYVMp/PmUyaPpzVJlggzqexaNfKC3HaeHV9R/RjMRPlSJ8sN5UsfccY+4ysbqXJ+4ld7mxNi3Qmj4g8IXFZN4nDe0n5po8/UXjUo3AahdPyr8JphdMonFY47cZG4TQKp+VfhdMKp1E4rXDajYvCZxQ+y78KnxU+o/DZDcpnhxqYpO+syKKdsb21TZUW77p08SKTacPZs2eT0KZF3bxANKVY+YpTp04xn81YLBY8+OD97Jw+ja8aiOY1YBUEJWDDxOMwYfYINA0aO0QranHct7fH9vY20+kUfAoJdM7C8cTbWB41vBPBewuBRYeQPhGxsFoFV9WmNFxF1A5RUkkwYdKTTAxSD2pEY6CqPd4J3jlqX3Fpd5fQBZq6weHSAluOdj4HbDEt76UPae26zuoqtlyYSwoOVeZzs3gLUNcTmqoGHCFEHJ64drQepx4DwQ+5UNXqFMxrYXtripfIXJS2DVTVxDw72hZfCSEkxS6R2HVoDJw5c4qADda6qfHeJDqGjr3dX
fYuXYAYuP3225F6ijiPquX2rCdT6smUGJPiEMFVZukX6HNmHqiJjkeUKeC8fUz6AFGVyjkmkwmqkQceeIC6rtPif7FvOzsvoESQiIi3MiCQFonrpxlidcy/s7cI0Ofmzcp7TOjL5U49kizMzcRCnGezGbWzftYU0um9x0tNiAu2draYtwve9JY3c8edT0B8lXrT8vEqgkYTp0Esln5skI0NENMX/fd416hemSjH29rFwhSrCPP5HB8DVd3Q+GqJWJ1IVu8gmVxsshNjzg/r+glfCMHkMA6Txarap2kaqqpib2+P2Wxh1/YO5731ayqzhb7CdLqVqp8VrsM7Z5PN3DKpzkPeVF3qU5v0YO1uG0xXqoKjv6/pAYWUVzcEmwQ5JzhfI95yyorkvLbS5+QtOByF06BwWuG0wmmrJdmAwmlA4bTrGYXToHBa4bTCaasl2YDCaUDhtOsVhc+g8Fnhs8JnqyXZgMJnwPXJZ0cYmEJvqZVUvBBsAamqqajr2hZzS4WPOnSqE2xAV4I2tqPrOmb7+1RNwNU1Kj6PO5xgSlkEiQ46RRQaV7G7u4dXiJUd77xZ+7sYCJJsot6BE4JGC9FTTWGgvUwQUyfUKYyz6wKzeUvdNDgvQAplJIWrrVjvXU4MmYQ9WzNHBnrQmKXPrqZK5TyVc1x88Lx1vvNUziMa0U7ptEWjLQ7nnMc5ekUiTnrFpqqEFC4ZoqLBPBVMgCtEbAG6rNB6FS1jy3QuqdXBMdRJRuUXjaPjNI3diKjixcJLzRps+TSDKtoFWwgsGimIRFQ7QjCPE6lAxdokRFvsDTWvEXFC5R04xQmEaIsCejcs0qbGyiYskuRNlnNQLsnvaICRu1PMsyBEyyk6eKDYdo3DImkigncVlauIBIKzXMDEDiEihNReAVWXQisDER0IhazzkiU8lUMkhSdqkjpNH3I4dfo7hS9qUAvv9bXJaNKnzrnUN8EmTGqeGzhbCDPEQBciEcEivCOuSh415MnDyFNFGeS/b8Ox0s4Kdo0iX1Ly6aORvNSgQ4koLo0d7x0xxF4JK0a2ZiW3kOQYkg5y0stu8qNJE72IqODUsb+3Z8Tc1MQY0nixiWpe2K7rTK7yxNS8RWycZYLvuo4udEyahsWiJcRuyFua6qJq/aHRwsPzOAohpLzOaRILaIjELkCIODeMxZiWIlQCnXbW3yhtt8BVHvGmv5wzPshzvKqyCW7bdsmbwMLVC45G4bTCaYXTCqcVTiuc9khB4bTCaYXTCqcVTiuc9khA4bPCZ4XPCp8VPrvx+ezICCZJgmYD0hpvNttPC2I1wwCUIeAth+o58UiVco0K7F5asD/bpwotW/WpZBU0ATGLuqScpiaQAlSuopvN8ChhUlNV3pSyg7btTFCdtYaKEtVCy7IgOhEC0QoQ1SxvzYTsUTBvW3zVWM5BEQazqvad1VdM+z1JiC1ETtLoldROqgHRaOFoUanEUYnj/vvv49SpU9STmip5A4TQWUhm9iDo8yF2NuD9ciBhzpOYwxTrlPfUuyqJv+vD+XKfKIOVfVCImQYYBFc1KTATv166+mMs0K5yjhBaQrKMSuUJIRI0c0bEPEMCaEuMLSF0tCKIeEQ94tKidKrEoKbkmxrUoRroOhAvVLXg68oUe4y42vpJBSMIccvKM9dDhpDFLAu23WSiU10JcaYP/1wsFsxmM/5/9v4tSZJcSdfFPlXAzNw9MrOqeq3u3pTdA+AIOEEOg5wAha985lyOyJHD3r27V9XKS7i7GQDlgwIw88jIzOpNPnSkAGtFRoSHXQCF6v+jRG/v3r0jaPA6ulSsoWCWoX75GsXB3fxw5HYjTtJdd3ZZOqELOTUd1Q7sDR9bQmj7nq1gRZjijFhNdQWCKOC1Yl1Xi9spxdOiS64RGVId19bnVTBM2gFnT8V8FcRfgj27nPfrdhmDVdIvSE2t9QNEPTg0wOewD43k6sGlEW/z2pv4O4sZJtRIFDrQ3+43VIXJImZ+GCqGXxsntnVl3ZLbeMWoEIKTf1CW08y6rj1V+elyYVs3cq6Ht/04hxWXs+aKR1VmpQK9qkclSMWy0lPqQ3+Ow3whW/HGepV0trxyPl08ZVsMDeJEbx59FYKTYkpePsBr8o7a3n9mDE4bnDY4bXDa4LTBaT/LGJw2OG1w2uC0wWmD036GMfhs8Nngs8Fng8/ePp9938F0MJRt22je+WVZuFwuuDfea0a2Bm+660sVYGCJyrxMqCgfP/+d6/MzuWz88v4XRIM3UssZmWdUA4lMthqNIN6YbUup1jBciCF2cFL1RnohhJ4yG2P0mppdwTKr3bnXKAg7X2pq14ki0g2vrfU/O8zc2ycCOTsYS3HAzLnUlNPMp0+feHp6Ypp8fvf71kF7T1s0rHjNRgM0RH9u9XCnlLqBhVCjOTQgohX8eTDA5p38s0OQWpuyHACUSr7W9SJt+zzMDMyvVxViCKz3jev1CmKoBqZJKjDTQbiUQlCY58BpmrEglLzx+9//YH76ldMSWc6Xapg+GamGWyX/9fwPwPP9PXNZt+gNkXqQyYVt27jf7/zTP/0TJUFKmWWZeH4uFMs0kHsptzavfjYqBiVT2YW9JmttXumLeiCi18b9fvfokxhRpTaY9BRF843p96aU2NJKFNjWDQx+/fXXWpPX98bJrT5cQyeTPzuO8zzqgFUCmqaJnL1mrpRHAGpp4q3G8bY5qJ7PZwf+vKeJNk//y3cKPOh3A86cUrcnUT8EBHU8CMDztvH582c+fPjQMa3jRCX8llra0ofbPEII1fZ42LOjHNpnrYZqS8c9rsF/dz0I7FEFqjMApWRijCzzTBElbR4hYJW0lmWhmOtkzpl5igRCrdM7xo/G4LQ/NwanDU4bnDY4bXDaf/0xOO3PjcFpg9MGpw1OG5z2X3sMPvtzY/DZ4LPBZ4PP/ivz2XcdTMd6nC11T9XrjrZJN0FRFQlVxGqdQxEngroVy2nhPe+5rzeu18886yfiNBPjRMvgKuApaHSnqgsse4rmMk2I7B71aZqJql3ZimWs1kL0VEohiAs/pa3XKYwxEoKyFXtQqFI9n98eX3tU3fB3hcNqSiJCqWS4bVsF+Qnw5oJNEfwzX583Gawe1aoQua79SAohBFramytZ2WdnOyBTZfU9AGxKamY9NdLXVPfEWqpqBWpVB584Mc0zrvjesEyDsG2ZlFN/tgNpYF0Ly+lU9zv6bhevGbuVxP35mbythBhZTmfmeanzrl+VcdpKpPpim7F30vnesJ32RPAoAalQJ8K9Nv07nU4e4cGBRPHohXVdX32uQPXSH4hAQJuNUCMCzA823mixzUUOQLDrYtPX417RrxFKdi9y04ti5iQd6em40zS5N1+l1tB0ghCrQvhfGA9r5BGIe2qwqhPd4bPjaIeWUMmnxoTUw8sxlXTX5xZBYUglauue+mVZeqNC132vF6oilJwIIXA6nWpKqvV9CJ38HAsaCVmVXSkVS+wxmsCDRwzY19agwwN9CqqCqn+es8/12HSwDRHp9u2NNvf977KoOp5TohSfN3UNrVnlGN8fg9Ne
G4PTBqcNThucxuC0NzgGp702BqcNThucNjiNwWlvbAw+e20MPht8Nvhs8Blvis9+mMHUHtI8a81Lvy+seuGOHbuqIooqpWQwf1YIkeV0QtRYb59J20qrvRlCxBtOmecEWk0hxBVWzNMiYwiYSE3P8jSuoyEeFdcF6YbhKWnSN7B5kLeS2o497OHrw5+F7QC/A73P3Q1xv9zB21M8z+dzjRigeoK1G/YRbBtpitaNzamDfSPbIzl9Ncc6AZE6uyPwH/b21XuPKFgJxACt65Na2XJbN0RbPVMXSUvRXCuh+txLBTlPRdRKUsWMoJWIrHC7XlnXO5a9yVqI0RufWUt9PAi1fpcufzopvlzXa8B/lIfLcV93ybkeXryZ3Wm+MEXXG4QKDunhXTvoeSpmefF8Pexxi/ToUz8YvbzQ3Z0QHj3q7RBl5rKjgxB1z1raqnXbO4L8cbTDwJ8d7T2vgfzxuxmHeb5Ovs1mm/deKpk34D2S3nG+wkFNcSxY1/WrfbV2OKjg2Ejl+Mymp+2zFjXQogH8WY9E1feiKv5O0D6jurr9YCS77hZrevt4wGqH1zZHq9c23HUbqjhR728RIeW4/2N8dwxOezkGpw1OG5z2Uq6D0wanvZUxOO3lGJw2OG1w2ku5Dk4bnPYWxuCzl2Pw2eCzwWcv5Tr47L8+n/0wg2nbasoh8OHDB5Zl6WlRZu4lX5aF1uitWEvN030DD4tbppmowNOF59uVbb2zrSv/9I//DcPBXUJAMU/PKpl1W3u24jTPvtDqdSvi3mSKcTqdQINHMxwUzL1wPl+vbZgJtfbpuq5onXvQ+JVy+TMqcHJU8CPQg1GwUhvVVSMrpXQP3zTNxOjzcpAPTJN7DL2easBrze7RBaJKQfjy5VoJQHh6ejrMUfrPUqMnrBwM/TBeA/tvjWYopWRKbZzXxVAV975uaJwqNxgiLbIk8OXzHaue7efnDSrIQCClQikb23rj3dPJQVDh3/7Hv/J0OXM+nTHxGrnFIBeYJ/WfcyZqpDMLX5P7nxkdiKsBNbmXnBEVLqcL0xT53/63/43pHxaWeeF6vdFSOlNKxBj7Hrgnvz1b9oNEjCzz0r37BihOSlrrWb4cO2nsnuSnpyes4I3psBq5AyUnjzyxg975U7z5ogpk61EGWvXpsNPfldNretQ+f+3v7ecGilpd6Ecgap7+o9f8CKpSD5NHcvuK5OrMVb2Z53a/8fnzZ56/fCaE0A9UhVDty/jy5Uuf1/V6ZVkWVLWn2h/f1fbWbE/RP52mw6HseDCj7/fLPfR05F0eLofUD3jF8gHcpT9vnmdvqlnT3D3qySOHnJg8KsTrpjoWHw/fY3x7DE4bnDY4bXDaa5+/9vfBaY97ODjtv94YnDY4bXDa4LTXPn/t74PTHvdwcNp/rTH4bPDZ4LPBZ699/trfB5897uF/JT777l//+OOPnsp1uVx6vc7jxooIhyRJb35nghWhHNNPAbGqlAjz6UyYZq7XK58+feL333/ndLmwnM44dGT/ssz1+co0RU6nE/f73Y3khZU0cEKlv+8I8BioCPMy1xqIgSlOTNNEiAEN2jfr6OGjrc282Zd0b2Hpa/acv/azezetFNb7HYTumf706e/epHCamaYIuIcXlFJSJUZl3bzuqwESIiFMnp5aN/yw6uppdPB4bexE8LVXtq11v1aRFg1x+NxwD7sGB+BulBIqwRdvrChGSl6nc1tXSs48vXtiS5lcjGm5dFnEsGA5kUohpY0PHz70NEKJE2Ga0WkmxoVcfG6x6p/lAmJIqJMzb+zW0g1fW38Hp+rtb/vrxr6CeVqiVuC+3W68f/+eGENP1xaFaYqEcOF+v9fnQ0qlbnxNV23yxgHNI0z87R20sCo7vpozPKYyb9vWa5l2b3Jx4nf9ETDZ6+RWAjufzuRiXWfqFLwJnijSGkya1fk8AusRoEWkA9fLg9CuD1/rV09brXvQMOR+vyMiPe0655reXOsiNwB+Gbl0tM1icL/d2NaVGCOXy8VrJcepHoz2e3IpBPU6ztM01QOXH8Qul0sl/NJrs7YDbCOkfZ+k65lU+3XYaKjTaqAaISj3+41pCoSgpCTkGlUkIhh7ZBCUB7nf1zvFPMU+hIDKjk1t30Uin5+/AOM/XP7sGJw2OK3t7OC0wWmD0wanvfUxOG1wWtvZwWmD0wanDU57y2Pw2eCztrODzwafDT57u3z23b+2jVHVXsOzbVpTQle8uvniwOreZbBj+UNrUFg94BqIGghbQkNk3Va4KYZxPl9QMU9dLS6w7gktBVO/rpQCgZ4K6N45qmE14N1TBJtitaFVkVS0e4lFOmQfDLD91epaDwRQQf6lsRZzhY3TnkLZDGff3F2eu5G4suWc3asaXNHDoZ5mm1O792EI+wJeef73wN7/1gBxvxcTkG41D55pcMUU9XtLya78wWtVblsi1/quoRqYYIQqj5w2tqPRx8h8vtQmg15j1+QlYFci2m35YR1Hkn4ApRck1p7nTQAd9E2VfGj4VkpBcGOfp4iZYjVN+utDj7YXOXi2v9d5Oji5oYp3kuz6cJx/A5fHv+2roMqgE4oVjN0mVZXb7Qbw4JF/VIqq1SKH+rev60UbxwiBo6xfu6/rWz1sHa+1FzrU52eP1xyB78FGrKVx5gq4Pqd5WfpBolizy0q2xZAgX8njSFKvrfcl2TkvHn4/HA7bPU0/WgPPluKfcyFvpe9lKR5BICKklB/s6fj8l/PVqjselVO+wrUxvj0Gpw1OG5w2OK2NwWmD0976GJw2OG1w2uC0NganDU57y2Pw2eCzwWeDz9oYfPZ2+ey7DqbuxdI99fO4AX13gNbYyuuSJgwoBq5OVoGi/oyBRgQhTjPzcmLbNp6vz9zXO6fZwU3MKOIpqPM8VzLZNzulxDJ5ytnR02ZVWC44VwqtSt3TZ+uzpmlC8NQ/X9tjypkLnPp3B/mjQpjtHsTjZ01Jmre4DdVjE7EdwBqolOJebBGXTavLqHXeTQmaElfhP5LSi+15JIjDn+zlWuyBIxqRo4LYLktydt3Q4BEjpaAiQPGIiKhARDC+fL4iGgjTRAgTWAasHgiMtK3crs9uIEAIkfPTO5JMINqB0ToBV/2pSZcemBIq0exreg1Aj0DihNNq4x5PJPueNI+2qd/jjdcy6/o1APieai3ta7SOauIX9MNGKZXAp7nbx0uC2rat60nfh0pIbd3t4OJ6rVBTlDV4SvCnP/6GhkhoXvUmOvEDDhW0unIfdOalTrTvDVRerv21+3yPvfbtkVhL2RsIthTMoz025T0C+5HAugaUPTrAahTQPO1RB6UUUJeZR1jsxNme9/JAcPxS1R450fbI37/boGirudr2rpDz/p721dJxc/bUUj9YSD8UgUeOePqscL/falRUoHUhNRcwRktVFnLesGJo1PEfLn9yDE4bnDY4bXBa+z44bXDaWx+D0wanDU4bnNa+D04bnPaWx+CzwWeDzwafte+Dz94un33XwfTu3TvWde3pqm2zHwTzCoj0FLz+78ETjyAa0RjI24aGmfPliV/niecvn7j
fnvn49//g6XxCgNvzjV9++YDGyPV24/2vv9XaklbT2LxGYEtxy1bTZeu8VAQUUk59g1NKrOsKAmGaEeSgWNUaDgrmazIgd3Buq+G4UnNDvF2vlC2DWa+7GkLg/fv3D17Y/R56ity2eW3D8/nMvJxYzme/rhRMdo+nG9vu1fX6qgI8NmY7GvC3xlHZjT3d0rnRlduMXrsxpcz5dEbD3GVllik5cb194fPHTwBMMfLu3TtCmNA4IzEguSAVV/O2kdaV7X6rKa6FXOD5+cp0mQ9A55EeDpaeBti2qhRvghc09rW8diDZPbwFqw3WEMipNj3EWNeVIK2GL9xuNywLy7zw9PREqPUne+pnfY/XB662YXXOgJin+Hagh3ogES/yemgQuO/pTirt85SS6yj+fofmljZNJzCPXAmowrquLOfYdauUggYD81RW0Rqlcdj3b+rEK/r6aBtfk2zzdh8/DyHsUTKye7/74eUA7semeA10t23z+yv5T/NC0BqNsm1czqcXkQl7PeLT+Uw8kEoD8/P5/AD6Hz9+ZJ5n5nnuZOQRMQ7EbT3H9FEwckkPJNYOEoixbneKFa7PV5C5HyaP+77f6weMIC+h2dcslWhK1Yt5mXu67xg/HoPTBqcNThucNjhtcNrPMganDU4bnDY4bXDa4LSfYQw+G3w2+Gzw2eCzt89n33UwHQ0lxtiVqW3SUagNVHyBrugq4jUbAa8TeUAdq83UzNM6VQOn04kgxu35M1/yhuAew8v8jmLGdl8RKzw/f+G+rizTQlBFEYoVSsreqE3EvZdNQYvXZw2Iex5bo75c0AhynJbxldJjblj7r+b/2g7w/ozaHHC9U1ImGqhOVU7UdLXcoxxy3u9PyUlEQ+R0UublRJwmzOQhXc2b6pWqLC3yoRHUjutH433Yo75O++ozMeurE79x/5sqpab85ZxZ5snnAqhjphvVfWWaIsqeMnk6KWFyr7crv4A5+a7ryv1+53J5QkMkm3GOs4NQXUPshrsfG6xuVjEnpuM6Xq6r/e4qUb3C9XmlFGJ0WVpp6YK+t7fbnWU6E6MDhIaA2u5dpq4npQ3wOral1h6VKpRcSjXQdgDxtaSUkGl+dW/meX6YuyB17QaltCCFSla7npeSgUDQQIyR07KgcWJLhVzMbVDFU7urpgi2V/F8IbOXh5IjIB4/P0aqvLxeEYq4rFQD3hywAXlLzWx6XDrx7AcvOgh6PVplibEeMMUPYP1AFbpnv1QebZgV9PHEc7/fyTUi5riOdV09KmGeazRHoBTXU/9sJ4kKULQXWsneENEMjYEYA1vaekSCqjeNfCR9//10OiHi0QWlFEpKjlmT+OG47pnU9zcSaw1CW3r1GN8fg9Pah4PTBqcNThucNjjtrY/Bae3DwWmD0wanDU4bnPaWx+Cz9uHgs8Fng88Gn71dPns9h/EwDNcjbSlT1oCtpqbWtKk2YWkbLLsqCSBWJ9rkbUIpvsm+0a7gp/MJMG7XG9frlZySp62KG5SVzO36zPPzF2IMBBVUQA0sZ2/EZoa4Xvc+eI18xJqxNc9vV8vDiunroYKfl4t0g5MmFOxAKAYUct7cK54TLa1MVbu3z2z3nDYvYi6ZUgxEiXFinhfiPKPB/X8PURsiu+z7z3V1Ip1sfSXWwf74vRvj4btYRdGDPF4anF/qRqeqiIJIQaUCaSXPeZqYp4kooXqxzSM6yi7DUkonDffSnut6hRAnRBV7oNeuNnWabgAd9eBVEHp1WPPal6qTe6ozIn2PUsqIuO53nT7IvaUMOkk/ApzXcfWanGbWl069x3XvQM5VjwSPwJAqa6i1fjF6cWE7bpT1Pd3Tbv1vU90HEa/v2mrStvu+ftaB2B/07AVMNBk3e39FxDuV1GfVpoCuZoJqdCRG/TAj2tT3xatqhFDOTuqqTLOvS2U/rHhkQz30VJ0oVVdzStVu9qdv28a6bg9ra4DZonTaZyBsW3o4O6q2Gq8GlrHi2GPF90ARYo1uKdlTWFXDwwKP6dUtFb+bZMVFM9mxrMt6j5QJGhCUkn+g82P0MThtcNrgtMFpL+U3OG1w2lsdg9MGpw1OG5z2Un6D0wanvcUx+Gzw2eCzwWcv5Tf47G3x2XczmJAJyBiQM4hOvhkiaJh3EOjAXyfFnkanQv+MPlG31mCKi8S9ioqg08Kvf/krf/uPf2e93ykpEVQoaohk7uszZhtBM9iKFCFIZI6RL7dUG2pNiClqzYPmRpK2lfvzF/75n/+ZlBKpWK032DyaPrdOUjkRYiRU77IU9UeR902WgmCouWf8+dNHYoA4TSxxJsY9CiCECRFPaVvX1VMh64ZpWJjnExqUz8/PTKbUToIYATuajja0M3IHN/dMSveklk5wher9Fl4YqBAcUXaAlODvOShkA2RwQ/j8+TPvf/mABMMoWCps9xuWM7+9e8/9enXDQ/nt/a9M84koE2kt7t0thdv1mdPpRM4b9+3OdV15en9muTyRCwQCLeW5NOVuPm/RPj2NL9Idm+rKI2TsKb1gttezPM0LxWoUh0RKgbQl1jXx22+/MU0RKIQwu36YEsOJ0wJTXNyTOylbrrVc44xMcSddM8cdqXUzxYFOJBAaeJshJSNtDSUT2FMYVdzf38ikFH8XYh4hAKBwW+8ghaDwt3//dy6X9yw6gwTXdXN6TGXbowlUyDm4TGWHkqMcvbYv5Nx03Qjm5K3VcNQvJki3cMQgHQ6Jn5+vmITaVHGimB/gQnQis5LBIGrAaGC8otFrnE7B7cEbWW6UbcXNNFMKrFtmloDWGqXX65Xb7cbnTx95f/nvhHpwUg3EmDBLQCCEiVIKt9udp6cPHjEQZtZ1xQi11OxKytkPlGYVFwShINuKbomSMpTsNYwBNUOSIVGRCvIqoWYzGyKxkljifr93mYcwMS0zZkrOvreoE/+2NVsWzvNCSu2I+RrdjvHVGJw2OI3BaYPTBqcNTvtJxuC0wWkMThucNjhtcNpPMAafDT5j8Nngs8Fnb53Pflgir4P5wRP38rsPq2C0e7Sk/fPCyeV6757DoArTVD2MVgWgtclcJIjyfP3Cllbu20pOG6reJC+qg5IVyNkOHklpDlt/Xn1mzpnn52dKyRXY3LBaXce2ngYCqtp/9iiAXNdS3PBo3uDCPXkEAcD5dCZqQF/UJW0pdw3ozaw24lrQMAGe6lhvqHP/9gY+evjtm9e9vKaRS7v96zvtcE/pAKuqhBiYF0+lTCmRS2aJAQFyzty3vRakihJidENXQU1Z1xtmW21cV+vTAtPsNWkxuFye2Cq5fWfxXU5HGX9vzY3kjvsrIihagRRSrUlbSuFyuZDWjfv9ztPTU9+zaZr4+PEjIYSeVhpjwKpFttTNQk3tVkVCW0s1CPE0XKvN2cxsT/1uc31YBw/XtHt2gvOarDEqmKdtdqKo+yayExyiNcX4a3Bv736pX3Kw5YeIDfP1fGXq4ocPn2dLJ420OqmHDNf6DH+uA6w/MwRPu23p2naYn6r22qqllIf0+faZiHA+n2sqp4Ok1x3O3eYf5Ugn5Wmaui57E7+XcvJFqq
gTZYBJA3E61VT0GnUTSt1TP/V6uqvSat/63u7vDyHUoJFWEsDTkLOVvrZ2hDOT79vJGA9jcNrgtMFpg9PaZ4PTBqe99TE4bXDa4LTBae2zwWmD097yGHw2+Gzw2eCz9tngs7fLZ3/KwfS9n2mCtx2kpaeQ9f1+ZVj3iDYvr4mBuccyxIiKMMdIunu9zNt6J2d3naloVVLFzFNekaZAjaB2EUhVopQSZoVipddp7YpTlbJqk4uylA4mYtUIpUBNCTSMnBLbtpJzcoIKkaABjL6Jrug1dS4XVBMhREKIVZGDk9UD6fwY5F8zypdyPmLGnyGEetcOOB24vZZsDJGW6mmlYEW6zNK21esVVAn0PyGC7x+Z6TRxe/aIAw0BqnddJXxz3S/X+NrvzTAf1nJYc/tJ9ZCeeriuNUtrhr7d76T8+Ew34tyvFZEawRHcmF957ysC7qB1PGi0NbT792e8Dr77d093VDFKLkzTtBOZqOtjRVazPYlZDs/2xz3qyy5b6jHukYRek/fhLrchK364wqqJ7ocM8FrI5Ow1iitAU6/xurIHcjusN9T1SAXmo020PSql1MZ5hVL8upS8oaUe5NHnUsmxEWR71nE/+nvaPlZdb4eGUPW5pSnbLugHbGxA32Qu9WTsa6XLCnZSN9pBVBC1fkAu9m2sGGMfg9MGpw1OG5zm7x2cNjjt7Y/BaYPTBqcNTvP3Dk4bnPa2x+CzwWeDzwaf+XsHn71lPvuug6kJ73tfTUhez7RAqPUIfUu+8WSfdEobRiEE8fvVwXTbNpb5xBSV87Lwt//xb9xuNz5+/sRvf/m1Ni0L9VGCmZCygQRUmzexKSdVUAUoiDjY5+xpnhQHV0QQDbV3mdGKbpac/TqD2ICh5H19Zly/fCGlFRHhwy/v6twcqD16YG8Wdr9vVYkCl8upNh1zAinFCciNNNQUxa/HkQS+bWSvSN0ejfTVIQX36DpB0UhcDFFFgzJNgbytiCpBlG3dXIa4vLZtQzQQoqF5BsluDFLIJRHEQen3/7iSUmaZF25bJjx5VMXz85WwnJEQXp/ii0PH99b7rftVHfwa6PW6mTmjKsQ4EULwvauRBe29R6DPORNbtIQopTw2qGv1Ul+Z3Xf343v7JLIDbgMKgPP5TCmJtBbev3/vkQXThKLeLBPxeqjm60UEy0AQMI+meHy30OIDur/avp7bt8Her2v1bkPwKAs/iDV9rMC6JfJ6h1Ib4NV0U1GlHECuSs6tS3eP//l8BmoTQxFOpxO///4727bx/v17LO9EmXNmnucaHcADnrX72zMawG7bxul0+mrd1mYjnrranmmG1wYuBRE/RBYaqDf8Sweyhr2uMXgSsHQZtZTWVO1MVCFnShFKbVw6xo/H4LTBaYPTBqcNThuc9rOMwWmD0wanDU4bnDY47WcYg88Gnw0+G3w2+Ozt89n3ezDV8RLcXx9N/EdBfEPHRQhBSHlDhGpUvjSrTfjmZSYGT+eaTzPzaSLelNvzlWle0HmqemStWCYag5OF4I3gKhAbDlqq2utv5mJVOIY/oHnscl+zE1KqwAzaQJnmh/Xrr9crT09nlmWpXsvqPTQnlfblqXqe3vju3bsuzxACudS5mCtwj7IIj97u/7+MKpPXN6dfVK9zb2feNhq5teaHU4yoKr9/+sgcA6Eq6jRNTPPC6emJ+XQhpcy2Je6rG/sUoJTU0ynNjPP5hGigFON0OZNVv3lM+M+Q277kF1ER6qBj1dvcyBioezR1cMJCjxoopXC/3zEzvnz5wv1+51/+5V/YcqGYoRp2T7jWvbXy6lpaJMu3bOtIBGbfA1TX+WXx2pgl6Vck4I3dPCm3rcWAQFUDrRZQa6YCPb22Or/93bKni/7w0GCQLbFtd1LKnE5nPE21XVAq4GXSemNSJQTXoVKfHUNgrZ7/vlppjQ+pJKs9Zbh5/X///XdSbdR5Op1Yb7kfXNd17Smw7aCVUuLz589cLpdaa9UJvAF/S19+GbFQJ4RWEgtxqhEM+0GgHeDMsh8GzKOlUkpVDwMxaieJbduY5hkzYUu5Rz+ZGff7nWmaCNLqN9dDSmGM/8QYnDY4bXDa4LTBaYPTfpYxOG1w2uC0wWmD0wan/Qxj8Nngs8Fng88Gn71dPvuhg+k1xToutispu3f7tdFAXw7efd9U3Jio3sLqsXXPrJByQkNkmmbmeaEUq8JXckpeQ7SCrzfZai9oRtSU0tPlSqkbrrh3u6ZbIoLljDbvXSlo8HRZRNA+dwf5nDZSzuSUmKbIFCemulElF0r21NS0JQxDJRDmSE65Gp0gokhNtW0gUkohRG/M9TpH7tEbx71pIGC2p9Z1KRwMUvYNOIDIwSvdye/r77fbFRFPH+4gYkbUQNoSmBvHlpLLTNVlVQqpRl/EEAjBaztuacPMCBqxBr6dTL89joD3miyO1738vdie4niUWfP6tnqaB4k78JS9ZukxomCapv4zdpjHQcZSN1NEXkj0sdbtj4Czz7USVntmSt6U7nQ6YcX188uXL7z/5R+ceGpUDAe6EZrt1s9LTb0u9QADiLldmlmNsPH05ON8viV7X4/bHLQU1f0AlXPCzIHKzHU+CPWw97i//flmHuBT9zFjfPr0iWmauFwubNvW392IHDy1U4Ontr989jEltf1Nq6RKeUTPl8/sOK+e9mw4iLe4C0PQepjJ2+YN88QO9t5k43gnNf1epTiGdDnvRJ9S6jq380zFvTH+1BicNjhtcNrgtMFpg9N+ljE4bXDa4LTBaYPTBqf9DGPw2eCzwWeDzwafvW0++9MZTEeDaottAguh1v08bIzAN3Dfld6sGZUQglJyouSMFfe2tzqVKZsD/XzidFoxw8GzQFo24mn2eShIwCMJEFfI/n734pWSSWnz5wcXLMU8XRUjk1CpNQxzIcwCqhS8KaAKiBnZCtu6sq4rKW1cns5Mk3s0yUbasntHa+O55rVclhPrendAz4ZOPodmO6UUcilM6hvuhvi6R/3BAF75rCszj+AkCOgObPXGA4DUOott/6xumTh4xBg4//pL95YaxjxNfL5dKTlxPs0P7005s+VEyokYAnGKBCnkvLLebwSNzNOJZBA0EIOTiMnrdU2Paz1+/9ZoxNfWV0phqtEZxQpRai3d+nf3TMvDc9t97bNm8PM8cz6fEfHIGKlRMJ1MGzA32Us9JrT0aDuC39ek1e45juaZbuAJsG2JT58+8e7dO1TBigPgf8M97SXX1FRaGrnPTUQQxQ87SH9uX7rWtEjqNTlXsP8a3F/TRbNC6dEZ2udrZnUd1vV8mickZ2gpwboT7MPzZa9XCsbvv//Ou3fv+OWXX3rqZ7O5hlPbtjHHE2b0aJ6GaSml2kjzYDuqjgOHPfc1+DNbminVPn1hTqZOlDWxV5Q4T1gppLSiMXbwNmtECCKFGPfDsoiybfX5NfU+l9zX3eRTtxFBaz3hMf7sGJw2OG1w2uC0wWmD036WMThtcNrgtMFpg9MGp/0MY/DZ4LPBZ4PPBp+9XT77roOpe9ZUX1XCPz26MltXACzhaaFevxQrbKWQt8z5f
GZLGwLEOEMJLIthJqz3K2KGAs+fPwLGPJ9ZTmdPIaQQNNKtCP9xqylnT09P3Ncr0zQTw4wEo+XiOdyX+mWE6OwhRSo4uGc054SVxDwFTsvMPE+UXLitV7ZtY91WRAIxLlwu555OZ2bM84lt2/j48SOn0wXVVjsxoRqIVI+6hD5/ayx6EOjLffjWvjSyePx8B5ZuVCaY5e7hPmgBzfJDEEJVsJQ2csqUDOcpEjWwtnVdLsQ4ISGw5cy6bWwpsVwipSTW7canP/6jA4YZ5GKusBpxl2uf5v/yaBEv8orRNimYGdu6UmoNzhgjzQN+BITb7cayLP33GL3h4O124+npCROtYCkPMvd3OniWUlyzzBenzWX9vzAMPMoFmKaJp6cnRIScPSX6H//xH3sqcAiR1OYBnfD3w8DuFT9OKOftEFXhYAVCCPGBhBpwH3Ws/T3X6J32vHW9AsI0zSzL7EBpUJLX05U2kQLNl26H51oFesx6LdScPV1cVWu94Tv/+q//yl//+lfOZ7c/y0JKuQP98bD6AKAij839qo0sy3IgqHq9KqggRIrnxtPgtqXtxxgpyQkypY3pPJOb3lze0ZpwNgJxMqqp87YfdlJKrOvKb7/91gRcdcgJhReNI8d4fQxOG5w2OG1wmo/BaYPT3v4YnDY4bXDa4DQfg9MGp73tMfhs8Nngs8FnPgafvWU++66DqS2+TeI4jp633YBet8zmI3OFL1jJXo+0NjZr+9iUz2z3MIooEpR5CQSd2G4r52UiBOV2u/Lly2e2nN0rH9UxwvxdmOxTMhd6uFxY1zsq6mAEaI1uUKm+PNtX0/+ON5JLaWO938jZPZAg3O9XUnIgUVXOpzOqEQ1Trxt6BBwR6d7Lo0dchMO1bdpOcK/x6veiOx5+lhf3HOpdNtm8jjilfm5AccIST43E3CNeUmYT17sYA4ivWYN7WNf1hqiynBZXypruWEphnidEHIym6eTRHXjjsJbS9xqBHdf9tQ7u42isxzqnba1msG4r4DVnwwtvbIsgaPce53Hct3VdCdNca7l6hIy8mHO/p22JCtLs+TD3nRzoc/cID5/wDj47SIWgXC4XALZ15Xa7Ae41zyUT44yaUKq329+zn4PaFrdE2uO0LfuhJ6XMPM20q17KQQ/g6ProNrVud6wYIl7rsx16ml342mrK8Au5NtnYgx4/ynRZFkIIPYWzPbu9p8lBiV12R5tsfz9GLLQDwDE9+aVu+B5qBXvXdRPxhnqHfWrrkGrDThIZJ86tpuF7/VRBKQVyKlDTvC17FFTOuVuoVJC3FnmA8CpAjPHVGJw2OG1w2uC0wWmD036WMThtcNrgtMFpg9MGp/0MY/DZ4LPBZ4PPBp+9fT77UxlMItI9hUdFPF5nyMs9qJOq36n6VIxWz3T37tqDoHPOaPfEep3AEIWoEctGjBPLPJHSxvO6YneY14V5OSHSQKx90TdOVQkx8OX5E9M0+wYcQFhfLEH6vJ2gUt7Y0sq2rb0ZGEBKucpKCSEyz5PXUdWpr8m6Mvv3aZr6uuVo7H3DmiHhMziI/CWAvAS8h/15ASTdsA7PsfqiIxC9NhpIprShiBOiGdt6R0UIMWJitemhR06kLRHmmSlOCLn6sv3d8zxDTe9sjfua8vKDubS1vJTFa6N5flWEoIqV3Pd12zb3SMte5/L4/FJTBI960p7Zfl7XlVM9NDSgP879aIht/+sfHgmXR6B7DegfAEn22rctAmJLG/f13ufoIFvnXjjobdNuKsjDcdIv54EZ0zRhQK5z8I93cjjuRSkekdOa1bXmcK3BnR8e9/Xp4b0O7j6vahz1Oz1Fs40G2tu29Z9bKnGTTyle17jNa5omB+j6igb0D3Z0WN/LQ+0jMVf7OdqSI8bDtQ1fck5Vfh5loeq2knNBcLwrxWHcS9hab7b31aGmqdX475Y/PQanDU47jsFpg9MGpw1Oe8tjcNrgtOMYnDY4bXDa4LS3OgafDT47jsFng88Gn71NPvtT+bpHoDoKr301sP72/dDd8472mGViVEIQrGQ+f/5ELu6d37ZEDJEYJkoGJXgtUlU+f/nkDe7mib/+4z9xPp/JOfFv//avpLICCSEjHeiN5rXLJSHqnl8zqymtrbalVGUFkQIkkIyRKLaR88rz82eu1y/k4mm219szf//73zGDy+Udv/zyG+/ffyCEiaPRFLx+aiqZLSUK5umNVjyioqQHg7LSyPN7Ut335iUIPfzthfFa26/jPUdQefFGTwt2+J2i14Pdtq2SVWCeIp8+fUQDnE4zRjU+YL3fd8W3zLYlpBhTiLx7944PH37h6f17Tk8X5mUBEbJlJIQfau7RgL53TSmFbdu6Ic9T8MMARrHMllZiDEzzDgRN17dtY9v8QNJApL2zNXaLMXqKdCPyl0T7co/q/wJf79fLeTtQF7BcbeZI2P7doxw8lZJKXPfbDZAOdvCoB392HGVxOp0OoAfNrtqXN+ErqMK63vj8+RN/+4//wMxYlpnz5YSq9prBKeVOola8BnBbXynebO41OVJXo4f15Jx79ESTc0qJbcukVEip1JLH3qTvCPKwf94AoAC5Ex2P8yqlH1IbkdKfZhgFk4L/Lz/o6RQit+sz9/uNaZpRCaRUuN3ubFshJ6Nm9dICt0LwtagG5nkmp1QJ0q/Z9e1HSDHGcQxOG5w2OG1w2uC0wWk/yxicNjhtcNrgtMFpg9N+hjH4bPDZ4LPBZ4PP3i6ffTeDqYnSDr8L3nhOZE8tE1+JL7N6ybR6FrVKy/CZidQ0uwLTFBCMvK2IJW8KhrmHWQJIIMSIiJHLyrY9k3UjR8gaKWXifP6NEO/E9crzp8+c5o3TdGKaFiwnd0aqOMChGAGvWDiBzIDX56QY3mStkEsibxuW7kQB0cTvf/xByomUvZHeaTnz9PSOZTlj5hEERqCUcJCdVEUsZMsgEJcJK5lPXz5xnmdCDKgEvC5sbXQnDgXW3akGUpXIXKYie+OvpkhHpdpJGazoTrQSEKrXFPy90vbQPY6tGZyVgorv8f1257wsxBiIotyevxCCElSJUbjeb9w3rwdpJuRSWLeNy3JGYyVSAtt693qxa8LsTjE3qsscPfpABKP0pomvEdhxNH0zdrmLmKfj4usgZ+YQUDHStkJZWdd7Tb8UIGOlerMNMNdtxZsxtiiEWGujppb2GiPTsmCr1+A1CjEIWKZkI1igu6SLz07V52nmulYp5+DXt7rdpROwAMVa48aIlQreFknrM1aMKbgtxSCEGMhpY00bmhNqGROPbhARcsouI/X9yyoPgGZWKvj5up2Aa+1PfF4xKLl400gAK4mUPUrIG+z5Hi7LQgwTKt7g0tXVo25czpVMissEcwhvzfYEB96jPrucCiUn8pZAYJ5mrMD97k0w5+nMFCuYbokbKyklUikkM19XxbENKAISAkkE0UAwPy4WMwqKhpkvX56JMXA6nUib12qmFCjZ91lcExGwnGuUCr7nIqw5M0evqUoqxOlEykZOhWk5oVJrTtfDpuB6XFJmmidimDyNtcpnioH7uoE0oh3jR2Nw2uC0
wWmD0wanDU77WcbgtMFpg9MGpw1OG5z2M4zBZ4PPBp8NPht89vb57PsOpua5a2B++Lx5oPtn9btVRUXEJ2r10+ol9wvdAIPOWPXm+kRdIKF7kpuRp+ptzEyLN3EzAStCiAuzOJBt2zPbuqImnrrX5opQcktF88ZUhgOa6EtPnFUDK11ht7Ryu127YszzzDTPxGkmxsmXJaErYil1+tU7ac1TWTfIcLDoBCpuXA64TZLN5wzWmgHWeTok7bJ/6XFt3t/DjtE0UYTDOw5/FbxeIz4X3y/q/CGnhCwnFAfmUjJmmSKCBiXnRClCnOZet1HqXra6mGbu1d/WlW1z76gBpe41ooi6cbWDwp8Z1uVV5Q5ghWLNE+8S8yzmQinJ55uzpzfXp5gdJGPV4x8C3mDxsR6qiHiKa/B6mC2dWTVQiiFVh6Q1DmzyP8jUv+TxJNX2oxFEOyRZT+Kt+6j9DEAF6WMasOENDHMp/fXS9/9Rcsd30969L/bhGrHHC5uutANHizQQ8XqoMU6oRrz5YEtPPWALbg+G+bOtksDBDvophBqp0aZe9p0XEawY9/ud+/3OqQKnX+z2X8oun2bLIkIxb+s3hUDBCFUX9/U72Xna7a4Dfb2WfR/bBrX9sGrP9fNccp8rKBTXB0F7ar6/K1dMMHJtOtrTrPVh12gHA15gwBivj8Fpg9MGpw1OO14zOG3Xgb7ewWlvZgxOG5w2OG1w2vGawWm7DvT1Dk57E2Pw2eCzwWeDz47XDD7bdaCv9w3w2Q97MPU9luPvxoNm1AsEqsfYyQHozaeq5roBFK+nGeMT25a5Xq+AoaIE3ZtkeWM+MHWgNIN37z4wxamvVjUQgjDNwt8/3tlSJm1XptmbcLXNzDn1mpEOyG7w0pTshfhEvJHZ7fbM9X7ldruxnM4s5xMfPnxAiJRirNtKDDNa62kaLX3Q0CAd6Js828+qWmtwuhxLqaBwrMcpO9wbezopYqjuDeTkxTu+3rumQHTycb79tnKYube1KfaxZuu6rqgq27aSag3KbdtAhCUEbz4XAvOy+F4KUEH+fruzbWttAheREJAqN1FPr8z2OI+vieuF6vmVHHUyW6bk7EBPY1+/rqUaIhBC3OVTHp8h4imaISjX65Vpmmpki/S/O6BpBbtKwYd93iNu2hypANlXuP/bbOSwttLmDz1yJGioYLg3CVRVtpQf9Msq0XWaF9c305boWfXmhWzb+lqDuYcolfp7zrl/lVJ6reR5nuuajZSVGOf9YNhl82L/xA8uLaLisKl9Vx/2vx147FDntdrr9Xrler3yz//8f+oE5CSQETOCqNd0rYTR1oK0+sSPZNWo38xJU3PZn2l11v0QSb/vpc6aud45aAdiiOSSQZQYQ+UT3ympxJZz5n6/H2q7KiG26CHpNWnH+PNjcNrgtMFpg9MGpw1O+1nG4LTBaYPTBqcNThuc9jOMwWeDzwafDT4bfPb2+ey7DqaaX0erHWnVY+Yb0xSwbkVPldoBXs12b99hn5qHdttS9/y9e/eOqXrmVSNWWo3Cgpl7srdtY57nmu6niIT9iaL88ssvXL9cuT3f+eOPj7x//55lWTCDZZkdRLaVIBDEECkV/6zOGG+ot62s9zuUxP1+I5fMX//6T57CFiJYoNT1qoYXirsbbtvsEANBo6f+qXtJT6eTR0fUu1JKxLh043p84u4lBidA10159fr9uoMRVyVCqjf7BbG192zbXpdVNfSalcuy4J7gRM6Z8/nkSg28f/+eL1++eJNE9agCKgDWiXh0gCqn04lpinVfjThNTMu+bqvXvZzfEThfWSnNQDx6xaNASk5u3Ajusq0Nz1LuqZN++04VWCVXcQA6nc6EoPztb3/jfD77+yuwBA3EGD0FMmeKwXIggl2qbTTAfFyD4QcjYwcYDeo2UEH02AhQ1N3J1+vVmxiqN/xL2fcG2+uyFvsaDLQCvXbV2MHsCOovZd/IUShcr7e+MhXhdD4RYiTGyO+//46IcD6f+jN8HbYf4g4pqyEEct58vrk2rmwHIX1x+Dnsk1ZiASel47xTSp0ADUj3OxpqU8LsUUmlgr3XWQ41aqTgZ9MK1A3wv3HQsGKsae3vilE7+TfZtdrBp9OJkhzcNQTSvTDNkWk+kVKpVu51oo1CtkTKicvlCY3Ro5BSQWMAvCmf/zzGnx+D0wanDU4bnDY4bXDazzIGpw1OG5w2OG1w2uC0n2EMPht8Nvhs8Nngs7fOZz9wMPl46RF7ZbndQyjiqt0UF9sxxh9miFg1kK0CCxXkI61BmGH7cxFyMVLKO2GYv8evD4hCscI0zZRFuN2eud1vGHA5n1mWUwXITAzuvYtRa5ra7qUvJXewXyY35IAb7zTNHdhLcSVVUS+HWAy0dKUwc09/EPfeqoaunC1tdV03phgI0ZXUjU+PYt1h4qBwXibT53sEyJf7s3u8+wf4Jh3248VoXvH2HPcWJ0L1YudqiE2RUyXg+/1OzoU4zYQQ+166oRUUQauRifoESs3pLcVYYnw8DfxgTS/Ujwb0XWdqiqqvyT3CWj20ZhBiINa0Us+klX59Fzh7w7pt22je53ZdM2hVJeXk4BUfgaEDu/SJ9s+PX+2NhwsOOrnXu23PLaWwrivn83uQwrbdd5JQZQ4RrVEaUmXsRFKjH6RF/niq8NHz3fT3teiUUgopb+TiDQhjjB7ZoILg5GQFNCrTNGNdn4VSNmKN3Mj5sKb6bN8eqVE11b4qUD/ohLUD5qNOmBVCCA8RAb7/wW1QPN2z5IyoomLc15WgAQ3a7dMPri7jY2RGSyfte1APoUKTV6E590vxVGkOsrvf75xPZ1o6c5wmkD2SwWvj1vs6du46Bp7iHj2P26/7BgGN8f0xOG1w2uC0wWmD0wan/SxjcNrgtMFpg9MGpw1O+xnG4LPBZ4PPBp8NPnu7fKY/uuA1w3oE+7aQ1vDJaLttpX5WN6Z57AFCUNZ1JeeEoN4YS49AXz1rZmBS01szJdf0VXYjat5I8Fqcp9MZA273O9fblVIKy+JEUooD/RQDMQaQpjQO8g3MUt4QdU9li0YIYSKo1z71DW71HQu9fig8AJkrsG/WXuPV575tq6f4dQ9zNbKvNoHq3W5KdgSRHwzj4Xrbt+zVvW5KdQTylLYaIXCsCep7knPmdrtxu92432+klJimR6BvYKPqHtRYjXGapv7cGMKhpugjIH53eU1uVvWrbY4djVQOOOs6E2pUyJE0aXVL61cDPxHZ01uPwCw7ERQ77Lccwec7Am9/byBXP3CQqN+rPr5sqlhK8eiTKRJCrISlXc+WZel7AB6V04msgb3uoPUtO29fbQ5b2rjdVjAhhollPrEsp34AytkjcoJGpjjTaqBKBfB2MPPpVF0+zGGfx54G/PUBwA9WKSWOhJSSR1y0dNlSijdRbDot6rZWG2t6ZNFWiU8RM/yKg05UkWmNbHiQ00FH9nmVPYW3prU2+V2v10pevl9hmgBh21Jt6lcjpzrYe4SEVB1DIPUIiJa+/3gYGOPHY3Da4LTBaYPTBqcNTvtZxuC0wWmD0wanDU4
bnPYzjMFng88Gnw0+G3z2tvnsuxlMqm4kZtVYahQAlBcPdiNTqZ7yYgfdLr44kQY3bHnj0+e/U0phihO//vprt89SIMbdA346z2gREKXkwrremOcz0+TK1OsoGgSdUIEQhL/89Z/49PEj633jj49/58P7d1jJnqqqguVEWu+YBopl94jfV1LaEDzSwUoBVQem0wkIeEO2cHi3kNK9K7R72p3wRKTXY22KXHKilKqQy8IUj7Vfa2qhuHJLVbqgoRJfS48VPFX3a8/7A2GYvbQRByqqoNsmNU86kPNuPCXl6kHPzPPM9fMXihWWZeHTp4+Ukpmmidvt5spU1xLCAWBKBnED+I9/+zcup7OnBKbM5XJhy5m0rlWxaxq0CN8Cx5eA1/SP+ruVDKUwaWCrP5spadtoTfO8+VvA61C6V9v1R7Bs5JJ3gLOtphHvphJjZFkWNNTapCEwM0MlbE+lbqmd304lbAcE39OqxyXXmpk7mB+vDyHWtcC7d+/YtoSIVe958lRugfNpIaOYBE+VDNODvriuuX6UA9i297Tapg201nV1uatyOp96nduM0eoMG8V1QRVToYjramtyN8WZbWvrcdtxNTQ0eJRJH9J29nU92LaNP/7jfzJNE6fTiXme+fjxY5Vl6PpRrPQokGPab6ngikGpNUqpNVLhYDQH2yjFU8tzzt74jmavYT+g2i6v+/3O6bywbRulFD68/wUTJRXDJLDeNzREwjSRrUXpZCS6vkoQlumEYd6cU1z3zLz+KoAJHfjH+PEYnDY4bXDa4LTBaYPTfpYxOG1w2uC0wWmD0wan/Qxj8Nngs8Fng88Gn719PvuBg+lQf1HowlLVg5FVD+Bhf7qYupCa610caEpmXe8ODCEwTc3jGLxhnbkrV6R50L0J3zTNhCjucbaaLtnqWFpTKsEQ4rxweXrH7frMH3/87vVPg89l2zZyTpSSSJZZ7xulesinKVJKwu6edtrS8TwrzJ8N+mjMPHpkW13EUmpTrmq07qh2RYoxkreVYrUJV/WiivvDXYbyUo4VSEJAfpB8dvRw9uccdOHo4O5vFZdtqB7cz/dPhKBAIzBfvYh7P2NU4jTx5cuXniLotWp3b2zTnQYkKSVvMnYgwrb/VK+x9csfgenl+o5DMK+7a1XlxWq6qj8lp0SxjGFM0UFKKsD5YcYPC+UQjOD7ax3Md2O2/X7c27tWYxYLxHhs2vZKZEibc4sKOa6l6lzJ/jyv/zvtEQulpUN6c7ijnF2ue5O8NtxmXaYix/c9EmrTl2bfLQ25gX6cJzQENEQQrbbQjm9CMVjXxLycXL+z0ZoCunpJffURJeqhBQMVxJrXvyP9A4hZBbWcE58/f+Yvf/lLr4P6/PzMsiy9mR54ZMNtvdd0caUc1mfFa+aGKfZ93WXztX3llKDKtslkmiay7Qe14zAzrtfrQ3SOiGIVs0o72UrpEQRWzSDlDRVlipGUDdV6YK71WosVigliOy6P8eMxOG1w2uC0wWmD03wMTnv7Y3Da4LTBaYPTBqf5GJz2tsfgs8Fng88Gnw0+8/GW+ey7Dqa2eKug6+mYzSh2I7ZqTA3wBcdf/7lBl19fqnBLyYh4bdG9YZ3fUSoQVITjmOo5BUVrqpvPz4FU2v0qYP7WZVkoJXWv3hSrQh02OYuRywbAFKc9/Q3r7wwhkgoP6NuAvBlz+zqCQfsMqrIe+C5o4GZ7OmIT5hEUgQ4mJtZ/FpFeM7XtxUsl+8+Ng7G359f1+f7s9RqP72z7krNHGkyzpwOLI8r+dPN/VNW919mwGo0iqo/3NIOrIHSU5fHdfb7tkNGYqxmrVdnVVORSPMJDwmN0R3sGdjTUNoddb4/39PnIAcytkEshyNc68cOopXZdvU+Dsq33nh7bdKqvH3paaosuEaE2Rmyp4Tzc0whQXns/PMi0/ZxS6gQ/zzMhTkh4bHLZDjmOWUYupdYM9r2OB/3f5yP9gPYohka8+qBrR6g3s75XLcKjA3eT3+H3Utyrv8zzAyBacW98yZk4T50Aj7YIVX0Ocm9kmXMmBI8myoeT1Eu7bw1CQ61N60cS15dS7aKlVbeVWtXXQxnZHU/rwcEaAdo+tzF+PAanDU4bnDY4bXDa4LSfZQxOG5w2OG1w2uC0wWk/wxh8Nvhs8Nngs8Fnb5/Pvu9gUhdIQ/Jc3BOrYep2vBuEPABR8592f5wZpRjr/c663QlButcPBCs15UoMw5UrBG+QJuZpbSAsy4xZqGAedmGQybgnXE24XW9cTjOX84XL5UJOG3nLlJx5fzkDrsirpe7Nvpwu/O//+//Otnntz3lZiLF6TzFXUBQVV+pmEA2oRKQ2HYNW/7FDxcHKRIQQI+v97mmO07ST5AEgOICES9T6df+/AfsOAmBQoDQCtFr3sta+PJ1mQlDutysxhEMUSOgKD7AsS21quNBtRbUqpYN693wXT71b15U4TcRqhL5upWY2fhPke7oqh2iXqocNVgCmKRz+VhCBqAHV2OXZ1tz2yAHKDc+jJfZokn7wMU9lFuQQLUI3uvvdo2Smae7zfNytIxC/IBx8r798+UJKW33vkdQzKtMhmsdtU0S53a5eW1jgXMHL2hytoKVQkA5e7ZDWdMzwg1iLILjdbj0t9+npiTWlh3UcDzbrunbgbbI6erf3NRw93l+vv0UJtfTLXAoIe91P6Iewp6cnzmdPfU4p8e7duw787RCWc+k62nSn5Eyu68wpo44ktFRWT5v2eQbZ92WZ5x5RQgiuJ8nISI8sakNVmaaJ+3rr6bQaIzl73dzcDs11/4IKJRslJ9YtURzNyFXX8LMIOWWvEl2Vaj+KjP90+TNjcNrgtMFpg9MGpw1O+1nG4LTBaYPTBqcNThuc9jOMwWeDzwafDT4bfPb2+ey7DqaXwyMJ6IoObrR525AX3tnDXTS4K5a53W9s24pIYJ4WYvC0vtYwLOdMLrllhNVN9lTOaZqZp4XrdSWVlfPZPKVNBDPFciZtiYLw9PTE/fZM3lb+8a9/JYrx/PyZP37/Dz5+WpnXmel04um3f0ARELjfr6Rt85qMIRCqwnhtV/VUuqrcqTYJDKpky0hLszSggjHim4rsRpG3TLZCUK8p6pEKgVR2cOcgR0EwedxEr+G4pwz/r4B+I5NS9qaGZoW5RlOs6x1Xdn9XShuny0RKmfv9xuVyYts2tnXl6emJy+WJeVm4bwnVWFMaFctuXFaKN0EzEBVifOLTp89MBpfpRCmFYHieHsarqgT9WWa+9wWvv3v0+FIBsZRMsUzeWhryxLycaFCjSK9BCXiESi5V1yaWZWLbbrUpZK6yqoYf1L3qNdrkcn4ipczt+U4IsdYQvTLNp4pnLxdkCKXSjachHm21RRBcLpcK3i26w5gnT5VOKaEKIUpN16QSD35KE/VDUrFut3I4oLjs6HgrCGlbud3vrOvGhw8ffM9UWVNGNHRyaLbuB57Cum71YHDqf48h+kGx0a+Vep/Px0mq1v59ocINawAkHGsFWyf402nuB5NtS3z69KmDvadz+lpVFUWrfPaaqO1LkC7/kg0TBQ1AId
fGoe30IfUgGmPo5EY80SA3Z29O6Q0or/zlr785+a9XJE2IzrRU1SPPFQq5ZFLKbHljmU+ESh4tggvxaVkRP5hVtTKsH47G+M+NwWmD0wanDU4bnDY47WcZg9MGpw1OG5w2OG1w2s8wBp8NPht8Nvhs8Nnb47PvOpis4IaJG1OuaXDSwds9f8nAXVsVPOoeqtZ1mDelSmnFLANC0BlPefN6pg42tfmYgdCiB/xhGpR5Wuo9VdvIGErXCBxgtdYIlAoG8zRjthGCEmPg+vwZjcoEtfaiUKxwu98pOaOizNPsT7O6hQVEjZb6iJkDvypi1g3LmtdbwOtxHndUERSr04/ThBUHzgbmX+GbHMC+GUdd724Mbf12+Fnqz/sa2hVi9EzBoxecKjMp/hVUEWuN6MAsUSy53C15vc60EecFCQETT09U9Z+zCSJejzJtCa2e4SDBowdEsdoQTmptzaZvr42DXVSjzITJa5IqSilbVXxIOQGuT7kkVIPvlShka2KpuuYbYhSK5Z4G6vau3lSvpqJGs/05HSHVI0iabrAD65ZWT/etNWXpnnSD2tQPDBNPeDTzKAtkT0ctpQGl7bvc7oMKHLU2ME7eazKm2ecpUlDE9fTgyTfqoYHsay+G5YxizDEQwy4zk7LXJ21UaT6D44FBQ+haaCIcGNg976VU06nPaqnmtFRsrfJz8BFpdUv9Mz9AeWPPeZ4o2aMbSkk9tRe8vm1OGUSZ5gVVP0j6jik5OUiHEPaDDopqxCRgNdXWxezz0+A6YZbJ5jbgsmiHLsAKMQZCDEjw2qW5ZCwbVkolCqlHKOtEvx+23Fi12gTUw2LVKX9PTUc2qk5Bi9wa4/tjcNrgtMFpg9MGpw1O+1nG4LTBaYPTBqcNThuc9jOMwWeDzwafDT4bfPb2+ey7DqayFTRAEPea5lSwAkENK5mgEZXAalWJaXVAE+DeYIFqJJltuwHeuCzGC1gEax74QqmK715ubyhXCiiFGJQYTv531JXT8q50uMFGdcMv6+awGgIxwO2+IWqcTgu///E/OV3O7nHcCnGJWDaePz9jOTEtZ57OZ58bAuaNxFyFS0+pdY+tIBpBQgX6Kn8pmPZKlQ5FRVCJOAAZU5y4Xlc+f3EP6BGm3YoaGj/8of/yGEFgr3yvBNoB3yqgtcvaNY1IBEsGxVCEJQYnk2zMUyDlOzlviBa27UpOyfUgXMhN8TRCnD0dL2fmoKTsZLZEb8YWYuSs79BpRsIMEl2GKFaorddeKK65kQbECYXCtiUkRELwZEMrmx8uxNi2OzG2tMfiESsVkChHOpQeMdJqf3o6ohtjiEq0QC6pe6dVpRJ7wEwpJjW1EqyacPO0r+vd03ElOgm1NFsDy9tua9XLbmbcrldUpDfN81rBNXpCQ1+T9TOHUYqnTKe0sW2F2+okGEJATYg1ysUsV/BsYs2I3Sh5Y10zhInTNBMml1eu0ROqoR4SDLEGjPVwotMeYaBO2GbSU5adyBzQUtoQODQkrF568d1tpGktUidEtu3esSGEUAHTU0dTWmt0QNVtA8NTwe/JoxRO5xMkEMs1IiO4x37buFwutJRWVInTiYyQAbKn9oJjkEZFxCqp3MFqai1OPO34ez6fkaCYQir+XFUlJ+vPK01XWl3Yfn5zjLBSmxkqOwE0ey3Z14uTuB8QGONPjMFpg9MGpw1OG5w2OO1nGYPTBqcNThucNjhtcNrPMAafDT4bfDb4bPDZ2+ez7zuYSgGpe4DjzbFeo5lVj9qjWWpNN2wgnFLifr8jErjfnzETnp5+o9WxbGmTLe3MFdc3U1W53++oSE+Pbffs9SCb0laSUEhr4tPnj2CZv/zDLyzziW1duV6vnE5nwLjfnqEoJU2eYrbeuFwuzPOJECcsRKzIwSioqXcPcNxl0+ZlZl1oLi8nH7/eHq53+fratFvL/vTD/v4vjgrAR02wsm9qJ2Rnk3mOfPly4/Pnj/zyywc3Mkssy8znL1fW9c62bRT1CJAQplr/tGDbxrxceiqpWCFtG9t6Z1s35jA56MWImbGtG6pea7bXf6165dN+1F6rshF875v+lJIRjGKpLs09u55e6s+YpwUQUipIjXCQwx61n/fnSpX/sRmiyyvXOq4iSowTOeeui7fbDVAu5wvv3r3j+Xqt4rcOhm1PRLWC007cpRTu9zun06mmn3paeK4NKkMIHdyPcmm/elNArTWGW91Y62tz/ZSepplSIq3PlJJBA++eTogGWoSOObofFND6uaM9b0vJZVFTuQWXRUqJGB6jINo9Pa3dvqL09jGl5IoHDtAh+L25WK/V++XLl7ruyD/85R8I0dNX13WtMhZQoUiBYKgaqWyghgbHCorrDqU4iYlQxD3/1PfnSrQmkBHW1PQmEMLCpy93ihkhTnz6UkhZSHkh5AWhRp6IkvJe69dtRMhFiHFCQiGIQcmkUu22RpGU0hqUCiIzLeXXU2qt4tIYPxqD0wanDU4bnDY4bXDazzIGpw1OG5w2OG1w2uC0n2EMPht8Nvhs8Nngs7fPZ991MPlD2RW0741x2Kf+Uvd26o4hwH29k1Oqm+H1Gs2+3vAG3ODE4N5S/1rXlVibXB3vaYbRnl2KpyXmbIhUj6+59/iPP37HSuaXX35luU+UXEjrRtk+e1qZCqdlJuXS5+PeYlcU/31PJ5M6Vyq5HIG4K3Gba03jPWqx17r0DV6WxT8+/L2Rp2GurK+aw58c1oy9pcJVn2cn7TZx2NKGWal75d5tremC67qxbQkq0LZUPlEHPFGvQWnq1BZC4H671eeYA1Wcac3QtO1p3cud1Y6i2lnWpVhlLXRPu+9JrnIv5Fw6wIm417oU28H9O6Js+tUJm0f9POrry3mKeA3VJq+UMyHGXU7izSOhpuSWHeTaOr32bulN4xoYAz3CIaf2vH3j9sORoEE6SRxB/riWdqjatg3w1MoYJ5dlI7aahm1V/nS19sifdti73+9OkMd6yAcwO35XPRx6bLenfo3R967bWsWho6xTzlyfnxERSjG2bWNenh7WJi0F2gom1QYEtrwiYsQpEDwHm1Yj1ZJR1LqDvwGWmVFwnScbplONCgggE8naoWL2pnlENAJh8TxvtJJC8Nq94lIozR5DQOvhRm3HQBNPu6VkME9n9hq/BSMjuUWIfFunx9jH4LTBaYPTBqcNThuc9rOMwWmD0wanDU4bnDY47WcYg88Gnw0+G3w2+Ozt89mfdjB162ob0sCfWoPRGtC3y9y7td5Xv6bv1G5ILw3nGFlAUy580Q0MdgPksKG7YuecsFJY5sg0ecpXKcbHj59Ylol//OUvxEm4Pd+43+6s92dQN9D3v3wgX+8+/9aFTANYjVooieZZDi2KoYKH0MigykhaumL9OmCYUTdXfd7u9eVhtxzo3ZqtgtyRK/70MGgRHW5Iu2eZV567rndKKUzT1GUrEhwU1o2UMtMcCHECMoUCCCFERAMpV69m8BTQlDbEjBgc3KcKCDlnpikS4g7WnrK473GDwb7+g3yO+tLWpUE7yB8jUVrkQtuLDvhHWb949r53vNDXVu/1a
JaBLSGPyG58TWcZy1bLYbLOYzuuWKsOoxraHf6XAYDi42iauYN1f76+/U1tvEaXvlcalwGggpRk6dPMWsfeo5LUlgayfxnX/72zlxemNdjkVLLmROE70hWvUdKfaV08a6AEycNnHaxGkXUZs4ba88Jk7bxWlp/xucl73gHr7py9+DZZud7S1C17M4fBRIF/h92jlu2Ha1idMmTps47eJqE5/tlcfEZ4/vHu2333Eb/+w/fiGrTh9tHz20xd/+rl+j9R2lYtDEZxOfTXz2zPDZ4zAwGQyJruuwxuYcqBrWlSgTnMBoqKUZtE7XvijwpmylUwufriwNa8sYkUEvr21VwrLBjakCPVAXzGy2qOBXrHtJwMqgzGXx7WedXZv88YTX36X+1A9SVeZBsvp7DIEYAikF5rO5FkIbkcnYoqqLtSz67PFgLEkiScB5Df1Ty6qj60N9z1hH3y2RGJlvHGAnBDCWtp3vP9kV2Ic/jahVuO96zYG6uYlvnBbxCwHvvOapTH3uv1lfDJKwZsgpiVTIGMlsr2XTmiHmxxRzqqQcW5u/a8jnKHNFLcKnC7NcTDcCKSnoYRVYhigkS/WeyH02GJw1iGgW1nI0Bu4/dpS3vOe5bC9bABazjv/+297Bc6+/l67r2TykeWpFlCBLP00G8RhD7XPxHDCZ+GKMWjAyBObzOc47/W7RwZEeUjY6o7kMQUmk6H8XekJUHSJ7lwz6TtX58hrmw1YiGjw8doP/0JbLZQU1EcE6B7bVMFWxiHEY60kCseQppswNSqyyXszO5hy24/VZxmZzsUktwjd4s4y/XzZw5VXDcFnf1JXQ2HKOMsYxaRQC2w3Q1lqsc1WO5bolz2nJMRtTRIB2MYekc168UcSopwAogTsgmMiO2SaZSGcjn3r4UySXEC88evYkbt4gVthebXNm6wwkaK3HG8vMz5i5lta0tDTM/YzL7BEcDucdbjT2qZ27TZx26XJa2bxrXu+Np5zTPvix6/iV33kep84siCMPs9e8/E5ecMt9cAFx2m+/63l87FPX7iM8MhcNnFZaKl6JE6dNnDZx2kXTJk6bOO18nPa2P7yR333nc0jZyHTF0S1e9+o/RohcefQ08zYSguiDlpk+3E2SLsj7NEx5SKf9aJueb/qKD/D2O27iU/dfBsBP/NLL2d7JNXMnTps4beK0i6pNfDbx2ZO9R4sRVl0zyBxh3sZs8Lnw+Ww3tu9uE59NfHYx8dljGpgK6PZ9r2GJWbAj2KwLty72gosZ2FKenHHIpqmLdV3JjKmxIfU8MUUafLXkpaSLZ6zM5TNVpAGUzrVgzdDhdcwe/6qoOLwnQ37W+tV8mhB6Qt+DgaZpaq7XcwF9CWNzzo1yyerL+4YSjmmsJYRIjKn+LRmQvPdIF6rnhJLtPoMt80EBZSWmEDoQoWk83lmW21u7whsFYzSMV+dZQTCllMFqEJSYYQD3PniIY48OxWQPbe5w1eVn1Oqdgah4p0j+jqkGm4E8CuAL2WKbC+EVHcFACgmswRufs7KawS+kzr3Jc2fr9TPT6ObEWE6e2eRdH7yl9rltI1/5hR/l7KlHCSHgnCPKQHTYwYYvoh4RxaRWNxK5CzEEzWFq1LrsnUOI1IKOoMQ+nq+iniL0fcBazQXrm4YuaMHFvu9p2qYSheTNFQwF5AxKYBpCOcwfkoObTZZNnT6pa7Lvu6zHOvfWOozziNXQVHIuzpg0RFMEjGj0mv4+VkJT9Qq0IGbZUIkIjW+RUaiq5k01a9df36wN5yxjH3AFRIYcqmMy2x0+Oj5nkZ81A3mON4PWukog1jldxwjtYpG9qbInSfZ6kUw+YlTWHR1n+9Os0pKdbsV9J+5HWrAzw8NnjjOTOckKDx1/kK7vkJhwyUAUjhw4wqGNg4StjsOLwxyYHWBpd9hsN2h8Q2OGTdXUzt0mTuOS5bShpaeF0+648wbe/LbPqldxNnLVZWd46W2f5JbrH6akrPhMclqIluMnjvC+j97IvQ9dXvt6+OAON157AmsNIcQ1TlsT8OiGbuK0idMmTrvw28RpTJx2Hk479ugmH7v7skodszZw8/UnuPzIKRrfqyqIHutxGaMvzPs0/duQsge/c5HPfck9fPyTV1QD0x+892YAGh/Xzj9x2sRpE6dd+G3iMyY+Owef9XFG0xjazEPjezQ1xO2Wu7mgnzuO+Wxrq+XhE5ucQ3NGcp34bOKzi4PPzmtgsqihUpLQdR0bmwsFF9TCbbK1W0UuNaQ+QwqA5ipNEZMBsFg7DYMnQRGyNdkwCqSowkopK3QmCev0/JiESAQEaz3z+ZxuFRDMmkWxTNZuRRFDnUxEAcZgMSbnYjUCEivgGIH8NAaFhBFQWMv29lm61YqmaWjbNltD1XuivArIj0lPRIh9j2t8tWq2bVv7W0JzyziccywWi1qYbta2dH3kxIkTHD16+QCC+zVdWXleOlbLJU3jmTUtSGJne5soSSM582L3vlECoQCwWv6dnYFmWc1Nrfwi8IM//BXcdc9l9ZMv+bw7+eLP+QgmoPlBRbKhfwhwtnlSDJK9SjRcWPKigQzwVhecQReUPijz+JnPJA+YRAmXzsyGpAx4SdCwVgtisM7o+fazxFqbr5GwxusCTko6WihPZW1Jms9ztFlJKRFDJISOvusQSRw+eqRuACRFXDPHuhZjHN41RDIgZ8kagCSsVktc42ha1S2zskjoWa1WuCbn1qyko3lj5027ZiWXKEVxIZbwXoNYQwn3BTPKmUoG/EhKAYyCvHEtYmeESN34hV7yxkTlE2OfNzk6C3kqSSL4mYZUd716wERUFpvtnK7vKzhrGKtjNpupF5PV3KYa6u6qDhfwjnEILS0bKk3JWLafsrZBHOdGLR4FzjmMs4SycQIWi0UOrXfEmFiteparnmuuuZydnR1WqxXSU/NDRyLL5Vn6FIiScK0hxMCyX/Hw6Yf59NanOXbqGPc9eD9XXXclK1mxE3ZoD7XMwwZd6vjYwx/jiiuuoGk89IJtDPODc/zccefHP8SN197IZtwkPtJxaHGY+WzOYrEA/tS5Vv7Ucps47dLlNDKfNP7p5zSAIwe3+Tvf+Wsk6TVlxQXAaY+c2OSf/cRX7xHTV77mw3zvN79D52wXp9WbV2Owzk6cNnHaxGkXUZs4beK083HaN375R/miV97LN/+1byZGw30PHeIf/7vX8APf85tcd9UKEFIINI2n8R6CXLD3aRhDSupdbgRcnej9pTlx2sRpE6ddXG3is4nP9uMziXDXPVdy5dEl1165lc9X7tEEiTl14O6LS7oo+Ozt77+RN731s9jdJKfWK32Z+Gzis4uFz85rYEICKWWlFXC+xeVwLSNax8egk2yiqGpaU8NOY0o4YzGt1xDXEj5oVMlsVo6yHkUSCc2H6fLkSIrELkArOGNwoJ5WjQOJxBAgCcF5nHFQ0piKWkT1EsMCFJEhHFLVTAEmOTwNIa4IEunpMRh8sjixOEw+erB+578Q1FsChMVilgkqLxSCjsg4nNNcoyFENjcPqNyMxTpLjEkBzKjV3mQLt7WWttUHX1
3XE2OvZGgtMcScbzErfeixzmn+WuuIIYOddZSUddYIhkhKHZI6FosWiYGYNHR2sTnDz1vEGvqgOTQb75g1rXqNAd6Z2g9soz8z8VkTM06M6CYK9LoYTQb8FAISA5ICSMBQ8uOCxEgMK1JSuRqrhc0EwTmfreID4Rp0/go5i4D1BpMy0IhoKF++QEILDlprcdbXc+y7QNqW/5e9P4+3JLnu+8DviYjMvPe+taq6el+wAwRAgiAogiJpyhYNUiZF2bIlkpJtWf5Yi+lFkmWPxx6PbVnySLZle/SZj0eWxqZEeizJlE2Z1FAkJXERSRAiuICgQAIgtkZv1d21vv3eXCJi/jgRmXnfe1Xd2KsKGcDrekvezFjO+f0i8myIaFhiVFIvkkVZ9cBDDMxnc/1AmiODINbivcUVJTHpD6gV3fuALTcw1hFwhNbrpkGEUgy+1TBLSd4A1jicK4lIup/QtC2V91qiz6huhaA6YaTo5yhvrHTqA75plLQQxBnEpecE9UQRUc+A+aJSzyErIAXRzghS0cYipZMVJHolXxV23QtFl/QirZHJFn7NOxxCpPNKCq4ocK6kDaEP82y9p6gqBKibRuc/atRdiBGblCuiIbKCFqtsfZI9Ed2gpPmz1tLUrVK/ETwRmzYznpg2X4o94tUbyBaG6HPeYn0Zu6pbIpat7YuIq7AFOC/4VSD4iBfPsltRuxUrVhyFI55+/tPECoILHIVDnj15jlpq3JMFz9dXKEpHuVXABuy3N2m6ht2HtxCXwp+D8OClB/DRs1fv8/hbnwCEw3BMLTVHrqapW473ToDvPR/Hpza0idPuW05DImJE+/t54rRPPXeBv/h938CVl7cZc9o3f/VH+bq3fVL5Se4iTuu7mFhYAv/3f/Pv86bXXk+n27Oc1h+2QyR0fuK0idMmTruX2sRpE6e9EqetGWD0rNbzlveaaqUbUoDfree0mCpp6AtNYbks+P4f/jpeurbVj+5P/+s/y7VbG/xvP/rOidMmTps47V5rE59NfHYOn1lneM2jL1OWlog59d4x8P/4n9/Db3zs4V6M+jNad5ed0U7xmQ/C//fvvpvnX95l7b1prw8jg+jEZxOf3UN8dmcDE/RhkGq9k/5rPWws5XVM+Kehe4q4agG0KZSr/0RSOgUHslr1mJ/EJAy5FRn93RrBWZM2xx1E0YJTfXWzof8ZCHrfhmTizEIYkyeUsk8KpQSiRJQ1ZGQVj8Mgc4uBtmuSpT+FnoreN0bwvkt5UcEnC7lILp4m/UbZh5CAP/VahnA654rkdaCWcOMSofrsXWDUku1bBDBWhjnQwYxnUT0TghJTlYq4ZXIuCn2Wzzkjc97a/Hmht5RHm9dH5+TgsOSDH3mCo2WZpjrwlte8wAM7mteRtEYxHWqC74jBq0dIzPIRCQk81aydwgXTwug6a17P06usnJ3CMFMO8fwfYQhHHcBHkfCTzz3A088P3umPP7zH297wMsaodTmHMGYZ6sMXR3LQ577tuyT9XOsaJs8bdAwRwdhCQ48RiCHlHdb7Zk+RXHRPkscKoGTnItJpmLIxBispx2kIen8JfR+yHOXihmvF2cLIdWDotlKfUU8LjOY9RRxRLDmX+1iHsozlm5w22OmcmV4crbUEiVjnsE7nIc9nDifNm0UNuc1LOSKQUVh6P99p3vocqdnDYtS3GGPvsSEihFwcVGSAJe1u/0OMEd9psUdrrJKctYi1HB7uUdqSaKExDTdPbnEcjzkMh1xdXevDWG+e3GRpVsRZpJyXiBWijXjrOW6OCXi8tDR+RYgdTiqqYo4tHW3T0oYGawxt29J5r4U2sXSxozYNU3t1beK0+5TTkrfd55PTjpeOD40OLZnT3vzUizz50E2Czx50dwenrb9I1B/f/NprPPTAcf+3s5w2XvuJ0yZOmzjtXmsTp02cdidOO6/ltYwhF+Pmrj6n5d/HhOPXbm7xzJWLPP3cRXwYnvP6J28wq9rR2k+cNnHaxGn3Upv4bOKzs3wG86rG2BLQz4/fO37wo4/x8vXB0eDS7tFdeUbLfLaxaNk/WvChjz3M088/wPGyYtw2FzXvfOsVNjfqdeGa+Gzis3uEz+6cIs8YmqahbVtmsxk5BOz0AiroD4I9DgvNuT7HOQV7QE/X5WtlJOjeeyRqTseiD+sMvRDkAlraH9Nvpn1Qtc4haOPnZnDLC37qXcyppuQQQkByajSjYYA5FFbD5CInJyc6malPeU5yUb383BxyZ62lLMszuRrHfc5fOXR1LbTOWowRgu/IYXca0tdSuAR7YjEIUSIhhZGS5rppao0mEqGqKnzX9hE5hSsR0VyUZVnSdb4PBRRRq7ZaoTOrB3wC5adfeID/21/6tkG4bOAPfNt72Vp0CRE0n6kWYGwTQXZIDBpK2s+pJwoYazW00zi1kHstbqYyox4DIQZMjGP4RckiMuBZyjmaPDc0rYKB5KHwoz/3Fj753IP9p7/paz/NH/2u9yMMgKHhjHGQCcl5d4Wu9ZRFAm0Zy0agrmtms9laTs+81tYMIcAmrSNA17QKWOnaHKY5lhHnnK6dH8KhV6tV37exbA0F6eh/P9YJndQURoxJRKM6aJzDGkvAQtQ8pqdbSH2NPebaEbmEtWdFwIihLFNhPeeSN8Ggz+MQ2zz+cRuHmOY5DSHgnKOu6578xp/tQ1Yz8Me4RihZR50z+KCF9NQLwSFY6qZRT5oYiLS4riMaCAW8dPwSlzYv4WYFPnqefuHTHLaH1KamNjWFdXSx46UbL7Lz4C5FpWO9tHWJZb3k+PiIm/s3uPzgA9jCcvXZl5jPN9jdvMjF3UuEGAgE2tDx/Isvj/qq+aEXiwWXLz9wZl2mdrZNnHY/c5puqqtZReiazwunhbDOLHc7p52X6tVYi7HqaRbjWU4bpAOsmTitv+fEaROn3QNt4rSJ087ltCD4oNy2Lg4RYxSfAYJPL5nE3JWcRu9JbpLM6Br85ice4cd+7i39i6fcfBBCyCm0Jk5bu+fEaROn3eVt4rOJz27HZ/llfz6jBR/55HO7a+8dx3N5N/PZw5ePuHprhx/4kd92riQ8cvmQP/Pv/iSRwG996nIe0cRn43tOfHZX89kdDUxZ+QGqquoX4Dzw1No0kS6BT16IHEI3XqRsmYtALtaJDNZq/VFf+IQQKKsKYwxtq/ceI/QYFEPwZMtfv7DaSU5DwZ1BPo0t5GKBYEJU72aznmtVRC24VVVp7sLbzKP3nrquNR+kWbfw5lyMOT9qntfcqjR+HZNBsOlw0AH6+aIw1HWbhNxjTDeQhj6NGD2+azg4OKBwhnI+J0TNS7lqOurWs2P1+d575vMFxtQ9uYvYZLGPKT9k6qv3ED3Rd2fGLkRC6IhE1PnDa3gqASNqKfa+1WjIkYG7a1piTJZbq/8aa2naDisuvUhTRTVydhMxFroISBQIosqZnkVAwzDPsXobY1M/B68DUeeVlFpIx2MkslqtMPMFpXVgtQhbXdfUdc329vbahmMM9Dl3avYU0LzBIQHvAHSz2Ywg9IXZ8+ettSyXS0A3Gfnv1lm6pXrwZ
XlS3dXPaU5i9bzweHXYIEDoECnxMdAFj3UFEUsXLcZUGlIbB/nsizyi3g0gZzZ6Y3nOFvxIRAxU1YyI5j09OTmhqqp+Q5OL7elmo+uxxzm35gmTMcp73+chzn/LGy3Ni1r2/cj6pv22PejrLHgiuqFzJoUWp5ols9mMjsCqq6mP9wg20JkW81jBLbvP4dEhn3z+k8R5pK5qVqzorMcXgS52mC0HlbD0NXv7tygqh3UWWwqb2xscHh8RJfLIE49z/aWr1PUJmEg1L9k72uf6zesU85JLD1zCiOHm9es8+OCDHB4c8lsf+8gZ3Zva2TZx2v3LaU3TpL59+XJaXBcJQFN79EWSz+G0PGbvvXpuTZym8zZx2sRp90CbOG3itPM4reuEf+vP/n5uHWwQguC9PuGJh2/xb/yLv8DORk3wHT52uIQ7dyOn9ee0/n2yevF+0297jre+8Qb//V/7Bjo/vMz6T//S78KH4cXzxGkTp02cdu+0ic8mPjuPz2KMfPK5h7m023D54oroPf/dX/tmfv5XX3Ob8Xs63961fPbp5y/y7JXd24kBTz9/ge/+U3+QP//v/8R0Rpv47J7kszsamNpR8avTQpp/7i15EQjrXgEDIK4DG4kY9NoBgtWqOVhAY9A+ZItttn73lkPr1qys+Vn9c/SmKlwjLMjK+kpgH2PU7gT1GDDOrAluCIHOd713gOYylTUhX1e0iMvW8dHDQ9DwwtPznFsuIiZrHU4eZ6O5zaQaYlBLqKSxhphqIiqSeu+pSkfhCjIxhhBpWi1WCBoimcMo1SI/EGcUwZkcYqmhpz/7S6/jl3/jib53jz5wg7e/4RnKMoWnhgAmewDolxY2DBA8gha8E+X8PneuSSHS2YPA2QTyomHF1iTv7CRHg4dDBDH9xiH2wJ899fTrvBdyaULJIdNZzkUYvEv6DYZ6jEBEpyOuWcKzhXz8+7xemdhCjL2MZ6HM4/XeU4SSaNd1biyH3vteV/NmIfcxP0tDpyP9JigO57XeEyhGMPktZaRwBW0UogdjC2I0xLWNVUjPLIYNVYxrczr2aBARjIiuxUieM2nlORqPLxNJr29d1+vVGKyLoug9A8beAXpNWrtTaz2eox5DJCLGpomh35AWRUGQiA+ejg5P5Gh1xLE/5nhxyI2b1zhYHrIv+zRtA2UkFnDj8Aa2KhAr2MrSxo7G19RdTTEvqOsVbb1CnKWsSspZyebmBgahshWudNRdQ92uaHzLxvYmxqmsFLOSum0wTrhw8cJtBHlq4zZx2v3LaTm8Xyfhc+e0vujrqWaMgsPdxmkf/fTj/NanHz3T3zzPWTJPc5rI6WsnTps4beK0e6VNnDZx2mlO+/SVC/z0L76B51/eZbkq1/tpAjubS/RVmfbMWElpbu4uThuf00JMAJ/kbFZ5trbqM/LwDV/zLIfHFe//4BP9nE+cNnHaxGn3Rpv4bOKz889oBh8cIXYIekY7OCrZP5yv9duYwO9410d48uGrdyWfHa9m/OIvvwURy9WbW9yudd5y/dYG3tvpjDbx2T3JZ3c0MGVr83jixsI2Bp7YL/Lwt/GLjdwkAUuG95gm04zuIUQMQiDStm1viV0jlhDA0lse9W/9Q/o+Df8KvAKwq6KtXxaJPXBkIcqkky2VuQ/6t/X75b5lgXXO9R4HY2XJL33G8zsmjF5Rek8MgLGBZKz8qXg3GnITQ+yVOyujMRbnCnIuyUjEh6gHDhEtfGZ07UIG2rR2IIixSNr0xxj56fe/kZ/7ldf3Y3/swRu85+s+SFFaQgcxpxyKnhA9MSZwJMtNGOY/CjaRoSp0TPOkxdmiSCq4p3lgx94qutRpjBlUdIB9/611icAsR8czOp83CpGdrZr5vNNicqKeLrE3KOk1GVCzLKtlW++QrdoxyUtex9PknZasB5MsD2KsrpEoMHddhw9eD1rWDNeN9C8D/Vj3xs/P/Ypx9LIxMaAAfa7fKL18xBiwRUHbBGIUREpCtCD2zObFGEPwuhkaxjk8N/fBWtcX6cuPzGucgTtv5jLw57DTfI+u63rPmqwrOWz35OSkD9vNejUmk7QNWNOttftn8bboRqKNhBgQsRRlSe1r2tjS0dKI59bqJjeWN2hczaeuf5Lj+phqp2JvuUfhCmbFjFvLWzhf4MqSsnR4OrrQ4Qm4omB5csT+3h7znS02dzbZ2tpia2uLyhVIMFgsq2ZF3TX6mVlBGz0SoZgVnKyOmVUVjzw61ImZ2u3bxGn3L6fFUR/zOD8XToujjfm4GSNpg3x3cFqMhuVqg1//2Gv5lQ+/ru9n4TzbWyusRcedDrxnOe3UQXzitInTJk67Z9rEaROnnea056/u8iM/81Ws6mJt7mZVw2LejF+vpsLjQgh3D6eNz2mI0bJQIYIZIg7q1nF0MtStsCawvbXiX/zWD/PMC7u8/4NPTJzGxGkTp91bbeKzic/OP6MZiiLDq95vY16ztVhyeDIYmawJfOu7P0TlToj+7uKzZVPy8q1dfvy9b+OVBCPzmXOxv3bis4nP7iU+u6OBqes6ZrNZKgy3npNwPNH6TbqhczjnFCBCoC8+l67tuk4vFQO9h7AqkE0glguthaBALzFSOcd8PuPo6LAXgL3jPRaLLYwxKecoaUMazvbVDMrhY4QovXrnMTjncMFhkuXeWgs+IiZDHWtKm0MSFxuz3oIbowpO27asVis2NjboOg1TdSmvZAaHLJB1XVPOqjMeA1lYM5EA/VpoPyCDfoyBoiho24au63DO4n2LEYMRS2ENJ/WK/f19FrN5H73kiXQhUhQVjz7+WF/DoShLQhSMK5AucHB0zPbWTl+Mr+vzdgvW3AEqg2bOThgCEfUWQPscvSd4z2rlsdkTo/OU1YwQNQw0FzHMRp381YWAKxxGsgcHPXBGk4BdLAYFIhgXfjRcu7nDf/v978EnA5Mxkb/8536Uhy4fAAGM0PkO33YJgLQPyS0DCITgWa1WNLMaIwas5uM0xrBYLNZAfAz2/fEuyVPnPc5axAjRhz4ks65riqrE4BAZNl25zedzVitd1+3t7Z5kZrPZ2iZDQW/wbDEiRCP44GnbBiOWebmgbho6r7byorAYUxIpCdEhZgZSENI9swW/6zTcdcxlxmi+1rZt1zYpargTXZOQQpCTvoznKf+c+ysiPbbk8WQvjRACJycnGgoeQi+jmfg0NJeeiMZkKCK9d4J1FiIYDEYMte+wVr0nmq6mdR2tbVnFFc/efIaXDl/mxuoGN09uEqynW3TcXN2ilZrKzMAJj7zmEW7euMVqecLW1mWqzUo9DSxgodxYsOMs2zvbLGYzrHEsT1bcvHYTg2FrvokUjmoxYyGBG4e3CN5TWMeFrR2iRIqqYGv39p4wUxvaxGn3M6elfMrx88Np3geI67nfU0fvKk5rfcV/8/3fybJe91R/19uv8Gf/5E9RzGI6WJ/Pacasb4YnTps4beK0e6dNnDZx2mlO+4aveYG//Y4f5Lv/5Hdxc2/Rz/Hvf88HeMebn8cCTbMiBt+/7LmbOG18Touo8UtsehEmhiDw3l99kr/302/q
0+G97omb/I//xQ/jCsNzL14AJk6bOG3itHutTXw28dntzmhvePLq2hntT/+hn+Q9X/8wf/ovfte6EN1lZ7TMZz/x3nfwvl9/w6vSg4nPJj671/nsjgYmoLfMZQtfnvQ1K3iMmCgY0eJUY+HWlj1lhxYVnfq/k4BA9IN0bYdPeVWNDIW1euHt/Og+IEZzSWoNNdNbIntgHlku1WKbFzuHxqkgSZNYiSQQ6t6FGQG9iD5r7B2Q/zYubNZbgr3OUVEUCAo0Y2tnFugxiGVBrOua+XzeC7deZyAKRqyG0qa5NUYt6yEIbdtQliXGauqfruvwXUcMno3tHUQ0pJUIddMCkY3FIq3VYN0tigLvAyfHS9jK/UpjDhFCONe4JGh9ouA9wetzbbJep+lFEvC6NGfWWqwYjCvxObTXaVE2LSzmiFHJSecteY8I6okhulY6Bg111QJ7WlRPjNHQVmOJos9W49IwAucC1gx9LMuSYJIshUBgIOIMHnmuAM2LmkBuDERZ9vL3MUaQtAlKujD2JPG+WwMzGenA2GNH18evAVuex3yvMbj1sjmyqhs0PUbbNBCFznvqrsaWRdo4OZASpADs2v16nWQA0pjINlvycx+MiYNnhyRdvI3e5PHk+Rp7Z+Rrmqbp8aeXu9EmaXw/9SAZituf9obQvqj3UoYm5xxRXS4QJ8QiEl0kmsinn36a2tSwHZmXc+qwom1b6pMVxdzRhoYbt66xu32BeTVn7mY4HEeHx3TRUzgtMFgWJWZrh/l8RlPX1Msa5wqOj06YlRXFbsGya8EJxbyka2BndxtrLHXd0HY1xcpxdHR4jhZO7bw2cRr3JacVRQFKSZ8XTjNG5/R0C97TyV3EaRR4bxJWafvXfu+v8a63X6EoRin+kjPVGU4b8dPEaROnTZx277WJ05g4bcRp1kKI/szZzNqAtQmvNWQJY7irz2mgns+aCsokj3AIQfBheKGLRJzzWpdl4rSJ0yZOu2fbxGdMfHbOGa0o1s9ohYvrZ5zU7rYzWgglf/vH38Wnnr9MGHPWndrEZxOf3eN8dkdJH4PPeCLHPw9gTw+m48m7bTu1ODL6vQDBh17Qx/cxxkCMfaGycfNevQ/GfddbDqQyopa18WQra8z/628/DhEdWs49ORYaGAQ0C+m4MJheO4Bor4SnQH6YisGiGkLowxGzYPf3CFlpB0DwvoOoHhBCpKlr2rYhexxYa8iz3nmv3tvZk0MSmQDGWqx1dN6nB2h4rPYV2s7w0acf5PB41s/BQxdvcnH7sCfSXkZC7OVE0v+MZOu+Wvwl/RwTIWRS7NdzBByDgkdiTqIqkHPEqlwlsE9zbkwKtxTbj/FMy54vqLJn743Q1+UYZK+Xc9Hway3s5tbkdk0O4VTf1/+ecwRnoDrtPXFap7Is5Y1DjFFnVuTMMzKpDGCs60CMRK/yRUwvHXtdkkSWDgV5WQPR4d7j/q2PYawPgwbSz9t4HOdtdtbmZ0QyWbfy37Inzul5HZPBuFih6WXCrONFyn08eEFFoo10pmUZluw1exzGfU7sMW3R4OYOKQ3RRYJ4xEEXWk6Wx0iEWVmxqBYQhKZpaZsG73VuC1cyr+ZYcRCE6EGioXQV1jpCCH1oazTgZgXFrMJVBVEixho633F0dHS+LE9trU2cplfej5w29Fk+J04jbUdFWC/kmuYjG2XuHk47y2Nf87YXecebXx5+cQdOy3I3lq2J0yZOmzjt3mgTp+mVE6etc9p5LdEJxMHAdPef09JzR7JypzZx2sRpE6fdu23iM71y4rNXPqO9cHWXZ65c6vu+MV/xxEPXEfxdw2fHywXPvnSZX/vok1zfe3URjI9c3ue1j93UNZj4bOKze5TP7hjBNJ/P+wnNkzS2wHnv+8l2aZKbpkFE1sIuz0KyLvcAtqqc0QdVbtEQOoCqqmh8lwRFlSiELuUoVA8AYLC02/Wn9SQyVpAYiX3sJOllUuprb90fFNoYwQCewXuhbVucs5RlsSaIGdTyl4aNFskqqQuT5zB7Z5Qj4B0rZwgaxiqiytU0jXoApF4bY2maFQhaoDyqN4FzhrYN6olBxNNx69aNtfDYnEM0oDlQY4wqfEbHK9bgAxjjsC72L6cQwVijxhYfuLU/43v/7O/F+8ES/N3v+Wkev3wLjccbDjRdjNjelmyUiMRiLETfEYMWM2tDR/YOCFHrRams6dyEdBjyybJvjAK8AWImK8AkgM+0AiZ5Gpj8CvEcyVxvRVliospkF0PK4XkqJy/JAzAdgmazWb++p9vtQL7/W/ZYSR4KGxsbOhejjcYY8PNGoKoqmqYZgEuKtU2Y6kkOax42B4huqnznyQXthYiz6hEvYkEcEYcPufDiMH7vs5eEbmIGIhnypOa+eu8pnE3FEk8VFxxthjKm5LlVg6hdC1EVkd7LIn8+y3bXdf1mJuNU3oSJSD9nGaPGHgYE6IIn+IAtS5BAlEAnHQfNAc/uPcNHX/4w88cqbpzc4HB1xO7iMlECYsGWwqpZ0bUdEmFrvonpLKEJHB8fY63Q+sDR0RG72zvgIdpAXbfMqopFtUlVllx+81s5PN7nxZdfoNxYsFydUHctlx95mJMTzS28Oa9YbG2xPDrm2o2rryjLU5s4LX/+fuQ0XQDBOPs5cZrm7Qajx4RTK5zwL/14V3DauS8TM7+dldTTnDZen4nTJk6bOO3eahOnTZx2HqfJyDs3N03DInQ+104Y4XZa8LuC006d02z6+U5N0ksprQUycdrEaROn3Ytt4rOJz17tGe3/879/Ez87qv3+ttc9y/e8573E4Amp3NKXms9+/WOP83f+0W87Rxpv3/7w730/7/nGj2OkmM5oE5/ds3x2RwPTACr0oZ8ZnMahYzphamWM6cVFiBpKVpblOshC/84jJiv32h9iwBibwNeoAJ8cpwXLoZoWiUJIIXoIdG1HllwxZgAmkmKMBC6oZCBBdCGzWRUIMWJjRELE2OE1TVKT3ssgRo+1BbPZTO/vzKAo+TnJgmmtpSxL6rrGaipUYnqWsQZXDvOclXMspOP7EQcrq+aazJZYSwhdmh96hREi82pGVTqsnTFfbOCD5qC01uKMUJVF/0wt0GchWkK2/EPKQal9K8uSerkC7wn+rAjl/KbR6PfEABKh09+JLq5+pfs3TYOzLh0kRvfqgT7gfYt1hZI0Bq0gJYQ0ob0nBMMcieh6R0UoHZPR9fYhcruW5dKmGhVibJKoMVBrP2KMtF2HsZbZbE7XdT1ojXOB5vHkL3POeSnGSOg8XVBSdmltgnDGi2G8sco5Ubuuw4ohOLvmxSKigjcQTb4HtE1LjIGqmgEeIxFnDIJFw1O1XlcIEWR9w5enYlxwcOwRMJ6LstQQZCJYZ9bAva7rfg6y7A8yPuRJLcuy9+IZexuc9jgYP9faghA82VuobmrVY1E9RnKYvQK+i9J7JLXSUseaw+6I33rpY+yHPcqLM5698Wlm2zMu7T5AWGW5AN9BURXsbu6wudiiXtaUlEgQurZlvjmnkhmzck5RFDR1zepkSVWlTXUIHB4ecuvWDTCBza0Nrt6
8gZuVbG5usFoe0zQ17XLFrf1DHn7wIWbVjIceeei2sjy1oU2cdv9yWj78Fa74nDhN0gbzL//gN/ELv/bafhxve90zfNu7f5XKru4aTvvgRx/np37lbTTdOg/HoIVCx3J6e04bt4nTJk6bOO1eahOnTZx2HqdZWw5vU/uVSy95gqYvR+Jdf04j5nQ4KS/TaTnNTfK9T6dMmjht4rSJ0+6VNvHZxGev9ozWX9gvZSQGf1fwmQe+///3TTz78kU+0/bXfujr+aUPvZY/8yd+biRRuU18NvHZvcFndzQwqTdQsmyC2oBjDjtTYcmW2qy4ghAlhzGe3Qjm++W/CCC54F5+Vghk23WMcY0oYtTQThMFY3RyM7CJoa+ZEGKyyQrksMgsLBIjJv8uosIXAmKTgKKeA5KEMpLIwQRi8DRtO/IISEXexIwUEHJYv7WpeJ/JYf86P1EgGtHPGUPoVCmV/LRfgnpuCWoVL1z2WtDPRejHLyN/axHB2YLgW3zoaGWVBFr70vmOkPrq65rCOWIM+K7FuAWIpicQFKCzHMSgebq7FmIMfPLZS/z6Rx4hpiKrG7Mlb3rqeRZVjRCRGPS5BL2XQrISZkyFFqOSu5JkSPOZLdo6Ti2MaPpxIkAMhKhF3iCtezwldzEBK1poTv+mcvXMlYt84pnLvVxdvnjM1779RWZFg0Rdg7ppEFPo+qRNDTESskgKPZAgGhQsYnoLuK7NqZBphj7GcOoglGXPGJXJDO65DkjiRVWTqDKaPBKtsZRFiQCh8wTje0IdaU/PryETTpfkHsAoKfcaEAqilEQpCEHwUTcdLo0p5z7NOjCML3+ZdNDNxRATGJsh+suM+hdjTLKuG0WVY0vXdQxDiD0G5WdlkA0pxDR/b20OfTb5Y3TeK2Yl7xNJ2zckeXFEsFisEXBC42sO/SHXuxvcirc4Nid0rlGvErKnRSpmiLAx38SLJ4f2ilEPoNAFjEueT0ZwpaPrVD999BydHGILg3GGIIFlu8I4w6wqsa5Im1/dtZTisEWFzDrKsiKEyOHxMVN75TZx2v3LaXp4CtT16rPmtOA99crwC7/2Rn7to4/x3MsXgMhbnnqGtz71DI8/cF1rVcQvPaf9xqee5MNPP8KVa7u9LG3MG77ha55hd/tE51tEC7kbbstpOQ1gPn9NnDZx2sRp906bOG3itPM4jdCcfTeVMC2/BLzbz2lJMJKcDTwVsyyeGtvEaROnTZx2b7eJzyY+e6UzGl4jiE7TG3cJnx0cz/jIM4/xzEsPcHA8v7PAn9MuXzzm0YcOJz6b+Oye5rM7GphiBvB+fnVyc5jgWiE+VNms0fx9iIaAxeEW6zJNUuSYYTRtKGPEtx5SuGjXdbr4yTrpvaewDmsNvstgof0IKGhGURDrn5UXNQmgTTCfYJG67YjRU1YFYg3WCA6B2KXCbhCNwdqOEBqWy2OqaiOFxiVLLaYfp4jB2oBzBWVZYY0DDK4oiaJFSiOiaQCMpgEIo7yOfeicCGVRQgRnLfPZLCmvhvr5kJVeAKP5qtNiVW7Gqu3ouoauOabpOrCOLgYwhoAh+Mj+3h5bizlCZLVaMtveRIyjJ/EYEDolIu/xbUPoGoxx/Nyvvo7/5YeH0M9Luwd817f8IyR2SY8C4JGoAGJMQETlKPgOoldSTx4XMYZEQB6TwiSttRq2KQbjnBJcjAQB7yPeRGyaK+UJ6TMpRDIoBIzLGwH1LPjlDz3J+37tdX3f3/Sam/zHf/y9dL5GQrJuHy9hJlAYJGqIpEkAJUQtNmstZTVDjO2hNwNfCBHnCgbrvZImqZtrlv5kmRcjGOvwx54QA23X4spSnyWCz3oYNYzXOadzI5bN2QZN0/QAYq1VIBXBd1kfFGxC0DDztm0pqhLEE8MKogccEiuiL4kyI0iFN0JLh8Vgo0mgqvlShQykGcxzLt0B3LNXRTWfI8bQ+UAIXdqMKVb4roMYscYQRBKBGfqA3xjp2pYYAhLTJCKoZ4nQdb7HqPUvEFFgbbuWsioUcCUmUvUEAjF2SBBctDhT0FWek9UxN7obPN89x/78kJNwQu1X2FlFjIam9jgb6ZbquXDp0mVu7t2iaQLBn3Bxe4eDkz1WyyVbWxsEvBooTeR4eaRrXwjXX75KuSgo5g4zt/g6Egh0Udja3qVpGnztmVUVRTnDzgyyfYnFbMaN6zd49tkXzkHwqZ1uE6fdv5yGCMEH9vb22N747DgtBM+NvYL/6vu+BR+GFEPf9u738/gDNxOafOk5LYTA3/uFd3Dt1k7fR2c9j1w+4D/+Y/9Ixyo5HZLXY+BtOG1Ym6jPnDht4rSJ0+6ZNnHaxGnncVqXUjydlpYYA13XIBLv+nNaj4d6YXrZp1h/+kVZBLou4pydOG3itInT7tE28dnEZ690RmvbjtLZM/ymI/zS89nLN7b5m3//t69JdVmcTV13Ow34Pd/ym/zO3/5Juk6mM9rEZ/csn93RwHS7lkPIxiGUOXQtL+qrvxkQBYjYZGlfHi8pCu1a13W9lTqHvrnCkkMEc+5HELX+ifTXjvs7DhfMLStfCIPC5fDS3DkjksIrBe8V9K11VNU8FSDNwpS9KzScs20a6rpma2uLECNd04zmTPo5yxbWbC0VGXIz5u+7rhvGKTnWMxGjZMu0WmyJsd94u7LEx5ZbN/fY3L5AVRY4A21U+2nXNly9epXt1z7FfDYHRIEJteSaNA0RzbmpheSyVXjInTmaUHzbUTh6QBcR7XMEkQwOvs+LqaSnFu4IiTjTQ/s28laxRsOIE6AEUVKxkAjMIsnLIoSImGTpTtkVsuKfbtmyLT14aL7bolwf4zgMs23bfm1msxnWul528nU57PJ0aCUMocz5urE8jPN96kaCNT3oZSalkogh9mkPI5G6rpnP570uGqtrkEMwc87QpmlYlAuMEXy0dCEgxuFsRXQlFCXBliwDlGUBkT4MtPcKEsOY5733qrdRw5pzH3Lxutw09DUgGT+y/I7mp2mafsyQQpqdS/l4hznMn+lzIhvFhxxiXzc1xmjoe1FamrYhtpFqViWCAavswYoVPhyzajs+fe0ZXjx+iZvlPi8dXOHEL2loMYVh5isKb2nikoPDA3yMVO0+5XxGvVqx3D/BzqGxK7qi47A5wsaSxcYGDz30EMvlEmMMzjkuXrzI/v4+JycnPPXUUzz66KN03rNarVgul7Rtq6H/RdF7syyXS158/gWcczz++OPnyvXUXl2bOO3e5zTQg+DVq1fZed1nx2nGCFXhztCETX26WzjtdHUogD/y+9/Pt/+Oj6YHyavmtCxfJmHlxGkTp02cdu+3idO+vDntvBai1si4V89pWR6Es3Ky1iZOmzht4rT7qk189uXNZ6fPaM4MQTbD3N4dfBbi2o2YVy3f9+f/FvNZl+6bIr/yOooni++1a9e4eLFCU8T1wqQ9mvhs4rN7iM9eIUWerE1iFsoMRLllAc3f51C927VeGWSwvDIClRACUhQIQ8GyfH8VoLK3kOZQwMGAuA7wuU/nPV+1Z/h+yKU53CaJMsZYlquGtms1J6j3KbQ1P2
cgDR/UCmyS4hHUSm6tJRotYhZ8SMXgRPsg0vdlbJXvyc05bCouFlNhVjWmDi+TjBGCz0Aj4BUAgo99uKDmVlVPDCkcVVXR+UDnA9aVw3MTtIiold5ai3UuzZGG+pqx1qZ+a+ogSwwBj+/LtCIGUM+PbJgKMUAKUzY2eUEweGZg1OtCfQJ0fqTPjCBYp7lTNQR4MPyQyCWm7mUrPTGmJY+cwv+RvAwvEscgfPq6/PJuXNhNLfTrG4ux/mTwyXOV751zi443Ivl3zjn6M9epue66ri9ymWZI7xcNbRzyFqs3xiDqStq6BjnfsYqgJYRGZcfaRJ+S/pYt9mfHkYLCiXHQtTzHeUwxDh4yCKn4ZLdGljatneYM1mu7ruu/j2lM1lpybeLx/I5Bv+8fuo/M3kQQ8V3ASoE4wUSLiRZy343QxobjcMLLt66z1+xzEpccN8fUoaaTDnVdiXSx1Vy/BcRUzkSscHDjGl4ng6PjA5x1FDNHs2oprIZLHx0dEUJguVwSvGdnd7fX0bzhbOqao6MjZrNZT1pFUXBycoKIsL29zerkhBiGjeLU7twmTrt/OU0Sd3wunKZJMs5f6xjiXcRpZ1+uVWXDYr4khLyhHg7Bd+S00RpNnDZx2sRp91abOG3itPM4zZ+zvCGk+rj3zDktv/jTl0LKadKv/7i1reHK1S0evryaOO2ccUycNnHavdAmPpv47JXOaEfLgv/tx9/NJ59/IM8u3/yOD/Gmx5/B+y89n51+vSgCG4sTFrNU2ypacsSNGmc6jNE1WB6f6Av96Yw28dk9zmd3NPmPBW/tQyOrZ/55/Jnx1+1ab4VMLz+yOudFMmLOCH8GKpKSqKKMrLORXprz5+7YnxEKZCVZv0QFT+XP0LZdKuqXraj5Wfl2kRA9IXhgAFcjg6XYpBzTIQQk953MUWfnK49Z52QQVpX02IPjmBxiDIjRZ/gQehAj3SuHWVprWSwW+BhovRaLy3fJfcueYtZazcuayPn6rS2OT6r+mRvzJdsby9wBBUIfe1DOL7gGqzp9qGOMpHnJoBB7sCaBmH5Whx7TpBur+WYl51TI3gaSQj8l5eXt5VPB6ubenLoZbKsXdk64sLNKMjLM+1i+cr/GAOe9T94Vg1fN6Zd348+clsUM8hnUx5+x1vahsZm8xjIcIylEfCTXo80P0PcvS2eWV58PmCID0CeC7UIkRAGxRAZwl7QRkTNgnr/WZXbcjzz+HujjcA1pHseeNGPQyvJyGsQHcllfkzjChmHcOTpb9dP7gEHDUYkGiQaTQr2xUJuGg3jIlf0r7LX7nHDCyq+ImiIYsRHjhCCBNnZghKIsKaoS6yzLw0O6usY6x3K5VN11joBuxrz3HB4e4r2nXi45OjxkPp8zm80oy7L38KjrmtVqRVmWVFVFWZY456ibui9kWlUzjDHTweVVtonT7l9Oy4euz47TSHmZI5xnYIrcVZzG6FA6XvoxFg6ywuj78zgtfT5MnDZx2sRp91qbOG3itPM47bx+5tRAqQN3FaeNz2mkF0yMsJAYRzJ1Zmh03nLt5iZtO3g2T5w2cdrEafdWm/hs4rNXOqMta8sP/oN38vzLF/rnf+2bP8qbnnj2ruAzZz0Xt48xJhu14OXrm5ws3RoOhgBtN7yLnM5oE5/dT3z2WaXIO225O9164H3VTRcl+EDwalUkAZG1lvEQQtB8mWNlAS0eFmJEyBZpTi34nZ4tmoPSnN2U64+J0ERD3TY2tnG2JG+CVcBUeWL0hNAhAmVZJq8Kg7VDKC9jgUTIXgZjD4z8d+dcbxF2zuFj0ByoepUqN4CoNdH7juDV82C5XFI3NfPNzV6hQ+cp5pYQOoL37G7vEEKn4YfGgQSMSTk0rWjeSRHKsqSua2KMFOWC7/0vfh/Xb230/f29//Qv8pWvfxq8JYaOseaHECB4iBl4YiLOJhm6DKt6xXxWUbiC5XKFKXVNYhqmFk/02ELztBJBfLZyD/OqmwAAkwhUMGLxovPZdgX/3ff/s6zqIfz0//5v/gzvfOvLGOOS15+kPqoHSQ4XHst0BifvvXpjpO+tLRjAeP1lX7/+6XdqTV+3+IsMYcy9h0Ea13qLa+HOjK7p9Sb1WeUwk+XgwZD/jRLxIdI0HctlSzkLSGkAS4yGGA3WOdquBQIhrgPsaVD3PowwYnjJaa2lSfIZYqdyZm2/EcqaN5/PqWsNLZ3NZpycnFCWZR/2GvNYZX1u85yOPR2CD4QY8bHrZaF0JV3T0viaspyRVBQEGuu50e7x3PI5njt6lmv1DVamYX5xzoMPXWbvaI8XXn6Bhx9+kK5TvfGN4eFLjyIiHBwc8OBDT+GcoyxLbt26RX3U0hpPWc44OVn28x5CYLG5ydbODiJC0zQsl0uOj4/ZSb9bLBZsbm5y48YNDg4O6BYLtra28J3nE5/4BBJhVlVsbm4ytc++TZx273OaSQea3e1dYmw/I06bzyt800IIiTnW2+maD19qTrPmbBo/PVRmw5PpD5evxGmxPwB0E6dNnDZx2n3SJk778ua0olxwdlo1HV1Z3N3nNMnvNEW5TJvWOlG5vb2kRCZOmzht4rT7rU189uXNZ/fSGe01j97gP/0jP8Zf+OvfxtVb26zqgj/+n/3L/Mk/9HN85z/zYfIZ7ehkxvMvXeZNr3uBqgxMZ7SJz+4nPrujgWlsuYMhJPX0Isc4zkl6FmDPE3gV1QgM4fDZYmiM5m3MFmS9lwJi0zRqIU15Esuyw1qni2iGcMVxftZxG5OQGIPLANtbKwdl7McRJSkNEDUc1KMW+v5+opZKzcXo0TDDMilxRIxaUb3v8Fn40zOMScXFkoCO57Yoij6/o/deU/hEeni1Vsfati1SOC3aai1d12g/xDBfzOhaj7EG6yzBd6xWK+qmJvqAGLWqGuNScUEF6UyZxlo2NzdZnpwogVULQpTejwEgdFqEr0hjycATQ0TISgnWmGS1ViKMQXOCV1WFiBZx0wJ1Cu45DZ8xBmO1aKEIfe5VzXeqeqqZuVGwj5EcrhrSbIUY6XwgBpWn3IxGxfYymL0HdD3CmixlGc0Gppyjciz/p+X87OZhUPSxrOXnN02DiMrcarUCYwioX31RFNR1jQ+B+XwOMmy4xtKercvee5qmIWa5TZ4FKmf6bB90k2JsgS0qjCkAhykqumh1rUPa9MRcrC/098n6lslE+hzBnJk733kiaEFB73WjkvS2S30be1bkMO08H4PHgUEsazrjcij3KWxq2wZXOt0wBK1DYo3FmgIfPcGAx7OsVyxZ8fz+83zs1sfpKo+blRTAfn3IRXOB2cacBx54gM57lWdxlL5i76W9Hqg35ws609HSsl1tI0bDmI2zHCz3qdu6H9f29jZbW1scHBxgjGE+n1MUhf5sLfP5vPc6mM1mVGXJ/v4+q+WKpml48jHNgVrXNVN75TZx2v3LaTkNQt2kHMOfAactFjOQyD98/1fwEz//VoLXeX7wwi2+8xvez6Xto55b7g5OOxtlJRlLMT22vxpOy23itInTJk6799rEaROnwfnntNPNty2+be/6c1oIW
hQ+y5QMSY/0mWF46ZZbVXa84ak9ZuVQTH3itInTJk67t9rEZxOfwe3PaFEC56Uxl+yUcBfw2ZXru/z4+97F3pFycFV2/F//6D/kK15//dQZTXq+y9E90xlt4rP7hc/umCLvtHCeB97j362BqJwOrTtH+BXn00INxHKaTOLomsF7YSjcloVuHO74asfSK1j/7LPXguaPTCMDhBxiOTTtU+4bDCG0+bJsyR3Iq3/ImX6PvTTGijLcP1vQ89+S9T5Z1ZumAQHrHGVZqfU35AkPtG1Ds1ppEba0FMjZEMuY5qAsyx6IxsaZ4fqU41RGa48Q8zz1ibYzaYNNHgvjMeZNQ75GpyCFLRuT5iqlgDDjVBACko1eYw836f/ZO5rz8ece0FBMoCpb3vnWK2xtNOtjjrEfawxnQyFPhwUOnzm7OTrdTuvL6d/1z033yCQ/1qu8IXLOJU8UJdSsS6dlpmmansDG/er7GvLmQdJhL8VkGqc/MwoFv00bD3es88Pz0lhO9SG3cUjq2KMmk+HamGUIXx637Imxhk1CIu7cr9h7xxhr8ASijXjjWVGzV+9xq9lnvzukKzyUEB20eiViNAcvAr7T0PV5uYAOQhMwXnA4bDBIJ9hoqWxFaSu6pks6OJD9WO7LslwrkBjCUBAR6HPuto16/iwWC8qqIhcUnNpn1iZOu784LffRpLzHnxmn6Xifv3qBD3z0qf5l1qKq+YrXPMusau8qThs7d6yvbf+x9THfgdPWP3z6cxOnjcczcdrEaXdzmzht4rR7/ZzWp8jp177/WP9S+FPPX+D6zSGDxlOP3eKtb7jK9mZNTks0fPjUXE2ctjaeidMmTrtb28RnE5+dPqO9eH2H3/jEY2RDxaysecPjV5gV3V3BZ1euX+Bjzz7Ch59+lKZ1PLB7xDu/4nne/Y5nePDS0TlnNDg6KVjVdjqjTXx2X/HZK0YwjTs1FsjTBfc++zaAowJLXLMOigidzwXQUnidSAq1NMkyahQ0rCWOgidPk1Me02AV1rA6GLbiY28JksU7xsjJyQnWgnU5dDFbgV0SxAyCQ47T3HIo3jCfYIxNUJtIwRhICi0ivYLnPplkgY8RMDISXDBRf/beY41DjBbzKquCoiopyhld1xGiWu3FOXzT0DUNlx98kOOTFfml1VnwUejMeTnP884ActrSgXxEvayjTjoQ0YJ4A4gprmhIZtt2w2szo3lbiZHgPWVVKfCQkdlkOMcVAhhMRmyT86LqqgqinxX4zU8+zg/91Nf0fX7ogWP++//ox3qiGFvHxwCZfwf0lvnNzc21UFX1cGlxqWDhGHBOy1/+9/QmaAzS+au/T5LlnN80eI+pjBJ8jPjgeymODLmEszfCfKYgOITDDhsVay0+eDrf0XQeUxjEVYBL+pSZUP8TRuPKfVIPjyw/sfd8cc7089m2rRa+S9eVZUlEPUbatkWSrgz5W3VOcsitMYaHH36Yw8ND6ro5Q4D9pi2EoXBdzN5AQ3+tGDAQJGhKjjIS8NRS88KNFzjiELtjaW3HslvRxBZbqvw1dcvhwQG7u9sc3TqkPql581NvpbCOpq7JlZXz2h4cHWqIrRhefPFlNi8uem8B7z2r1YqTkxO2trZYLBbEGLl69So7Ozu0bcvR8XEffmutVR13jp3ZjKeeeoq9m7fo2rb3opjandvEafcvp+U8zds7O5ycLPlMOS2uvfDS3+Sx3W2cJnfwDZLcaXhVnJYHXRYFhSsmTps4beK0e6hNnDZx2v10ThNE7z3u9+i/IUT+1x9+F7cOZv3f/+B3/jrv+cZPDPw9cdrEaROn3ZNt4rOJz+7EZ3//fV/J//IjX9///PDFW3zvv/D3NC9aMF9yPvvx972Tj3z6ib5/3/jOp/mT/8rPai2ddM34jBZj4JkrF3j4gSMefmB/OqNNfHbf8Nkrsl2ewNOAOf7deGFebTMmKWcECERJ1sEQMc4QwxC2NrbmhhDovAplFlYRoxZGY/BBrfV54LfrtwpEVhB6ZT8NZl3n6Vq15u3sbFFVFcY4Yqzpi+AZoetavG+wziTBHEBbRouUldZai8/XgIJaUg5jTB+Sa61lY2OjV3ofT+VLzR5pKD765FEQgqeqNihnc6IYiqLUnNvBIwSqqsAYXYeqmhEQJZBEpmuzJoKzbpj/rjtjVDZisSaFKJqBgKI3icxG/RRBjKVtWv1dDLhsDU2K7YoZYjRMUmwGb0McezuQvUcMoIX3MmHFvKaSSJGzMjzIh/SAIiaSl39ra6vPIQv06zdWvKIo+n9zDtAsX+Pnjb9/Jatv3ljlNQ9Rrd9x1AeTDmDZT0UYe2MEfOpnCEELvgWoqjnz+Tz1TXUnk0GMkbbrMLZAjBbaC4BHCJEUyq26Mg5JB9WRvPHQsa1v1MYWc5M8QMYtX2tGeHL6gJyfdf369bQhBDGu/2zeGOXnZcBXmUobqTjMU4gpyVQBre04bI64cnCFF/Zf4Hhridl03Lh1E08Eq6Tkg9dQWx+JAXa2dwgbgZf3r3F8fMTq+ISj/QO6Vc2DDz3E408+wd7qkGsHN4kSufDQBawTrFO82tvbW/OaqOu6H2fbtrii4PLlyxwfH2OtZTabsbuzw+pkyXK55Nq1azhjKcvy87jxvv/bxGn3J6fl3ZzmLP/MOK2uA3/qz/9+nr1ysb/kn/+m9/G213367uQ0cw6XkV7O8ZlxWsYgmTht4rSJ0+7JNnHaxGn3yzkNMWBEPXdFRpxGwsWO7/2D7+UXPvBafvaX3gDAX/1bX8fP/8pr+HN/6icnTps4beK0e7xNfDbx2Xl8dn6Tu4vPznRviFpJvSWf0TYWNa97/ArGRgqnf5/OaBOf3S989qrdKe4EmJ9dyyI6hMuRF1CScKUN5hh1YlTrcs6Hmq3R6kVw5z6Pf3/aeisM1t3x2LwPdF0mHYexLvVxLIhRi3qFSOksbWiJkq3lo/sNPdAXRF7BSgFr5HlgTA/6ub89OXmtDKaeaNITioKo4H1H27SI6H2sMXQJGELQeWubiDMWW1m14IrDZs+G3A3JJJyCP0UjfK5c3eEjT7+dVa2iU7iWd775t3hg5wBj1MNDFTEfGtR7QF946T0DcWRxzbM/fG+toPk0E0D0m4osC6JeClleMsHIANwxT2wawQc+8iSffP6Bfo7f8ZYrvOvtV/rxKsdEDNLPZ1agsYUaNGQwBM3xWZblmsz0RHAbkL+dLK7JYx5L+sogn2VCJBdzH8YoxiYwi/185vurV4FVmctAyHoYbp435wqMLYii95M4uld63Nh6n/t8eoiSNjNiDMYl+Yqop8fosBhGYxdjek+CcQjnOBQzgzoIzq57voz7o2Ro+n6YJM9ZXmKIBBOINrAMSw6bA24tb9LalpVfcbQ6wUugz9Rh4GR5gvcpRNQW1K3HB08TamwpFMHhaot1FaYU2tAQjKftGnwIWAzz2QZGRIE8EXGMcQ3MZ7OKGOm9L7KAeu/Z39tDRCicY7Vcgo8sFnPm2zu3lbGpnd8mTru/OE03oskrydhXzWnqUei5cnWHg+N53+ftzRMubh9jxN6FnHau
EOh9e3x9dZyWPcOyd9rEaROnTZx2b7aJ0yZOy5wWfJewc31O79ZzWg9KJr0MivklLGucBvDAhSWbi6Yf1839BS9f14LTE6dNnDZx2v3RJj6b+Kw/o3WBf/ALb+Wjn3qo7/Obn3qONz/xwt3FZ+es+xjzfTDsHWwQo0b0dF3H5YsnGKvp2KYz2sRn9wuf3dnAFOnDLgXpoRkGxbudDJ9HAGsCkZSUVMwsBt8rkYLiUGXAjIEuCXvbeboQIOfENE49C3LHz+nL7YQi/XLtb/o8LY7XdZ2CjlVvhUDEWAt9HyM+KNBb69KmOfchWbpF0LC/oIZ2MYDve6vEoXdTIVfrcfbR0IVX74cBjBM59t4EhrZtqOtVH84rEQixv7/3nrptqOZzXFlwvKyZLTTHYpeQPm/mg+gjMtDH6PnYpy/yP/yNb+7ndlY2/O5vei+zYhTOrB/Q2bF6P5WfQIxqie26jrKwQPYU8XqVIZGOeg8YYxG0KB4ppUPinxSSCDEDeA/09HKQwenvv+8tXL253ff7d379J/nn/9mPAIMFVnHVEkKHD15DDFPY7NgyXhQlbdv0HgRjmVEwWZe3dPczcnm66TP6jujVsg6KuUibsQqEIQG7taYPd871D2P/t0LzaCbvlcIVKjch4rsuyYqSa1HMMa4CY8lF80wi2JBKGp7Xb+3y4EHRE68xWFcoqMWIRA25Vn0JfYFDI4JN8rPmRRHy8/RfI0ZDXqOCV/Ya0M/pZmacT9UY9YYUo2sjCBIsMbYEAsF4Dup9bi5vcnN1k1gElu2SW/Ut5hszfIxEifjoOTo+oCxKtre3KY1lebKkrmvKrTnlosBVBoyncgXWOo7rQ7xpCbYj+MCqXXKpugARDo+OqKqKGCOd9+zt7bG1tcVsNmN7Z5N6pWGqxwcHFK7QcN664erNmyxmc2ZlxXK5ZHmyxBpDcbE4sy5TO6dNnHbfcprvPDFEVvWKxWLzVXNajEFfxp1qxoDJaSfuQk4708493Lwyp+X8zF3yXJw4beK0idPuoTZx2sRp9wunmbwOGZdv95LM9OM/3SZOmzht4rR7uE18NvHZKT7rOtg/sPxP//s3cetgA4hURcs3fOVH+MrXPYOk9Kp3B5+daiJqiAmKf21refHaNjGi2O49ly40SGimM9rEZ/cVn93ZwBSSziaw7/wQPhqyQkUgWfFzVMf5AhzXBMIwCiUDmqZNi+1YtR0Gh6CWbkTBzHsV8qbzxKZlNp/jigoxBT5C65UUnLNngHwM8vl3xhiIhrIsiKP+ZEKxtujDVLe2t4nW0JGgJM1JiIF6uSQiWFciUmBMCSIY67C2IGLo+rBaATGqi8b1hNd0kZjIxBhD5RxOQPoQNEFsQeFm2oMYEfEE0Tl3Vgi+xbcNoWvZ2drCRCF0EevmBN9gbcVi7rh18zpROsqgZJbTveEjGAUYJSsF0SDgY6CaVVRVeWZ9rThE0rUo/UVEAThvEESVtLQzYhlxnSf6TgsERg84BXwi0WueSrGCM0ZBHyUd6wog0oUO33UU1mmfU9iiEY2UDR6ME8QKuIFEhyYQE3H0RBkpCsfxcc3h4TEPPDDHewjRJ4AtkmeBwbkicbnQtl7JIuq4Qxd6L0ENn1TMG/YUOZfryBPHCPVySec95awiJC8+AaJzmAgmRlbNCS6F5wYiMSRCiEIMgqDEZHA4MdgiUhYz2uDxMeLblmiEIJHoQJzDdBF8oG0iYisiJVEcVjxRIkTBYHBR1zSYhAEJUEWy7qcQbJ+oTsBEQ9tFQvQ462i6tpc5EUF04hJQCKAhyb5TATRGer3Jm5WqnBMB72P/e2MkYY/ew5hI9hDBaM5TJ46CEhcKooFgAgfhiPd+6L0cyQkXn7rI008/S13U2E3h0oMX+dTzz3C0Omb7wiZU0MSGm4c3ubjYZXdnlwu7F1m2S158/kUAHnnkERaLBXW94ujwCImGrdkmxhrqpoG02YsR6qal67Rw3u6FCxSlY9UsufnMNVarBqLBmoLVsmHLaiE+tqBwihs2WL72HV+rHgb7+2f0cmrntInT7ltOa7uUezvIZ8Rp0bdEP3hDD01TI9zvnOacbhKdLRBRaZg4beK0idPukTZx2sRp9wGnRSvgCrLHeXrrpV+S5VN77mxOTbQuv3XdTpw2cdrEafdym/hs4rNTfPbLv/Ja/tL/8i+wrBXXtxZL/r0/8H+wMWvvOj47K85CK1bnDqEsAm99/cv6zKCpFxfziuNjP53RJj67r/jsjgambMHPbd0yOmpJisdRHuc1/XtW/bj2+7ZtqMoSax0EVW1J0qL5LpNqiGjuRdMyny8wKXepGEmeWXcc76m+xcFLQOLadcaoxVJDA9GQxCycImrEJ2+Acw5VOUV0yQKNAksIIeX1HIVSSgbtlLYg9UlJyCpYSrKEIkOhszi8XpI0rnpV92F+LpFvTIpkjIEQCbFLFloFNsT097LWqudCHN0YJbOmbSnLkrKsTk0oqpupMyHnahX6uSIBgg8twQ/kbtL4NSQSxJIUOGCtQ4zNV+m/kovPkYBNMaKfh96iT4qIFK7e3OLnPvBWDo+130XR8Ud/3y/xlW96se/HmPhPy26IoQem/PcYo25MjOa0HEJNdZ3PeKmM+ne6ZS8dn6zqfYjlqC+iNzvVz1RoUtTvQ5c5UWyMuqaJYUwi67braNsWU7h+bQ2k3LmBgDBzBVgHGCWumAQi3TOiG5ZhU6TElUNI+41UnoM0DyFEolVvmDyOnL9zHJYaY+gJUOe3oGmaNAua3zXnbuXUXGu6R1n7QpL2BYhB8AlbFOhhFRvM3BB9YH95wIXLu+y3+xx3x9y6tcdiPqeal0hhWLVLCluwudiksAUxQNe1rOoVm1ubuiYpfL5pWw6Pj9nd3SXnD27bluOjY3zQ/LSz2UxzOItQVZXKQBqrcw4jFmdLVvWKonSUs5L9/T1sabFiKGYFhyeHdF3H8er4jGxN7WybOO3+5TSR7AMnnxGn+TayPK7P4HWe+LuN0/SIcPZADQPnfEaclnKX5/zrE6dNnDZx2r3TJk6bOO2+4LQ0rzrXpBeNwzzmF4aI4Ud/5k187NOX+mF96zf9Fu9+xzMTp02cNnHaPd4mPpv47DSfWTdnWY+c2yVSVQ3qYyB3GZ/dRghiWjkBYxIum0hxSt5hOqNNfHZ/8NmrrsE0BnAZLSKirzsycsWRQI4/O3zp50IMa4vkvSYGNclyrCGSYW0x87/Zgmmt1fC2JCg5xBPWn3/7MWmhMOdMsljG/jkikkLiIpIMCWHcj1NjVGu86fsnPSgl8kuKaDhP4VUgxQgEGYAeteBnMMhzlUzHa2QJUDcNErW+UgZExR9VmihaqC3nlvRdhymrQWFTON+Z+YvQdR3Xb11g73Cn//VituSBnX1EWFtLOa2EDEX6gh8K7mFSmgOjOTf1egUFzd2pVvGU+FVBrZ/TBCAxbwjyOSVdZ2DvcINnXniA9/3a6wDYXNQ8+tAB3/FPf4T5TPNp9itwCpx7wk7PGRdhjDFqiOLpImdxfW3HenCnDZAgfRE9kbFFXOej3xqtySf
4EDDGpfsnmcjyppOczmMGS6RFC9n5rsNYm7JQSB/eCYJ1jmgtUQxDILb2Mub/jYA+9yeHhuZ/82fGOX5Pj388P8aYPjfr+H7jOT+9TuOifeNClWNjIHkGo+pWDOBNIJiIN56lX1IsCkwjHNdHbF7cpD6pOTk5oV7VlBsV4oST+oS2abGF6TeePih4N23LYrHAWkvXdfgQqJuGk+UJlx64hG+0OKj3npPlsie+8eawKApdDzSsvCwtuZCkaQWxgrFClEDAI6Kbk8OTQ7wPNOd6q07tTm3itPuL04Y5pV+XV+I05xzRqw6fHv8wp3cXp9XdDHElnT/fyNSvwKvkNBHF9nGB3/E8TZw2cdrEafdGmzht4rR7jdMGD8WMfZHbvaXTF2nw9PM7XL811Et84pFbvPE11ydOmzht4rT7qE18NvGZc06jJ063u+y9YwiGq3tbNG1OGxZ55MFDLu4uz5WDfk0ZcHM6o018dj/x2SsamMbALTbX2ElKl1a03zCmaxXozlpl0x0VVEb3zwuUrbQKbDGB23Af7zVksJrNqKoqLagW0orpc6dJ5pXG1rYNVbXAWkPT1j1+xwh1XSMilCk8sR9n+qwqiMcmADRGi92tViucK5jNE+gHQ5QU/iqO9UJ9ZH3uxxpBx2UMklLdRIQYs9AHiEHLByWlFhGauqYqS8qq1FBiY0nSk5eHELSIXNPUNG3HrKzoug5EcMV6yoH1Pgp/7i9/Gx/+xMP9r77hq36Tb/m6X1WyZei/mvjzmDTPbUweBmVR9KDpUyGxPg9szAocNe+s1XycOlA9gOjPaB7PENRyGwIuRLCCZldQ+fw/fvLr+PCnHu37+zu/4VP8iX/9FzE+DMXqWJebDN6z2awHv7FlOsv3ycmJjqcs+89myTPGDIB7ajMz3MsMn4uRtlWPFms17+tpMc4/Z+Ngzsl6/loB1ijhioCRZJW2Guq9qnFliSssAehCR0ibAbXjWxCHbrViP7CowbG9HA2DXtfTTDp5gzE2zlVVNZB+vkdag5CIKofBeu9ZrVZrQ8ufyXqS5y/PZfYyKJKc5Qlx4iBqepCOjloajsIxLx68iNtxlHXJ3uEe1hvKRcGDW5dxs4obB7fYv36LG1dfAmA/ClflKq994jW4lA/ZWtt/ee85ODjg+PiYpmm4du2a6leE+XxB0zRU1YwnnniS1WpJ2zb9xu7SpUtYa7jy4gssFgu89xwvl5jCUDdLIp6dizscHRxyVNc0TYMpE04Ud37ZPLWhTZx2f3Ka6ruu46vlNJGM+905x8PR5vIu5bT14awfil8tp0nq5/HxMc22mTht4rSJ0+6xNnHaxGn5Bvcap2FMPwcyQLNi/MjQlF94iUT+7X/5/fzkP349P/ozbwHg+/73d/Pzv/wG/sqf+5GJ0yZOmzjtHm8Tn018lm8gMlq4UddtWq+75b3jzeMN/uIPfCchDp39C//h3+eJRw8gCmLMGcPHdEab+Ox+5rM7GpiyxRbRCbZ94T16wBut7BrIZoEeWxxPN2NM7xWQgbsnFUTDT0W9DkKn1s6iKHCF6xcxd+MzAfhRxwfl662ScQTigbKoKIoiLUaBSWSUU8x532FigBRylgXUOkdRFHgfsc5qSG2rVkgjqVZRjP08+eARAoIqh4kKMpA9AjRkUgFCgV5/TvMVNQzQWIcrZ+m5BjE5ZHVQiDx3XddxdHTE7oVS++A7MOeDvV2zbuc5VxC3RUEM2nfnDF3bJXKKEMbWbUPTtCAdhSt6i6wCr6VNVtiAhtUaiZhUrC9GBZpcpC0iGHFYo/cJ0WNw6ToVjCxJ4/UWfB+Cfdp7YAwWq9UqhSevRy7llsOYT8vdaQAfe5NkhdYQTIP3Q+5d732S7aL/7Lr1XH/OOjJ44sTRGCX/X+U59000xNkVBXNj2N/fVxkwJoXiJp8UI3Rth0jsPXqI6j3gIxomLUMYN0mujKhFXHOSmqTz+kefCg3nMWoBQ7M29/18BS1Y2bbtmkdA0zT9emnYqo41SDizdtlLJoeA9rIXDYgFC420nMiSPX/A0zefoXjEsepqrh9e48FNw96Nmxwc7vPwa5+ia1uqasbjT72Wk5MTlscnrI6XlLMSZ5SQlof7/TMjWojRFQU7u7tYaymrCmu06OCmrbRvERazBTJfKBdbw9HhIXVd47vA8eERYg1l6aibhsPjQ8Jh4OLFi0QbKecVuxd36NqOg4Mj9vdunau3U1tvE6fdv5wmoqkSjo+OKC5ceFWcJqL9ddadPsOk+b87OO3gZMbf+HvfxJVrFzhz2kpNkDNc9Wo4LXsktl2L927itInTJk67h9rEaROn5XYvcVo+p2khdTnV14gZvTwatxDg+3/4nTz34u7ot8N5b+K0idMmTrt328RnE5/llvns7W/4NH/me/8a/+0PfA9HJwuOlzP+6v/57fyur/9V3vD4S3cHn2VOGztFCOQYnPPeOx4vS65c3eC1j+9PZ7SJz+47PntlA1OyHI4XRSfudKDkWNhv8wJEsqcAvfU739fZtDD5riMAjwk8c+5Ja62G31mbwC7fa90Act54spdDT2KkQmHR9EKShTLGiLEpTHU0BwqKnuDVQj62lIYQcM714zFmsD73At57DgzNZMVMyhFziKo1vSLqs0nXqbVeRBVQFRQlEmOVOFKoW0iF/nKuzSIRpVqO09zltRitd16I8d9PzWj/b0+4yYLM6N9ebkLsYSlGUp5XA6TwZLEgGp5reiHI+V8FDatUM5GI5snMOUMzCSKGtiv42LMPc3Qy6/v31W99kdc9cSsBwiB3Y8DP69p1Xb8BMCYdfk4Zo/LfMrD1s3XOPI315vZNMMZiTfaaOH/Kx2HGkC3r+nMOWx2mThgvpogMJB8Cvuuw1vTEmddi2GAMBJKQAN2C0d9jzDNr+kVMejWMf9zv014/MSty/ly/rvHsdXlyxK6tR16T8ZoO858/YugIHLZH3FjdZK/Zp1wVLP0SCqHpanwMSnpdSwwaZm6Mw7mS2SzikpxGVKcky4gxybtgGLMx6m2iOXPBoPrpXEG9WuGcxZUO33mauqFeqYfAbFb1WOGsoQ6epmmV4PPGg0gUCMQhR/LU7tgmTrt/OW3YkL96ThNSegBrzwBunv4vNae9dHOXp1+4zCefe4h4jixYE/jat7/AQw8cflacltfNiJk4beK0idPusTZx2sRp9xqnkb6i2JSyyCi0M4wPcl2FjNBweDLj6ee2+finL3G8HOoBv/GpG7zjLS8Pw504beK0idPuyTbx2cRnp/nMB8NyVfX9F4nMyhZnw13BZ/mMRsLi7c0Vb3/Ty8yrwdCRZWR8RjMC1sbpjDbx2X3JZ3c0MIUQeuthBre8YDGEAatkWDwYAdpoAUZ3JanqGtBba/p8oXq/YdF9Cm3L1lZj1BpfFAUxpryorkCFM1naYU24xsI2DiVUq6iCZFnlBdFcl+o9YSmKEh/orf8xqlU4W3OrqkqCr2Q0n897i7DJ1lofsC4paQaL3I+gRcSIQvCBo+USi85JUVUjjU9CnyKHooAYi0+RSBruqwX6jHUY64iowI
pLaxajFoH1nQpKDolFBbWLI2UiM/LtyFt6OdE0eVrXKSbCiuR1CISg+SCH9RvCTwNGidAVFMaxbBpcPoRgVJlI6xYAY7FOqMy874UA1jkQy/Jkk7/+w/8UObQX4N/7N/4xjz98K/Fj7HOy9mCevm8aDR2ElP6O4dDTjzqteVUNXibea02n08CfwS+DV/5SLwIAzcPrbIE1DpM8P0wCap32SBdU95xzo/tGum54rnMmyUjol46R3ua+l6nPddexWMzpgscH3ZQYVyKmALQfiEUkEn1LTjqbxxLj4NEQAr0uVtWc6D1d11JWAxbkMNvcjzHRZgwwop4DebOVPZHGODLMte1/b1MY7phU87NiiPjocYXBlgZP4KVrV3lm/1mOwxFXrx8jlbB7+QKr5ZLF9iY7D13S4n1pE3VweERRFGwsNth68CG6tqUNmit5Pp8zn88pioKiKAghcHx8TNu2VFWleOAcTdNyfLBkVsHmhUu8dOUlqqpkY2PB0dGBjiUKR4eHXNjdxVhh2ZwwKyvausF3vpfVtm25fvMmFy5cYLG1qQUUp/aKbeK0+5fT8iZ2Np+9ek5LB7C8QRs3kzwZv9Sc9qsfeQM/9f63jnoWMWbY2M1nLf/5v/NTzKoGrb/6mXFaPlwrp7mJ0yZOmzjtHmoTp02cdq9xGpLqLRiLVggZv2SK6f95HklyaHnmhV3+2g+9K91P0xoBfNd3/Abv+cZP9UI+cdrEaROn3Ztt4rOJz07z2Uc/9RT/9V//Xf1qLmY1f/h3/0MKp5d+KflMTMGvfPiN/PQvfUWSvcgbnrrBn/8P/iERUSNhPP+942Le8rrH91guV9MZbeKz+47PPqMaTDbloQwh4EegafJGdrQo+bNnmyRLcujTjGXhjyFbx0XDD32noVspP2VZlrRtgy0KirI4/96JJPpFzsQ0+rkXNlSZx3kTc2jcalUzny8w1hCCBzQcL+bxe4+xhsIN3gfB65hms5luovWJxAQuRobw/RDXLamdj1ijZlljNI/lGlkaCxjqukFChyGluvMe33ZqfZzPsc4Rot7fJyuzAs26sllrKYuSk2Yo0nX+eqUVO0PYGSQlKfhIGS0QIHrwoSNGDwKzshosxCIYU5B4C6weNKJEymKWisFZRNRwlQvuKXnFBL4BsZBDJImiCHmqn8PYIkgk+qH/479nYCmKIhnhfHru2WtzcbUMen14JgOBn/YwGFu6VS6HENjx5ui8FdA8sBoOmYkje5uoZXoggLxZCr03x7BRC96zWCw4OTqirhvqkyWQcs8WFcYWeAydhy4RnKDTaozNDhoJ3MNo3lS2NjY2CAGcCMbZfkMxvj7rO7A+R4mdxjoKUBSDrg8vTwXP4FkxfqGax9rPB5GApwmRrm24FW9xzBFt1TLfXLB3uIdBKIoNlicr9g72afYatja2Wa5qnCt44+veyKpepQ2m47CtOTjap2lannzNazW8uanZPzrkwoULbO/usrW7w7Vr1zheLRGE6ANEoesCt/b2U9iupes8GxtbbG1t4n3H/v4+x0cnWCsEPMWswDee1dGS+WMzmq7F44ldZH/vkPl8zoXdS7eRnamdbhOn3Z+clg8YvvNra31ey5yWMcm6s+mEQlAPnS85p8X1nr31jdf4z/7Ez/ScJgSqsunndTz2V8tpiDBfLLDWT5w2cdrEafdYmzht4rR7itMUfNGaC4Bo9ZO0+npOC2NJhO//P9/Bx595oB/Lt/+OD/Fdv+tXmc1mbG81azKd7zlx2sRpE6fde23is4nPxnwWYjjz93AXvHcMseAv/sC3cWN/q+/Xf/BHf5avf+dzZ/nsc3zvOPHZxGf3Gp/d0cBkjFFgS/kSc1igiGgoYVbsJAynjRB5Uc+0tIhrngkjgRTRv/m21byIouHzItmC6vF+dF8ZAD6LzFhQTgtO/h0wCjONeD+ExIbgcc4iCCEoQEj6XNd1iAg2kVy+V4iax3E+n2N6JYbsOiCSACeeAlXF99TPnN8xAubMNSJg0vwLpGJdLSJC0RfR0yXPKaMAAQAASURBVOd2IZVOs4q8+Yn9s0XX2IeABI/meT0LNrcOZvz4T72NazcX6WOBd33FP+HJh66cIvXkITIC1uxxkn/PeNgp0jGQajxFvca6Ik2c9PMb0RdtMa23bghMLy/aj8jHn7nMR55+lBxm+eClI77lmz7J9mZNHt2YQMfyMQaibDzSZRzWIc9dzrUZQugLreUCeGMr+XnkOUz/IA8mkZr2MOae9p8ZxjjIQyaLfqMU9ZMxfXRM6PnB+WdrHc566romGjCFpUik4TFpbbQvkZj0Pa3kaP6stQQ/FJxTLwmv5L+m0zpXp/FhPO/Oni9/pz+nepQ2UOfMbybC8fViheAiwUQOT45YhZrOeIKJtKGlwFGWJYtqDhZsMHS+oywLyrLC+4CVgaSzV4y1lqOjI+q67kl/f3+fxWLBYrHoo9ysMdTLmnI2o1nWXHvueaqtTYqqoCpLDvb3KJxDBGZVxXw2w1rd/Ja2ZHd7l1k5I/rI8mhJ0zQUtiRGCD7Stf7MPEztbJs47X7nNHlVnJb7ntdXvz+9pGkz/SXitBAN7/3VN/HpF9c3cWXhefCBowT2AWJEwmfPaaQZUvwJE6dNnDZx2j3UJk6bOG2Y4oHTrIFvfffP8oGPfgUfe+71AHzok0/RdpavfcvHv6TnNMVco9enPo+WYP2clsb6ptfcIAbh13/rEQCeffEi//iDr+df/NaP4gqNIlibCyZOy+OcOG3itHulTXw28dkwxYnPbvP3L+V7x5dvXuA3Pvkart3apu0ci3nD7/mWj/DWN11jd6cm5uv53N87wsRnE5/de3x2RwOTtclyGBR4M7CRgR7OLMC6weHsNVmIY8iW1qGAmJA3oVrMrG1buq7DWdODbAiB0KEb0ixI/TPGhME5ghD7vo0NBQpwsQ85VKBXZcjP0UgdFaK+GJi1mksypmDGGGlbDXPM+RFj1j4ZBFRBagQCAhLz/XMuyNNglIvrqZeFVk8KNG1N5ztcKu5nrYVUkC94jzEaqeS7plf4mNcngrWO4D1eDFKczdkNcOPWgr/6t76OrIRiIt/4le/n0oUGkTKNxadxDkST53lYHyWviKj3gERCIr8MchEtCtd1bQJAQ7QqL9rlgUYYHVwynn3kkw/xU7/05r7vjz50yB/9nl9NFyS5NQNhnQabPOcZsAyCsWcBJVvuQwhJhozmpQxDGHYPRr3U9xKoBGGU0CLZkyADfS/VI7ld1yu9dwrvHHknpB0XEQXaHFqbN2YA0QectcSi4OjokGjBGUchonIbZOAGIsRAkKAOhyjpZkIUMcTge6Dv+xZi73UyxoXTQJ9BWcHc9nOXN4h5M7i+EdS5yUSS8SOvxzg0ON9LnAEb8NKxvzpgGVY0tDSxpfUdNhqKomCx2MBFRx0L9vYO2FxsUBQVJ0cnFIXFWAWoEALzFOZ9c++Auq4xIiw2Njg4OCCEQFmWfRirc47DeMBmtc1+fYPrzz/Pw294A4V1zMqKl46PKVOo63w2Y3NjA2sMvvMUUnBh+yJsR
45PTjg5PKHtOnYu7NJ5DwGa5RCJOLXbt4nT7l9O69fY2VfktDyh/cFVzm4yFWPDl4TTfLAcLyt+4n1v52RVcbYNB4LPmdNinjeHSDtx2sRpE6fdQ23itInTRovYr6+1kd/zT/0EdSO9gekDH30jN/e3+Nq3fDz190tzTlOv4WxgMuSXd2ucJuuc9s6vuEJZdL2B6UMfe5xnr1zid/8zH8FYJk6bOG3itPugTXw28dloEdfWd/1Pip9fqveOL1y7zI//46/p+7O1UfNvfPcvY13itNN8Np3RJj77MuOzO6fIUw3tCzqNQWAM6KH1WBHkFDiNrXnrLa79ZCzEEDFWX/Q0zYq6rRGJbO/s0K2O6RLoW2tVUc/zUOjvrQSSc6KebSkfq4GqnKGK19F2TS/AzjmNUkm5oV1R0KRcp23bsrnY6HObRrwqTlS1MrZAjEtQnDUmzQ3Sa7yI9ARB6Mi5VBHBFYVaEmP2nIj4ECld0Y8xRGjaDrGGne0tQoSI5pO0KZwvW5rboApqrKMoCzrfEUVYzBc0bUuMClhtiD0h3LZFqJtWw1yrhKU+gZCo13cGIg2nTCAmYG2BSMpLmizWIUSMWLLPgTWaKzRE8MmiLc5iJCm0CNmzwxaOGAJd54kiBM4jqgFi4SzQ5LDGHKacAauoSlXq0R0yoFRVlTxPAq0/NV8C0QgxCIGAz94hYpNcqlwYoyG+IYbkeZKflCZ5BPZGDGJdkiDBGiG6wYsAdB4x2TMwEONwaAsIYh1OLG2zQtJ4rbW0oSV0AYLQBaFpA3XXUlVlTxaRiA+MvBbU8yYG2NjYoA26Xm3wVPM5RPA+IilHqbWWtm37fKW9N07yPlDMUFLO4eM9qcX1XKiQCkcGvU/+fF+IMwFxv2sUwTrHcXPIyyfX+fiVj3OyOGFZLTmsj5jPFjhr2btxyGI+R4zBiGM+20heBJ4rL77EvJoxn8/Y2trk4sWLKjudZ3PuuXzxEmIMTddy4cIFlsslL7zwApcvX+bk5ITgA7du3GTPHOC7wIXXPMHx8hhuRbzf5s1vegtbW5uE4HnmmZYCg289q5MVR90Rbad5Vy9dvkxVVBS2ZHf7Ark4ZNu1d9bZqWmbOO2+5bSMTYv5Zhr3K3OabztC12HO4428wfwScNqvf+S1/M2f+Ho6fzuZ+PxxWl7Poiwxrps4beK0idPupTZx2sRpo5Y5zSKsVqv+5ePpuf1SntOc0XoYPo5e0q7J2ylOi4G//ne+hk8+eyoli4BxDkyYOG3itInT7oc28dnEZ6Pm2w5/jt4YDQD7kp3R+hSNIwlQM4/WeYppvUkr0F83ndEmPvsy4bM7GpiicUBQsBMIGASbrLCDQEpPBoIZ7LwYk63Ko9f+I+s/ycpssnLEQBcjq3oJIhhriGaw9IZksdQUzjn0LfYvgoxJ32fFjl7zgqZwwgysRoSm6/AhYJzN1hGcc6xOllhrKOeL3jIrAj4G2k5zt4oIYpOnAFDNFjR1Q915TFERxKihIwNTso5q3GOEmIqpoTAkgLFCMAYRm0LWkncX0hOmkUgIDcao8NdNQ5BUqK2cgY/pjmBTLSJBo4tC8IjRMNC6g2AcOKH1PpGmUNcrnM0F5hIYSfbuHjWBzcWColgSQ6AoK5ouEHygCx3WDAGtYvTgEAiIKfD9eiuBGgMYofMeEQtGw6O1yJtBJHl6pLkOyatDxEBAc8qKw0vF3/kH7+JTz1/uu/nd3/Ehvu4dL2gRR3Tuo6SjjC4Dne8AlbWiKDDWYJ3FlQXGueQRoq8fm7ZltVpxdHTMQw89rHJgLDkdX8zyLQxzGHWsuvwqMyIosWVCJB2OknwqUKmcGWs1/216VqacKCDWjPLQ6s8xxiGnMILml83GsaDPsqJzIkI5K+mOW6KHzhtMdFhbUmGx0WMlECXQmUjnQULABe2XESEYaL0Wu4yg3jig62WSNxLo+hpD3nqJSbmP87XqepLkNfYFDzMZZAzIwI4IVkRlNI0tDRoxYJ3Kkc6pQaIjGKFxLd1ihZ81mCryUHWZo+Uxzjm2Nre4tX+TRjq80YOwJ9IGT9M1bGxtEIgcHh1Szi7R1h1t07JRzTk+XuKDRwrHzVt7zGczLl66ROcDR7f2qFcryqJg6RXb3KJkFisW1Zx5WVFEwXQB3zasDo/Yns2JYghRaDsdpZKi5eLuJfWi8LrJlgilnJcbemqn28Rp9y+n+XRGeTWcJkDsvJ5doxavPH3AiQGij19UTosBfvTnv5pPPPcQnT9bpP0LwWnZI6s/VE2cNnHaxGn3TJs4beK023Ha9tYWs9l6BGzGoi/FOQ1XIrYEtLi8UYElv4jLc5lnKAL7RzN+5KfezJWr2/gwvOD97e98hm/7pz5OWemLnInTJk6bOO3ebxOfTXx2ms/e8por/Ol/5e/wV3/on+N4OWdZV/ytn/in+cav/CBPPPjyl+yMNm6CYKMgUWveqJROZ7SJz758+ex25vj011TwLH3lMEOyZRhJypjCI0WtroptY8tfHH2BxAHok7qocMdA17X44NWrwFoFih7oU4G/UU5CkvV+CEmN5LoE+fsYfBKiXMAuW+c7fAx03ivoG0PbNAQf+hRoiGKFTxbnHAI35HgVTTMX1dKvxeAUvs/NMZ0gUEZfAMZIb8zJgpyviHmeBELo0pgjTdtijO031zpfug5qDZX0mZyXUj0JfAYfY+l80By3RmjbpifftWUbftO3qpoli7/X9WMItR2/p8v5cxPSEclRRtLLj5J28l8T0dylYhIZJBCNOewyDrKliUo5Xs147uqD/NpHn+TlGzs463nja67xje96hne97Yqm8EzjyKOLqAXfhwAyWNWN1fl0ziHJY8UY03uu+BBY1XVfyFD6tU3jj4lE4vC+MlvBM6Fm7444+l8/2WmqxjfxXateAmn+4kiuIpKKK8ZexmKMSceGeQoYPIIXUS8HA9EowenaCT4KMapuW+uQGLBEbFqGIDFtnGI/n1roMK+JSWNOg0jzlwk6b7pCjD3pDWs5rDOwFjo8DlXt5QrdzNiRPq+DvSTCzcQcaXzLKq5oXYN3LbjAbFZhsRgsRVHQBk/rPZ0PCvRB87q6QuUhEGjalq7z+C4Qc47UkW7XdQ0ibGxs0rYtx0fHHB8dU5YlmAgmUlQqY4VzOGPompa2rmnrhtB15A10nifnCsqqIgZNa2mNJfpI6ALRR4at1dTu2CZOu285Tb3T4qviNMnzG4cXbusIky7/InOaiOEjn3qUT18ZHCUeuHDEm15zjTd9gThtGOv4UDpx2sRpg25PnHYXt4nTJk67Dact5gseunjCkw89169r3RY8+9JlmnZwYPhinNMQA6ZAxKmMRhKPqXxI/3PuKdw6mPP0Cxf4wG8+xtFJNpRFnnrsOu96+/P8jq/7NNYxcdrEaROn3S9t4rOJz07x2XzW8NjlG1ijxgEfhGu3dqmb8ot6Rmu7guevPsCvf+yJM2e01z1+Q1+oj/hsOqNNfPblzGd3TpGXhDSHbp4OF8tgq+Ga8Q53WW+R2FsR9f6q9KvlkqZZUZZlCjszOGuo0yLn
EDRbDCFpwJpAqDDpc0KAGAVjXBJAgzG2F5yQ+x4hxoAHLWLlHGVR9IAbc6/T9VVV9UAfY6RpNA/hneapz/cq6wsSFR3pYsSnXKxjoc6Fxsa/y/PXNA1bO7vYwiULqy6nIEnm9HPZ+2GcG9L7IYy3KPRzbdv2hCeJJIDB4pvHkz4TQqCuV8xmM8RoqGAInRbYiwqgglmfj57A8vwpZoyVPR9IQvA07XBQMUl5JV2flfmjTz/C3/yxrycr2qXdE/7yf/7DWKfrHzGszx59nt0QAkWh4bPe+x7wnXN0MehYRJ9nraWqKnZ2dsgW77EMjuUwt3GeziwLZ9Z/9H3+OVvP8+ZC7Pn6NZ7b/PkIfajyyDkwXZcINv3ae4+xDowe+pT4AsSAEId7jogjjz3fR5KhUUk+pny8St4h+H5d2rbtx3aenuSinkPBSCWG5XLZf2Y2m52Zq857ulaL3xnWN4cAnkAdaq6f3ODFo5c4qo+g0i3m8cEtXr5+DWOEultSVI4QIk36bN1ojtMnn3yCq1evYsSwuViwXJ5QOEc1qzg+PuaRRx/BFgU3D/Yoq4qqqhARDg8P8d5TVRUPPfQQV29ew3vPzFXs7+/holDZghv7h8wWM8QKi61N8uvujDWz2YyiKDg4OODGjRvEGLl8+fIZvJjaK7eJ0+5PThOhH9crclrUfNZtp7nEu3MimDLmfCk4bdz+pff8Bt/zHb+uYzPm885pw7ozcdrEaROn3YNt4rSJ085wWtcxX8z4tne/l7c++Qv8ib/0X+OD46UbD/A//NB38ae/+2/x8MVrXzROEzHYHF1AXHu5JzK8hBtmD/7hL7yO933gScakaEzkP/u3f4QnHmlwbj5x2sRpE6fdZ23is4nPxnz2gY+8hj/7P/5uMkZtzlf8qT/wdxE6YvjindFevrHLX/qb39r3I7f+jBYhhmSInM5oo2dPfPblyGevysB0+uv03/Oi86rAXoUoBI/i6BCapqBmcK5QnY8kp4B4Rnmy0sUgkBZVJEu0Wi4lFekT0rsjo94JbdeCDAW9ELWAd02rBJSU2nuPKzRk8vikBmIvcGMlb9u2/3kIsVufo6zwYzCIihZIjISUYzXn5RzP9XjOfYh0bUcIHdYWWugtRyWlwYhIsp73aN57P4gIwQ8km+fcGPWI8F1HtE5zjMpADGfkIl1vraNtWyRGlAzpgUGNuilEOIF3Bv5sOI8x4iMYa8mhucZYRCwxerwPmrs1eUlojksFsbxWmvJ2mKOY5jfFpa63CCH6HgTm87lKZdACbU3T9GulhrhspU5e7jGqRZgxSI+efUpWx2s55Pw8C/BDPtOwdp33Gp5t5Owa5HvnlpV+bZOhsJwflNaJXinatsW6ClsusLM5J12HsQXWoflR+zyukZBSWmRS1FsJRtwaQWbSFokIA9Hpy9rxBm/QCe89Qzdjvx65aF2ek6wfwLAhMYLEJMvG9Fb1vAGNEghlx97+LZ699gzLxZLjwxOaoxYfA+WsRIxwtDrCRUfjO2rfEr3nwQcfZD6f06xqdnd3KZxjY77g5OSEsiwprSO0nitXXsRVBZcevMzqxg2Oj485OjoixsjO7g6zsqJerSisxdct129cZbPaoCgKlWErHNcrMBFbFbxw9WUKV7C9tQsGmq6h9Q0Bz0MPP9h7vVShVIIM0+Hl1baJ0+5PTsucox6Hd+Y0ASSlv8iHqTOrGrweXL5InPbS9V3+1x9/N1dvbgNQOM9f+NM/xmseu4mmGTA6B59nTsvruy6TE6dNnDZx2r3SJk6bOO08TsvXFy6nuRmtcIzqUf5FOqdpwIH2YaiTIuuLkFrdOP6nH/xaXr6+0ff73/vXf443vuY6AmzN9+i6cuK0idMmTrsP28RnE5+N+eztr/8Y//Wf+Cv8l9/3hzg83uBoOeMv/a3fw7d/w/t4w2PPffHeO0peWW3TGW3is4nPbt/uaGAaC8zYIjoGoSxYn7EnQfRomKta/rX4lqFwBZpDNQPiWaVQMDPDQjJck3qI2gmlF4TgQ/8ZHYPREDy9G5DyLyYB1PFmxU3CbgaFZfTMvJkXkd5aehphBkI8bzYgJqHO1uzx58brEIyh6wLeB1xRkEM6TcJOSf8DSex29j4DGAxhsCA4Z+na8QyO5/xMtwGGAniK2joXw1NTjww5n27vQZCu6tcu9y2vrTEpZ2pUkM/M3wO8jufDTz/Kp6880D/xDU9e46ve/OIaD6xtQUT66K3TczIOj1TZSuszAm9rLUUxpJbQX8d+DsbXnrcxyr8fPj96Xlz/fO5TLjB5p/uM70XMoatJjmKaijjoU/YS8N5jCgvGEUkbpNSH/u6JMIdNUxzpRyLr4DFoobzxmDKZgwJ9NtJlQjvd/8FLYZ0gx/MxJkLQ3MbZ8yJyimgFooFlXNKYhlAEOmnpYkeX7PU+ekzeFIUIBiyGk+UJbaueRd57jdrrPAd7t/BpDnE6F13X0gXPyclJP2+9F1AYiEtCxBnDxnzBvJpTFqVGxs1mHC2PtXhf8lSojaVw6pXQtk2PDXG0oGIESUUdp/bKbeK0+5fThvl9FZxGHPofSSHh66N4+sojlEXNGx575ovCaU1X8PzLQxFzkcjrn7zOzuZqxF/DP58vTjvLI8M1E6dNnDZx2t3dJk6bOC33cI3TRL3kdS7O1vMbjZov9DltSD10m/Nkuvo3Pn6ZVe1oO8uzV3bouqHfTz5ywJtfe50QAgcHwwvRidPoPz9x2sRp93qb+Gzis9zD3P/N+Yo3PLGPsz6N3fL81QdYrsoeb75Y7x3X53c6o018NvHZ7dorRjCNJ/x2wq6rMExsWpW1v2Ui6CchBly/gJ6maaiqirIs1qQyg+x48XS/OgCFjBUk6lPGIZLqcZABXSBqPtUoAxLEGPtCXr2XQoTgfW+lH1vjx+GfIQy5U7NXwen5OzNdvTIDI5AfK8AYKPK/xmpYoPee+XxOTIBkrFUAy5v5KGnuQt/38bPHCpSfWRQFoWv1Fmm9fUj5Qk/3PwxEmvuniuj7MEn9MsNXPw89K5HTV/Z5dtMCW2uJxmAFrNOxBe9TTt50aLKGH/+Fd/DC1Qt9v37nuz/B93zHB0GGopAi0gMUgO86JD0jr11WxLzGuq5eu5n6ba3FWkdVzVgt63PW9Myv1tbu9PdjQB+33I9MPFpgchxue37rwTmq970YIK2FQWvphhjp3RZDxPuIKy0hGrq2S0UzAxI8LnU1a641BoJa7GezmepHiHQxqEeCjVRV1Vv0dQyxt/5XVdV7O3RdR1mWtyWrNd0dzVP+3PpmyODsWaIUEXWmsZG9dp+2aJlfmtMed0ghFLaAKCxPlogI1azCx46irChdxcHeLfb2brFcnlAVJWVZcnx0wNXnnmPnwQdpqhmVK9mYb2KtpW5bXnrpJXZ2dynLEu89dV2zXC6pVyu2NjfwnacqSh669BBdo/XLjFiq+ZzD1Qmt97hZRdc2+C5gpeCBBx6grmtW9YrNjU1OTk7I4avq7eJ7D5epvbo2cZpedj9x2tDF+IqcJoA1ymMRxdjCBYwJhFRI/L3/5B0
889KDPPmdz1C63N8vDKeFYPBheJlmJFA438ucpHt8ITjt9IZ7fU3P//3EaROnTZx2d7WJ0/SyidMGTtP7np+Fop/UL8I5DZPqpZx5fJJZBN8Z/u5PvoVrNzfP7WnvCT9x2sRpE6fd923iM71s4rPbn9F8MPhgiKH9gr93nM5ot3v+xGcTn53fXpWB6fQC3OYKtThHARkmOwSPEHsdjym/JGjOxbZpWa5WLOYbPbgOL3LyfYSiKLDW0nUtghC8p0Mo3JwYTQLpQi2uRgu4qWUxCapNxQAFGp/DSTW01JnBC6AqNfdg23WU1Yy60ZyZbetZVGUP6ON8jXmyY4zs7Oz0uTVjHAD2PC8C7z1GhKIs6WKkStdVVdV/Zmw5zYIexWqRNKshglGELmpoLggxarieNYJPn6mqqu+z7zxS2F6ZxsoYYiT6QKRFrIYdmjNrriG/VTWjLAvarsZZA9Hjg3pSaKE/ixaJ02KNkZD5OwG9xVhBLAM5BU15FyUVkfMBH3LhMYdNoOy7BOynSdWkfLdqK1eoF4EYe4UonFub0ywLxhh2dnaIMbJcLhFncUWJiKGua27evElRlFy69MBID2Qks5C3MqcJuteSERBlQ1+2gmdgz+GZPbkbQ8SMwpGH+47vZYyBoJsjvBaMQ1ROoqhFO8+X7zxd0wKSQlXnxGqTrtPCjUikaxus1UKM5DGm4dR1jZKooahKcqRk26rcELNXienHdHJy0utG0zS6sUgbHEjhwj70a3GaIMebsCy7XdchRj0080bvTMoribx4cIWrzVWOwjF2YTlpltSrBrywmC8IIXB8fMzWhU2qqmK+WFA99RSr1QoiLBYL5vM5i/mcCxcu4EPg6OCQw8NDCELTNvgYwBoWiwVFURBjZG9vj/liwTznRj04xBpDVcyQaLh27TrXr91gsbkBVrCl5dbBPg8++ZgWABRH13aYwjCzM5b1ktl8ppsfPLf2blEWJRubG0ztM2sTp91fnDbkVzevitOcMxyfnCDRs7uzyf/8n34/f+PHfzs/+A/e3Y/nhWsP8t/8jT/CH//nf4iHL+59wTjtb/+Dr+WDv/VE/9xv/m1P86f+0M+zMV+hLwi/cJy2v/9Af32W04nTJk6bOO3eaxOnTZzWc9rWJvgOZw3b24tzpeGLdU6TQlLhdX1haIzibETn6+nndvnrP/S1HC/LO8r3xGkTp02c9uXTJj6b+OxOZ7Qf+ke/g1/96Bv51779h4EvLJ9NZ7SJzyY++8z47FWnyDvv51dqPVDJSPDJAhqo61UKxTM4pwW6BmTOimKAQSFC8L2wnd+f9c+BGh1I4YlaACymaBSLGI26ycLmUiG/mJTOe0/wobfajZ+blSxbfYF+gWMMhLB+7Xlgn5+Rn3f6umztzm21WmlonnOakzX3Q5IHQW/BH6zyp/vc+U4BVnJI7gB2hSs0asl7nNWcrxd3jvkj3/V+/u5PvY2rN7YIUfjJ97+Nt7/+07z+sedpmwavyVeTBVuSo4BJvUgHCRQoMvjGKINEpJBbSQDf97XtMM5ijRlAW2DvcMH7PvgG9o81l6k1ge/+9g/wVW++koacQUnBvG3bfh4HkB7mJQS1ho9BZAhdVRCdz+eIGAUzzOge63au0/c+T04zyOe/r3vKjJ+drs+uF+fcJzeVz0CMSvBKsprrVqxVEUk5Q4Mf8rSLscnDMH2R8rOHgFi0gJ2YUb723Ge9rs9jKsnL0NpxB/v5zOM0xvSbmQzgY8+cPDenNzowFCGMa/cd5DhDRyZ3RD0jjtoldWzBQVO3iDVUbkZlK6w4RGCxmGMqJbambWialP/YWZyz7B/s0TUtXduyPDpCUO+bqqqYbSzoQuC4Xq7J2nw+Bx9YrVbUqyUnS/UCuLW3x8Z8g7Io2d3dxZUFRVUgzqini1Vvq7Zr+/DeGCOucOpVsFr1BUeNNXf0cJna0CZOu385LSR9N2Z9A38epwUibauYY43mcN7drvmGr/oohCV/+6e+mRgNIVqOV3NC+Nw57Rc/9EZu7i8Azze/8zdYzDxNW/Cz738bn3rhAZZ1CUR+7z/7T/htb3+ena0l3utTv5CclrF4TeImTps4beK0e6JNnDZx2u04LRDxbXduFgpi/Jw5re/rbc5peWzjNFEMvwaEX/7QY3z0U5c5OjnLQ6fbxGkTp02cdn+3ic8mPrvTGW1eDVETdVOxrMsvynvHVVOkM5q2suimMxoTn018dvv2qiOYPpPWy14GrrGgC6iHQKBpakQMRVFirCp+HIydg8IyKOew6Ot5JteU6fTfU87VCCkvYcQ5k4DS07ZNb8F01vZeCFkhYlSgDwwAlP8dC2Umo5iUbBjHOjGcmi2CHwqL5eeOlWMcvto0DbNqhnNqVW99Cqckh2YOBfIUWM0ZRdOQWKtWYhlC/KyxmMIS25a28dg0pp2tE/7g7/5V3veB13D1xhYxCj/3a29hXh3z2keeTYLoETRE0UoclE1SmGo0YIaFFQwx80S6TutgGCJBxxLR8FsjNG1FF+ZY5/Ax8ML1HX76l9/ez6K1gX/pPb/Oxd2WnB4v7xvUk07zXp62UOcWQuit21rAUBVO10LHMp/P+/lz9vO3UbwT0PfrwzqZ5DbebGTwJRedjKfpIX0XooYdJw+NHuDFaGhrTPk7YyDGYcMUglrqTWRNbrquI6KbANWB7NESCX5IxzgG8aqqaJqm/1vbtlrc0Ugv/2NduN24db4Y6WDubbqOiI+BZbeikRas0PkONysoqpJFsaCrO4w1zBYlq26JD566azk+OqQsS5w1GCscHh7QNg3RBw5v3WR75wKzzS3KsqRazOlioENBPetbWZbUJ0uauqZtaprkCbV/cIAzBWVVMpsv8MFTzSuiBG4d3CL6qOvUxn7eAGbzGavDlXpAOTeEqwbP1L5wbeK0u5/TYgyjTeidOS34QN01WiDUpE2yCF/1hmd45OIVfvbXvoqbB1s0bQHA4cmC+WGLlUIxMeVS39w6SueJ23NaiJa9w4pf/I3X8/zLlzASeMtrnmVns2HVLvip979lLfXCt3/zR3jjU9eJaXumGC1fME477/DyubaJ0yZOmzjt7m4Tp315cJqH3nN83E5z2rzqmFX+czqnrXGRDDJh9G1vkpOE4InTfv0jj/Kbn3iw79e8atnYaLlxc54wOHBx94Si8BOnTZw2cdrUzm0Tn3158NnmouaB3UOu720CQucttw42ubBVY3Iq88/Te8fbna1OCdZ0Rpv4bOKzc9rn0cCULcKQ0bq3UI+uCqGj8w2r1QkxQlUVLBYzmqbFiEXE9vdTsA4YY/NeVe8Z6b0T8qKK0VA+SX9HtEZQ2scS0742ChpSZgTjHMGroHbeq/B5LbgnItR1TQSscz143q7l/KRVVSUF8MCdwckYk7R3yIWagXgMSNnSmi3drihwZanzY5L3QBxv4NPcJQu4Oyc0E+iVLARP10WcXReHtmmwTsMODw8ONN3BqFnrmM3miIG2WRJCp9ZNDEYsVgxiKgVb8ppoGLEVSxe8iooYrCv6gnI+gDNCEEMRtRDjz//am/jpX3
ln/+zzKLMnNAa5UCDyvedFURT4tu6BNCtR13Usl0sWi4WClbE4Bz4Euq5luVyytbWFiOmL9X02LT83r3O2Bue+5D7noo3WWrXUi+0PbKfHfGYOjKF0BWICIab5SG4AQtYh/b5wFYgjYgmishUCRB9whdUlSZPpvYYaD0RE7/kzFERkdH4cPpz/lseViTV79DjnkNTXvOkBetnNoJ9zJltrB4u9CJI/o7sXRAQfAq1vWbZLjleHLN0SX3jK2YytCzuUZcnRzWN8G2mWKw73bnHUHlPOK1xVcO3555lvb7O5s8Pm5ibb29tYY5mVJTe3ttjd3mUx2+Dqi1c5PDnBFJb5Ys7Vq1cVN6zl6OiI7c1NdnZ22Nzc4MpLV1itamIUTuolCzHMXMHJ0QnLeokPHXuHt7QAoAjOFly8eHHAB4lceuCShqqL5mQ+OTnh5tWbn7VMTu12beK0e4nT9B9N3/pqOW1eFhTOEIM6R5RlyaMPdvzAf/7f8ee+7w/w3l9/OyD8tR/7F87M1cWdQ/7Df/XvaCnZO3Da9Rvb/Dc/8B5C1DuEKPyPP/Qd/au2GE9hOANefjE4Laff+GzbxGkTp02cdq+0idO+HDltPl+sL/A5nPad3/irfPNXf+RzOqdFBIwlRDTlEAm/SS9/8s/9HJxt3/rNT/PH/sAH+X3/1u9luSp47KEDvu+/+iGsidT1xGkTp02cNrXcJj77cuOz7/nWX+ZbvvbD/L7/6Hvx3nLl+kP8P3/wj/Env+t/49EH9j6v7x3X+OzMS0fpjQzTGW3is4nPzrY7Sm3k7It06f8r/VUw5DodVGz4OV8dQ9A8mz7gfWRzc4uiKADBGtcLQwgREUVlI6nIF7FfbGtHIaNx6Jch5fIMSYGJ65YIial/+m8uXxa8RuBUs4rlsScSMdZycrKkKCusKzTXqh0VjBsJuQh436Vw1yqBqaR5OauIAKHzfU5qxGhoYMxjzPM5FBdr24bVaklVlRSFwxir0bepmGqm1KxbsV8mwRYKopHMB5o7Ur88XRf6ewhgjKUoCuqupRAVkdVqRYjr8hBipPWepmlp6hYIFGWJRIsRhzEOEdsDQiCFTYrBWAshYsRirCWioHrzYM4P/6N3pdlLHhkhcH1/hxDPjxr66q+4wnf/c/+Exbwhh8OS7hdiBGNw1gJC23Z635hDQQ3EQBTBFg5bFIg1qa/aixA0T+bGxgZlWWCtw3d5LuKpf8+2054nWY5Ph8Vm+TDGDLlqk2eKCtr59x17iWSZNyZFcaVrOt/1mx5Ec+UaMRRlgRQlWLemp953FKPQZ2sM1gwgPBwQdbMgxvYh2xJjyoU7bFREJOn6QHZj0iyKQq3nUa/PXgaa/3jIXbxYLHrQyxs31AEFQj7U6taiqApa37F3uE9wES+epq2ZVXOODo4AYXdzl6ODI8qyZGd3i/rqC/gu0IUaN1sgRjeDq2VN6SpC8BwdHVOWM9q247g7YntrSz16jOCMpZrNsNZSlaXOp9e8qSdHx1jnKCtYnixZbG6yMV9QliWrZoUtLEjEzRzHx0dp4yf9BsUYw/HxMccnx/3c3Lx5nWZV07XNbeVvakObOO3+5bQeb0RImSdekdO2Fjs4Z/C+w6Ih/t53NM2Sb3/3j/Doxd/gb//M9xDj2WKn+0cLvv9Hv4V/7us/hAj8/V/8KmKIvO31L/ENX/U0Efj5X3s9v/7xx5NxKc9b8po8db/HHrzFH/v9P8ejDx4iYnrDUz9uvjCcdvYEO3HaxGkTp90rbeK0idNeidNaf7YY82lOe/+H38jLt3b4fd/yj1/xnBbRlzDOGmLUly1lWWrf42ixs1T1eD487+ik5Ad/7Ct59sWdU/0KFHbJf/Jv/xRtB/Oq0ZdWxInTJk6bOO0+bxOfTXx2Jz77yV96Gz/23rcSfH4fKIQIIhZjPrv3jq+Kz85t0xlt4rOJz27X7mhgyqGZPYiPcPM8QM9IMahcnwETiZEQPV3XpAJZVkE0FclaA8TY/wdBAbD3HOiVZfg5CxOMiGgECjEEiAMZOWuSQHti6JJSRQ2XFYgSezBQjwNL6MJgLU2W2Pxlk2IGr4UF84Pi8O1oPMP4xuYSFVhJxQoHby+V5kDwHSG0zOYlxlkNKew1Nz0MSTdK/wQdRxTbU3aUdP/0r77IIoXwkmoVGpw46rZWsI2BGD1vePIqTbvgmSuXALh5sMGnr1zi8vaRWqzFELEYU2BE82vmYn4R9SKQlPM0ioDR8NRoCj71/C6dj1zfX/Cbn3y0X8nbtfms4Y1PXYcI73rb8/z2r36mz4Xac3CSEw2TVDnLuchT1GWylKd1B/XM6L0zQpqXHDpJL2s+z6jo3MeBaskhk1kUYpbn7OUSB8KX8foNF+s9RrIyLmA5bmO9yV42MYO5RCTmzU9QBU790vUXEEvMYapAhnYiWiAxoi8606YrjjZckuRIRHpgH28ixmGmcurvY5LqiZBhvYYBpnGlz1nnCG0LIWheV9FNYbY9CpJCaSHYwMrX3Fje5LA5pnENsQACdKuUDzjXqEsEWxQlPnqiwGJjS8Nvo7A8WVHtVEQiXeuZz+eEztMEz2a1qTXIEvjOUmE9awyL+Zz6eEnT1ATv2bywg3MFMUQlWSt4AtFEoomIMRSmwrUtdB0xqCdMURQaSi6Gpm2IRGazis53RALWTbm9X02bOO0+5jTyxy0xdq+K0xSPDAHlpHzg8D7wusefxdqa33zmaT7x/GPUzXoB8rYr+PDTT/KGx64jEvnQp7QArCsND19eEkLgo59+mE88N6T/2d5c8uTDN/no0w/TeYuznre87mWEyJOP3OCbvuZpnCuJcTg8DZL3BeK007QycdrEaROn3TNt4rSJ016J06zteOtrn+aZFx/m4HjztAgB8NLNXU7qkk+98DCPXT5gY97157T94w32T7Z5/KHrxCRxIsIL13YxJnB595ae82IgxKAvZxM053k6OJ5x9eYmTzx8k1sHC55/+QIf+thDDBG8kTe/7iaPPXSAGM+7v/o5fdlBJIqbOG3itInTvgzaxGcTn92Jz1at4/CkPAPxL1x7kLrdoLCW1zy6DxK403tH4yydT5E9pHUVAUyKsDrFZ7dt0xlt4rOJz85rdzQweXw/eSo7umg2KdBY0GRNMLMsBQSPyXfwDSfHh/gQWGxuI6YgACGoVbBPlQlI0FydPkZWyxVVVVCWpVoQjVov+6UV0agSj4ZHii5mHxIYNcTTWIO1hkVVAUHTuvmGGDtEIgGPKQxBIitfI04wTj8HgpghlC7HTBoilbVI8MSuRcRjXNoMp/hYSbkCDGCysgMmKpWGECAVHBQxWFNo2F6MIJ6mWeG7GktHVRmidYTseZHIRq3GowVAvdYUdIYwxxCFIJaAJUSD2IIYu+S5HcA4jAFrDZaAb5fE6JnNLP/+H/4Z3vuBr+DP/L+/A4Bf+o038pFPPcr/5V/9XykqXY/OW8rFAiNaYM8j+OgJMWJtCenA08QI1hKMoe4cf+XvfDNNeydxXAe51zx6i//Xf/zDKEULIao1G5E+nDWiuTWtK/sNiBGh6
aLWHHCzI4X3vrtqWwBdRLuTMYgI6Dy+NVhsGi8zZCe1eNxrfkjYF8IvcQzdcZGPnCB4BWLtrLlj2gWtbZdI2WLATJutRKoM5Z6ahqAf3VSomd4+Z0Q1TzORXMhkWB7UDFkn1B7t2aVI20csaDF+sTWmYZXbIIuxNwEaem8UE8MYJqZA0pl5rb2AWQErV3Hg2hmvk4jSiRGNNmc3k+5hZSN4OCyxuRbG9JWeLK8Xg0QggRYcseML8LlTwqAXhf7UCdlXhHZ3Mesfn6ZCTt8voLcAB+eXzF37/6DUHv+X56w3949zselgdOd0d0cCzrynlZeHn7xnSkihs9y7oQYkCTss4LIvDi5gXn5Z51XeDByOh0OuHdwChT7fcqIqxL4HS8zXarnG5uSJI4r2cG77lfF+4fPnE6HtEUSTGQosepcJpO3NzcPWV0Xa6kc1rntM5pndM6p3VO+1akc1rntM5pndM6p3VO+xak81nns85nnc86nz1/PvtigMmJA1U0kQ/USznSnAEH8iFbEfGuLs4eqFZiDIAyTWMFDItoCmDlbiEsCAZSaV1xbqgZA9KUyVqZqkBMhLgdqBczSLkCnJDL6JQSPpVsWDEpMUXWGElY79AyFuckG63FhkvkXEQyybj8LjZHykZdDA1nDtACfS0RLdfW+ZPHtRl+AYUSZS2luWXuigGfOVbRt4E2gNv1ybRx11fr9g5pL1MlrKtdpVY26MTnn7U6lggG3uWtSu5JmkG4zs/+EyfZVjagb3VXHLQQQJvV0GgnO5XNeQdgbNHycgBeiaKXstRroHdui277YSBGA3obvH1pnkNdrQKkqtsBhJKzG5yV2AKIJlr7b9RreqgAp9uyNfYJm67J+rRNU46Cx0BMCT94nPO5R6/Zr7htXYo+y5OdM31I3pyVe7zzkJTS/jehICmDeAQSzhnYp2Sfa0qk5BDx1aZQG6sAKYZsKJI3AtT5UG1Eqh08sol86KUqxBCt72kynLD12w7ba4m9rC9QbQAREokotoYeQZKQ+TBvWLPfJOHleMuvX/6K5BcYBv6Q/sCgjsPpyENczFbFMY0HNCVCXA1/NJFi4pwuxCVwczpxe3PDmmbbDIfAMs8MtwODm9AE03iom6YYopGnOC6XC6ebI+M02YGKmmzdo5UBx7iyAsNgZDv6iZvDDV2+Lp3TOqd1TitK65zWOa1z2nOXzmmd0zqnFaV1Tuuc1jntOUvns85nnc+K0jqfdT57vnz2xQDTtbRlZm1kuUbyoH5Wvs/zXJ1wuzZadBrQPIn7+3sGEQbvOd7cElcD/lI6eP3+NnpYvuzgNHO6YRi5vz8TY+J0vKWYWRmXPdcKHSs67aQAuEV1bYHtunJAWymzLM80EMYMMz/j50qNoDf6K+8qQFh7RCbFigvJRLb1GFU1UNj3Tc2wlQFFSZAyEIMZbAj1ALDifGU+UscooJ5BBMFXvZs/FYIqB+d5TqeT6UdAciZAWbu2RLU9DK9qXzY8ggKepSxWmi8l6faMlKwc+XQ6VX0WAii6NXDY3hUzMI2Hg21uMEBzTkgxgaodpta+2RmQ2bq7jXh0b5tlvuX7RmaltHYjvM1cUp1/jMHWV5U1l3QWAK3ls3kMrhCJWjaLLZ5YlkG+rvUBcZZjkjCbQkrJswF/DAHxEcsEiCh5npkondsye5wrkf2FGD3iHc57lmXGezs0M4bU6MtVO/DeM89zBebrzIAC6MUfhmFo1lEbHUj1o9Bk5qS0v+b6+SUbAeD2eMPfHX/Ld9+94XfnfyG+/Ve8T2/45B/4px9+x80w8IvvX/Phpw95pRKXeGFyB0SEZV64mU68ePmCN29f8bB84nA6gMBPP75j+qsBL4737z/x4oWB8zwvfPz4vpbyfvjwge++/57pNPH27Vvu7+9JqpxOJ47HIx/evUdk5ubmDu+MqKyEucufK53TOqd1Tuuc1jmtc9q3Ip3TOqd1Tuuc1jmtc9q3IJ3POp91Put81vns+fHZzwowlQUpACci1XlqJC9fWxy29CcEGMeRaZooPShFBA2BwQkxlP6kA+RnTdPEJaz2jKiMwwbS7ZhUr/+Kxa91W7yyuAWInHOczw/m2OO4OSj5HSk1BpEqEBUDSSkxz3O9t+ijGJiQo632yc9Rb5UW4FuZpqkSpRl8A+KyRdc1j7E4g3NWnpqafqUiRW82VifCHNZc6mvAKq6N6Oeels7t5rMjJtcSjT1/GDx3d3dVvwY2m77a7yml6rzl2SHkUsO8xCWroo2QqxqxadqIvnxd90VtDyxsAXb72VUbseQTsyP7u+KQqkfvnB0ul2G/2pi98UkdVT2KIJIzBLRsEsq62KGN5W/kcu8yD5ffMwwjwzBa6XjVZdFHBrBkJeFAJS/KOIetjDU2PowqOMGp4lRZ44yoRxxMw0RIgub+r3ZPIqSE4J7cWJQ1Kr6YItzc3OQMiFQBGKz/aUwpZyZt9vDU5rLgUDl4r0jxkbKRs82cZTt55yuJlLUp421LnAkJWQU3O+SDMunIGDyX8yfe3L5gZeX87iOEBRk8wyDcDndmn2u0wwZvHWtYef/+A+Mw8XD/QIgrr16+ZJwGwhJ5OH/iH/79PXe3t5xubvBeePvda8ZhBEmMo2cYPeNx4t//9/9f/HFiOh05n89VNx8+fOD29hbvJ8ZhosvPl85pndM6p3VO65zWOe1bkc5pndM6p3VO65zWOe1bkM5nnc86n3U+63z2fPnsiwGmFniuQejaWU1/Wo07xsi6rhVwilRDREELIdh1KVhPVecteyBflcGM6vQFqMr7WyDPl7OuIf/dbe/MU1jXwDCNDNkwzAGaF+TnpmRGV0rkyrxCCLVvZzuGbZJf1PljPZdB17nt9dySp41j0700eijrUIjZ/iaNo1T3a54PKUVUN+cS5ApQDQRE9u/avhcnZOeUJSvBMgn0SbW0Ed12ziV7YwP6SOm3WsBV1bIhtIJKWa89QLa6K/dVoEulhLboyvRT3u0FRLa+rSC5NPeKynX75fFc9hkvm826ZpNRxlzGXd7ZEFbOJhnHAT8MBqeNvbRAn7DepHlxNlvR3BU3vzjFRNSyWbNJCJl4woqmBfGew+RI0XzSSmJd1lN+b9442NTtj5bVUjJurJdyyYyx5dk2T6rWs9RdbeBEtrJiYLeG+7nrDm+2DWhZqz1ZlHUoz9synSKjTJzcDb969dekB2EOAc7CdJoYpoHxMPLuPiCjA+9wIW9yxDJ4Yowsy0xKEbywLitLWDidbgjryrIsLMvCMHhCXAlh4XRzRARisrJ+q6W1cR6OB9yY+6aqHVI6eM/tq1vSivVWHbZy7C6fl85pndPqmDqndU7rnNY57ZlL57TOaXVMndM6p3VO65z2jKXzWeezOqbOZ53POp89Wz77s1rkFYW2wFZ+1qJYkZpFMM8zb9++RVUrOKYYURRP6e24sq4rtzcnM3FVnPMgPk+2RLCvYEL3IC9iBlfGZYs+4WQ7eKsA0rouDNOA90M1zHpv85oCGKqK884OKstAX8rKirTOvUHPnyP7OZZnbweL2bsPhwNJhdIr1F
6bXTST3Q7orwiseLh9JA3QWwZHjPv1rS5cotaygURLrq0eCqiOo9/GVACAzYZa8LueN1iP3SIpxRyJhwIyNnYDxwIIZc1KhLkF+jIvi1S7+rP3o9ncjjAyMDiHFyGkhMYE4qCYZlan6H5uRVqCbklTM2kOw9bjV3NkvnxWNigle6GUdI7jyDhNeD80JdFFdw3Qq+JzJkkBNEVzn1/swMtkIB9SAk2IS/h8EKNTRWMi4XAMDF4ISt14eS/VllLK5dO6KUVROxRujZxubpimifmyMs9z9jXH4XCs2JBSYpwmXHNQXtFXyeQRkdrXtgX28oxlWbi9va0ZBmXeKWvJNZuzYh/TZBF4730tBRc8h+GG3/zy75F/nljWyBT+GVblcHfg5hc33P/uAQZBvSNdZrzzuMEhR2FZFtZ1xXvHzYsbwhoJa8CL5/7+nvlyYV1nXr78DhGY5wuvX7/i4eGBy+XCPF843hzNnpeFX/zmNyzLzLoueOf5+KcfYDrw13/91/z+n/+Iw9d5dPnzpXNa57TOaZ3TOqd1TvtWpHNa57TOaZ3TOqd1TvsWpPNZ57POZ53POp89Lz772QGmopineg1mM6uLcz6fAbi7u7Nlly1S552BqabEMs/EGJhGz6dPHzkejxyOR+vrJ4K40q+0GLA5pZql1KhdeceQy7xSKmB8wA8eTVKdzTnhcDjkeWQHVDWHzUCmUEEm5TLI0+kGn6OU5/O5PqOASjGqEAJJAHHs1PMFUVXQtAOJy+ViC5Tf2YJjRVpyX1ZNCKXEMzLPl61UtZKglWCKagY3K8E8ny+A4hyoRoZhxMl2kJtNxcpg2+mUMZUorb3L9DaOI85t76UhvrIObVmhRZq3zIWSBVDeUT4rIF5AT1X58af3OCe8ePGCm5sbYozM88w8zwzDUImyOLiqVpLe97SVOsZia6OfQO1gRspcU8y8WaLzwrbQmw+0vlHePwxDE0m3DJVStmrjKJH1XMqKgdlxnFBVLpcLl8tlAyzxmfI2O3W5pDgGCLm013lP0GQl1M0iinPWs1S19tT1kjMLwspPf3rH6cUvOfiRh/NHcCfEj1aqGgv5WvS8qCFFJcbFyk5RDqcb5nkmxsg0Hhv72ezTe8+yLKb/Rn/lq7XlbX1sY1L8YhzHCvqXy4VxHK8OX6Ru1ErJcwv6qjaPEBIRIYrw7t09b1/8iul4w3h74p/u/5Ef3v/Av/vDPzAfI/444J3j9vSCGAPOCcfXR3569yMxBDTb1ul4wuP48KePvOcnQlg5n8/86U9/qrbsnOPFixe8efOGly9fogLzuvDx40e8d4S4klLk5d0L3OHAovC73/2Odz984DAec2ZGlz9HOqd1Tuuc1jmtc1rntG9FOqd1Tuuc1jmtc1rntG9BOp91Put81vms89nz5LM/q4LpWtH7zxxgfUJhi8xdi2GqIiTCutTo8M3NTXXKHBtGpRTAlXubaG0FvkRKW6lqDJEQ17qI1+Mu5ae7v7NBZxmfOc/+uoIErZE8ml/zwD0Rfl6X19JGUst7WsMvtxQQlfyzUiLz7WdqpYXZECyynOeoW4S+Hdf2HkWT9QrdDqyzyW3jlobQWtBmA3rJ+hW300kLiuX59W8FwTSP1+Wxg+WgZMRyzu4tpF+AoRBCSxZAvaYA49bD0+a8rhFk65nqsg5TTKQYbQQK4gebk6plEgj1Gdf6vNbt9vdiz42u8vqkSvwZ9NvxukwwYNkdupFFXTu3rVHUVMddjSA/QZzHeVCxzReScJJwTjlMA5pW4jpzunWoCEmyfdrbd0Rl6wEpbZkLRecA00gGYyOmEEIF8HEcay9foH5W1qcQ5DiORgoNYbc6LTZUS0+LT1BKaaUSQrHBYhvrurLGlLHH4VSQKBw48KsXf8U5PvBwfmB+tzD98ohXD0mY5zPLOiNOcG4b+zSNaFKmYcIfPetlsU0gcDrZ5iSEwLqunM8XjscbQJjnhWEcidHw9OXLO8ZpAJTz+YzzDlS4XC7c3Nww+J4V9x8rndPsW+e0zmmd06BzWue05y6d0+xb57TOaZ3ToHNa57TnLJ3P7Fvns85nnc+g89nz4bM/u0VekdZJ7QdAqeVjxfmuSUGy4kUTMQY0JZz33Nzc1EhxUit7UwUnj99bFy47bOnRWaL6ViY2PmkA6eogtg28NnCsDlIjzVvfTfv9qk/ofw4RqYDdRtKfIiwykEv52f6QS/K2ubTALDmDQ8p8TGFok8Fg3zNCl2ckI2V1lnlgAKvVwexru3cDNtkBQJ3jFdC3hFbWvzyzjkmKk9rYbcb54Lu8oWizSiwivGV3FGdu31fAanRuB5IxRpz3eOeIMVXdah4bKpl4tEXoCrztvNr1a/W1reW2xai9XzMx2zPSRphsAObElT8Wt7tWNM67WsqaVK1/69U6KCDqCu4jEhEJKAb2x8PAHAJxnRkHR6CQf3nOBp7VD6RsTrLeGp2X8ZcNRgHvkmWRGt21awnUDIBxHGtJci0tfWqj0PiKiJAaQigkXvys3LssCxFBnWVGeHFIEAYGvju+4eH2F3z89JFjPHBwJxQlRCvHDWsAgXmwsQ6D53CYSGtiHCYmDz45IoE0jqSkvHjxivly4V4fWJbAuqyowjwviPOkuB08OAweRfnw7j2DHxBxLMvCd69eIlgGRJf/NOmc1jmtc1rntM5pndO+Femc1jmtc1rntM5pndO+Bel81vms81nns85nz4PPvhhg+my0/MpoAcK61sU5HA5Pgny5lxgJy2xOlsu0Bu8JzUJv9+7RokTZy2PLdeakqRrBNPlcAuarYcYYCXHZ3VcN0v64AShGWk4E65PJzpnL3AtA/eeQYuAhhF2ZZXlfiVLb3DfHlYasCtiO47ARXP4sQ0rWY0RTzAenGbmGANNkh8nVdzqHVDvIAFwBXuryFCDdSHTTU11FkZ1NtaWirZ20wHBNBsWuih0UJwCL4LY2WA4PbJ26XGeDa8ugIynFTA6bbaeUIG8gYgiIOJzfgK21znYj0BJKm5nRfrb5T2ruN23FGEjJSirFOZKGCnTObUQcywYmKVu5qoI4i1orlB2TKNRQdiNJBRCcwOAcy3phmR+IQTMRJNZ1Rv0I4nK2QOuL0gDzgPNmQyn3MS4re7lcmKap+lMB3LVgx9WmRnXrodxuBE6n0zb2ZtNW7inPdM7hh4Hb05H7h0slhHL9NRGrKuM0EBWUxIRjZCIlT7jA/+Gv/3f89pd/w7/5u7/jv//h3/H7j3/gp+U9r1++Rp2y6srD8gk/OvzocYNHgzL6gcMw8ebFGz48fGDNpbGC53S643S649OnT8QI3gtvXn/PEhYmEd5Mb1iWhR9/+sR8ufD2zRsO04EUE+/ffyQm69F7uSyP1rXLY+mc1jmtc1rntM5pndO+Femc1jmtc1rntM5pndO+Bel81vms81nns85nz5/PvhhgSioNOGyOKI3DbgtijnJzc7M9fBhY15XyiGJiMSXuz2eOx0OOyjsu87wjjx09CLVUT2QP7haBFUqE28Y21EW0krlyjxme9UncgLCItt+zwTjvoUTeW+dnI
8IKHP8poloNOaVUybLouAUem8d2X+WrHJ1NKeH9lK8NIENdABFAA5oCKUTCamV/3jmm6YA4j2KRVxXwQyZLyUTXkAAV6JVcMPtoA5CXr+qr1XkL4m1/yqQhf5Yq+JZ3FptSTcSYsCySDfiL/m5ubjaiK2uZiWJZFpzzONmAOKmyxsA0HikH3dnYN6AqoFtLsJuNgeb/TEV70Gl9heZZ+SHoztjzdZlgBJujEokpMLqJpGC9c7N2y/dMEqpSD+kD8x3P5reFkIrN2bjsOUmV+TLz6cNH5uA53t4yTAPrcsYfDnZIH776IjTZLio48dhhiPbvnDM6SsZHCBE7aHCkHPKoIozTZLarW2ZBS+hl7Z1zef21EkDRa+mBOuVnhRCIKRKS9UJtS5rb9SjP8N4jCqIJTYk1JQ7+wOg8aYb1w8w4CL+cvuPD9J6fHn7k/OM9c1Beff+S43QkaWScRmIMnO8fePvqLct55nw5Mw4TNzd3limVIsuycH9vB+ydTkfevXsPwHfffc/heCAtiU/vP5I0cpwmXtzeoqp8/PSuowwTAACMkElEQVQJJ47vvv+emCIPDw98+OkdXb4undM6p3VO65zWOa1z2rcindM6p3VO65zWOa1z2rcgnc86n3U+63zW+ez589nTqQJZkuaepM0XNOV2hQZqNC87gZIdIEeczfzMFFMipkRUi3aK84h3FhHTjVAEwxEnIBXIFE22CKiNCNX6s0XNc/RbfAUlBOsfKBBCtGin8/aBbAAoUO+p2QpiRKRQnWcPZLZoBnJQSVH3GRH7qPjVz/muAlIiVloqzfjLd21+bRFCROozSrkl+W9FR2gCtcP5NAZiXEkaEayUcRgGI7VMpuJdBXSF7aA2EYM1YfsC68tZFVmGqBsOFZ7IXylZKaiVAqeqQ9VNf7ubtdhdhjWN+WsDg/LOnR3XiPf2TCnrqJt+yjPMTvPzGuAuRLTdR3N/Q4RlkuLqaKWUl9J+ti1hnWvZEGk2TVvArG9FHLlHcC7npmxMmmc3C1B10ahEkN27nFj5q2DlrTEmYjC78N4xDEJKATA7L+/NpgAiiHjEeSADfXKQpII5anosGQ9GSJttiDNnl6Jvl8urAdWEuC0LYO8/jZ2p7taJvI4pRqTpw1tJXKTaBmTAp2CVoikgksuFnZDWhFsdN3riF+P3/GL8njf+DT5NDDoxcuAwHJmGA4MzHFzXhSV/lQM8Ueuti0KKkXVd8M6TYs6qCAEQHIJHkKR45xnH0TaZAuIdwzQSNRLSyppWunxdOqfROa1zWue0zmmd074R6ZxG57TOaZ3TOqd1TvsGpPMZnc86n3U+63z27PnsixVMOf64WV6r2EhF4GVeGMcDfhhIMasqQVwDg3NY30pz4vmysK4Lh9MN6jzqPH5wxiqC9fMUq1oTwDuQCLom4hKQIZtySmiw350TPI4QI6oCbkD8QExKElPuOIzExTIYXr58aYaJq4RiRmCGktRKAJMxBG4YSFi5YkqRcRyysRmozvPM6XSqUVojCM1KekqkclBrsCFFYopM04Rih6TVTAjUIsK5/I1o9zmXgVgtAhtTZAkLQy5VRSGumg+Ny8cYaiCGmbAueKw3peS5q8vOKDCOEzEqKQZwpi0RK0OMO1SifnciGffy5ynVw+GUiPXPVCAR4mLR3RyZN+zL0XdA1O4bZATMKZfzivO5HDSthHWxYRyklvaqWl/LabJDyMrhZ6pqfXKdw4nDZ7AtmSGTH5BCCDabqhPxzkp4FWLMPTQll39qQrz5SsnyqBsc32wMVNl2LYpqQBWsxDQZ6Joh4EVI4pCk1TdkFBhMT5pBQjLAJwQnA5VYss0Ukkpxs8VKZgqT8yQZs69H4vyAc57T6QZkYBgdzikqkWGAKKBRMhbYu6ImHAOiHlWHZfYoSSOOhDPLQSO2wRJH1FygK4I4WOJshONd3qyEahMpJbzzeO+sjHs0ELWshC0z5Xg8AlsJtPc+g6KRT13/KzspxFB6q3px+EwyQWwc/pSzJ5LAWfjXx7/n1d++4e++/1/xf53/LUuaSUtgOAx8OP/AwU8c7w78j//jv+N0PHJzc8MSZ84fZ+Z5Zp4vfPfddxynEccNx2lEXtrheymszPcPDIPjN9/9knefPjKHhY8fPoETXr19ix88Hz9+NCs9Oe6GF5/Bmi6tdE7rnNY5rXNa57TOad+KdE7rnNY5rXNa57TOad+CdD7rfNb5rPNZ57Pnz2dfDDB9SVRLGVk+DGoc8G4AtUgkFcCK0Vkp2bquxJQYctlY6VeaNOSryD0nZVtEefr9ZdGAXMJ2wfmRoUYcNyAtzjwMA9M0bdkQ8vjhJRovInWMa4i7SH+RMs7thRnkkoLT3XVfknLwWBljO/723i26/oTUuTYlkmrAGzUa0EsihkDIX85bpoQTi1gi1hNT2znaBGpZsYjsPv+58nRWBXWd276WXF3TZpkAVkq4rhwOB8bJ+p4ui/WDbJ22ZlZkoI8h4J2vGQspJdZgazpMdpBbyQYoUxSRrXQyms2Xclb72dt4v6CTdh7XmQ5PyboG0IQTYVkWQrTyXbMNl8mhZGBkDxNv64OR0+d0XceUv1Kyd6GJ8/nM+eHMfJl5+90r3HRAnWMOK6pW8jm4kVii4giuFkJa1L2UlUNinEa8n/DONh7TOIIISQXnRnKOBFAOftz8soy5LUMWEdZ1tZ9pSDVf3/YPrnpPiRBLn1vH/f090zThvc+9ZbcsA7M9QVtbU+vJWnCjZCCcxgN/NX7P/+Xm/8h/+MM/8eP9j/zphx/4xcvvOacznx7u+V//6/8t5+XMw+XC//SP/8zbu9ccxxM30w0ffvhASBEV4eWLkWW+J4bA7e0LhsER15WP9w9MhwPg8Cky3RwZ3UQMiTgnVCPjMOxaBHT5j5POadT7O6f9POmc1jmtc1rntL9U6ZxGvb9z2s+Tzmmd0zqndU77S5TOZ9T7O5/9POl81vms89l/GT77jw4wWbTOsgPGcUSkVbrBe1Ge5sValsUASJxFBp3f+kRmNC8LWg480xKwfEIKMJSfCymUhWhBuO2puPWh5LFR5O8FHOxz2RleLYNTrX/fOZQqKglNVyTA5wG/PXiuve7ngEI7/jLnOt6iQM1ZGjn7QVWrzu3wtqyzz7xPMvhtIChb6erPGRt7gmpLBK/XoAA9FJ2WzyyyXK4vZOwy+ZSetG2/y5Yo68bD7W1NUwKf+7AmdmMqQ67rbKqqJb1JTafOZXK6mk9rN+1afnZd80ahZhY4x7wslhXgrPS1kL33nhhaPdpYyGBPM++n3qdlXfJvToR1DcSUEO/w44h4RyqbtRgBm2vJB9keZIRjtpcQSRsJtZuTrCNbl3K7lXu7nLnRgq6qftZfW2n9sOCC5nfRkARZp5/bdNS1EerBjtfrWJ4zyMCNHJmcx51W7vTIpAM/PPzEHGd8HDje3qDeE7wjLO/w4vHiQSGGVHvrns8PpHL4pcC8zKRgNhyTIpIzKdbIMOTxRmWdV5JP6PrzcaLL09I5rXNa57TOaduDOqd1Tnve0jmtc1rntM5p24M6
p3VOe77S+azzWeezzmfbgzqf/SXz2Z8dYLJFUdZgJX/jOFjUVjdnFsllemJlj2iqQO+9x3mP9+MGmMnKGVFQFWJM9rkIpDamdzWWxoFCCIhQnT6l1PRO3PqMwn6hr5/VXttGJZ1zhHxPuf8aoAp4mYFJBgMDokdkdTWpQiztmJ8aWwGrz4mI4P2mA01WCikUJ7BDyFQVcU12ALn0MeZD3K6AwYCeCnL5r48n8gVpibnNnCjO3ZLx/qkF7Pd/rc4rppd1XasOi60VYq9ZCi6vBzRr1fTtNcPFMgWoYInkAwjzuhbCM/NOkBzi92tzbR/Xm4EviZGaZbKsS8ANwjQOlkFgiTqWgSOpPiolxbmiY7ez3Zb4dmPKmzUR61W8zCuKME5HhnEi4YxUBDQGkIg4K0lOqmiSTEw2dy8OcQkhZr93WCFtwns7aFMxvQ3D5ochBEY/5k3HZhPl52tbcbKRXrvG7QbPALs8x2wopcQ4jtV+WhzYNkq2vNvBjvoI9AEGPCOOcR65ufkbvp++49X0kv/b//D/xEfP5A+4s+cw3KJ+YJJ3TMOEd86yXgSmaWI6Hnj/8R3D4BmngaiRT/efcMDxcGAJATcOOO/59OkT3o825+RYzytzvPDwBUzo8mXpnNY5rXNa57TOaZ3TvhXpnNY5rXNa57TOaZ3TvgXpfNb5rPNZ57POZ8+Lz/7sAFNKiRBWBKlln+u65sPdSm/JHGdUM6JSLriuK+M4Mo1jVbaVtcHgBeuTmRqF70vGrqWUJJbI8eFwRNw2pdZYyhiKwX8uIlmkXdyUIn7wrOwdpX12C/QpWTTXiYccryVhRkeGgoIf+eMYI2PWS4zxq4D/ZZ2MtXTTxmT9SzU/Y1lXShkkUMuFUSGFaOClCsOQcS4PNhPX7m9/hlQQvyLZ9vMW6MslBURUIawrIrbGwzDgnPU1LY5+OByYpomHh4dMernUeF0RMZt14nbEb4cvuqwrAAeaWNeI966CT6tj53wer90Tox1OJ4PjP4c45xi8YxxHA0FnB0QOw8C6BNtAETOQ2TiAnAlhvUNpMmCuNze7NXCBmCLrPPPx/p67uztevX6DYn1Oo4J3jhgviAw4d4NEkFwGTeY31cS8nPGDMAye6XBgCfmgO+eYpomYEikT6Lqu5t9i2Uio1A2A2aSrX8W3qv86QXJ/U+99LU1u7co5Z8TuPVPulVxwqEh5j+nOyu5jsjLuZVlqKeu27tuXBkWTMoWBpAs3Kvxq+AX/53/9f+JTvOd9+MT/8Pv/icOLA/408Mvfvmae7jmHB5b1wtu3b7ksM+fLPb/5zV9zf/7EZbnw/uM70+X9Pb//Dz/xq7/7e+t5q8rLly+5zBc8jl++/Z5Bhcv5wjJf/rPY3X+N0jmtc1rntM5pndM6p30r0jmtc1rntM5pndM6p30L0vms81nns85nnc+eF599McCUkvXTzIVqFoFOWwkdbCWWauuVnaeNVsK6LMzzhXE0AHLeejaaIRYjFfIjsxNqLSmUHDHclaDpPqq/rivTMObIstTrCtiXxT2dTrXEEdh9v3aCrVQ1P8surM/d5rh3INONLYxFqMmH0DV6yZkDMUbWEHZZC+2zr4FRVe2IwPz3pIkSdz+fz5kk3M7onXOkaAcFpmS9UJ0TpBisJlK06DmqRj5sACtui56LiPXZTAXqS5TWPF6hEkgFbs2r2MyrONV1tkbNKshlp/bZppcWsKqDato5b6u78szd90wa9Rqrf8325RDZ1lHcVtoq3tW5FNJUVdvgOIwEm/W7JrMduGayLL6Usl+RwSzGmFUuHA4H/OAR8bkS1eNzRg+qzVhyebUrvvXYtq/HpTnCn1LkcnkwUJwOHI43hKRIWWdJoBFNK5pmRAZE7Rg9Lf1XRcCDaiDEggJb1kbpMysi+cOSuWFgTdqTe8kKKGvb2glYqWeLQ+X61m/L99hkERUbazdq7Ttbv2ntqNWfiJDypjSIoDi8TJxGz3d+5Cbdchtu8G+FlZV5nVk+PRBeCBIgXRKJyKgeP5y43J8hKS7Bw8dPNq414NzE/buPnG5uORyPDKNjigOiEB8CfhYOaWR0/3k2GN+6dE7rnNY5rXNa57TOad+KdE7rnNY5rXNa57TOad+CdD7rfNb5rPNZ57Pnz2c/q4JJbXa1/+mQjb8sninBjM6UvSlENbGGUA9GG4YB3xiiGaGw9VLc9yk0x2G3IE9JIZyMCXuAFuuT6ZzjeDzWMRfQuI6yFtkbw2ccpblvb1xNeR1baV9x5nwzKRlIDdO4A6Mn12FDp0wg5jDZ1ZnnmZTifj4C3nnWuOnUCNzXUlXL/Eg4UVTyOlQYz8RZdNEA7+fE1jTrhjK+/Zq0QN+uUwUlsU1A6bGbp413HtVYbaFcn9JG3kVah28dvE1OESlAntdHaNbT1TlvdpKXTyk5Irl/pYB8eeNwpSXa4TrnICnItkZJbe7jOCLOIfi8Vg5jnGhlomxrLc0Yvrw+ZfSKE0gaOF8eLJtlOuCHibgGRBSHklBEI6qBFBe8t0MZExnk85zM1mxDmBS8l6qwGCNuGGycef7lvmKzZXwl80NEdn2JK8ErO/0+VV4ujc2GfIBnezhne2+Rdq3ad7Z6KxtFRUkCAUURRAYOfkLEM8rA0Y2cXh/4eP7Ih/N7Ps7viMeRNY2MYUIu+XBA77n/cI8fHJIgngMoOBxHf4teFJzZ/oDHpwNERefEMRwY/A3TcfrimnfZS+e0zmmd0zqndU7rnPatSOe0zmmd0zqndU7rnPYtSOezzmedzzqfdT57vnz2xQBTLZdUi9RbWZfgbVpVMTFGW263GVIBsU+fPpKSgew0TYzjaKWlO8zewDQlxbsRJ4MtFKlGvJ1zu4Uax7Eugvf+s/CjqrXszHtfD2V7aqHXdb06aG8T6/2YtshvYwTlWe132ANb+bnMJcZIihGNkWm6fZS5sH/WbkZc/1lVCdH6T7p8iOE4jkZQEcZhZCUR4lLJpJRxgvVMhdwP1dnfB2crrU7AOXCughuZ23lqeHWMhoqqslubsh6Hw2Hn4K0YAFuf1q13qfXMjTFayWMmC0WY58A0TXVtVLXOb55npskcIcaIl+tshC8A49V6SCmLjaUHp5WE/hwCvH6skehGcnZ7sXPL6pjn2Uq8pwPOeVICkU1fpa8twDROVvqdIvLEXqHdRGheF0VJaWa53PPx4zv+6q/+lnE8cpmX3K9YzaZIKCtRHRrPllWBzcFyh0rGkSBuJKWIJiWlwOAd4nO0H/DDwDQdmOeEYqkKVkqe0KS2pmWjJ1JLkoG6Mcoz2tlOua9s6tpy74eHe8Zx4nQ6VftoNxaFKA6HAzFBjFu2UIyRZVk4Ho+s68r5fGaaJoZxREbPZZ2xlbP+pOEh4MRxO9zw4nDDm/Ell5vveH18xb+kP/EpfOJXLx/4lz/8C5flzBxmBvEEAojwK/8Ls1/nOQxHXr9+S4iJeI4M88Dd8RZi4qc//Mhvfvlb/uY3f8vf//bvf7bt/dcsndM6p3VO65zWOa1z2rcindM6p3VO65zWOa1z2rcgnc86n3U+63z
W+ez589kXA0whBFNKSkguJ3MCKQa8CM5tkfZrUCwKDyEwjQOHw7Q5W0qAzwsFoHaAGFuU28BQ0JgyWGzRvAKU0zTtHFbJAVZpoohs4Focv2YdZCmRQedcNaJhGK7mtL3/scPso4/195QQ79FopbfOlwhoQrG+nprUIsXNeB89J0v9XBvw0RyrV8tSaCFTVc3ZYkTygYel56r3m+PEGEEFkWTRcwtKZ/DJZZzYuHMYmHwFTyJ9yXDIxOjzM1p9lTmWMZTfRaxkVPJztkwOc86hlOGmrUxRm3m1mQNt6WK7Xu16Frt5kk+FHKeXWqpbQF2TgnM5hu4+z3ePVNP6SwItEeqiIsl9gq2k+P7+nhSt7lNwaC3h3pdV1gj6V7II2jEUHSyXM8t8BlVuTrc4NxJzSawjWR9OlEgixZWYzjgmEG+mmHui1qwAcagoSSND9jnnjFSGZi2KaO4763C1r2kR5xzzPNdMgGmaWJalEnkL2mUT126qsOHh/fCoNLr4XGsn7d9bKaWwJRvJe294FVYGb1F/weGB03QkqZFYigmRiWn0/PLVgRfxO9a0El4G1u9nzssD5+XMwsKH+w+EFLh7dcvxcETUocH626ZkpPrq7hU30wmXhOW7C3fTDcfxyPGH01fXvUvntM5pndM6p3VOg85p34p0Tuuc1jmtc1rntM5p34J0Put81vms81nns+fPZ18NMNUo5VAOpdJiLXvDkb3bl+iciOCHgXGadkBV+kvuFqSCdulnSA1WK2qR/GYhnM8HnrEZb3bL3UK1UcNWyt9bEmnLWGHvFGV+5VlPR8AL9mp5SQYS3YFNjBY5FRGGcvCg7nusXo/tsTTlvPWaomMbQiWilEi1x2bOJHC+rpdIBsdCIs0YNpDefVJ18kgH+aqNBB/TQQH14mjlXWXOxtdbyay933qVlmd5P5BUcwZKqpHjAvrFBsuBanVDEFPVTQHZzwF1tszGfnJZplRrsK8nmeJKL9c+88R7syvZhiSXeDs/PGFzsiPIzV6bh9TxfXZECHA5n1mXhcM0bT1X83OsLDaZzwKQUF0QYi5r1rzxyLpRKkG34FmUKXndbXNFvs+hKYB7vEEra1YO4bvW57WfP6VrVd2B/FO+dP33p35ufbGsn32zfsSC+dngB5ImQgrZVuywwsEduPF3pJSY1xl/FMJpZY4LD+s999MDSuLF6zuz5SSkFZYlgBqB3p5uuZluGBlgjLiI6b+fHfuzpHNa57TOafZv57TOaZ3Tnr90Tuuc1jnN/u2c1jmtc9rzls5nnc86n9m/nc86nz1nPvtqgMl5z+A9h8OhKr4tC60gZhqgRJtVLZNgHMdaIqop4cSDlAPNyhMAMSW1oGwYIySUqNaPUQUrnQTEe2I0MnJ+M6i64A3zFCcpBlOu8ZksipQsg+top1mhQ0g7kNoZVwE+LfQE5hhlMJvxRk2EaOWVh2liLbWi5VlPEEl9TiWS7UvYeroKYtHHqKRopYAxRdZ1tbLNnNkxThNJFQ84cYzDwLJCYovclwPfkiqqyQ5UK7orJaQ8lkoyZT0q723lh58jy/JQm65DM6GllIkDRUUYhomYopU4qmVkXC4X5nnm7u6Oh4cHQgjc3NxUnRpobBF42Zj5yTEUrJRss2TdbuufV/7RWn0e+ItttzYhmThUlRRDBZd1Xbk93uBzdL21uxLZLtH3lEIFUisBbTZmT4xNUEjw8af3JA28efO99YN1IMNo1qwbmDtKP9QIEoEADOYaaUA1Z4V4y7Bwsm3gLKKecJmAQlgQRyVhViOT4qPrulYiu729JWTSK1kERQ+F0Es5aVnjNhMopQQHv9tUoFo3MOVZJTOBsunJG9XWdkIIzPPc9HUWluUBxaHJbON0GhH1pLjhkKJ4HXjp7gjrSvi4Mp0O3B1f4G8cf/jxj/zq9V9zOE1Mx4n3796RIgx+hBN4NyAI83nhVm45DhOHw8jDx3tI4JqNc5fPS+e0VjqndU4z/XdO65zWOe15Sue0VjqndU4z/XdO65zWOe35SeezVjqfdT4z/Xc+63z23PjsiwEmnMMPA34YCDGbZXZuJw6rC1VTWlKUCCTW1foVXpaFFy9e4IYRxdd4mwBOQTUaeKiCWmTROVjiwqgelxtuhmhQJm7A+9GcP4PIZV5xIhyOI0pu2emljq0AjfXJ9MSoeD9Seh+GEGoEeuv36mo0ugJeBtE63ahGRKpIjLgUkeRAIxqi4YM4kHyIXi4vdF5ICmFJ+HFEvCeklMnPorFmnHURuHZWFcgeg3cGVBIVKZkb3hFVcX5gGEYm5zijpHTgqJFxHHB+yPQQUZfLENNC1KEScUgRFwRxCl4YSnmxgFG7RdVtrDm66oSYlJiUpLCGwGE65BLhgIodERdTIqnispM5rCy5cL+XsWYRrMGcGwHxsJ5XQgwcTi8IYUVxjKOrZYoisgOKqrcMjpdlro6Ny2CWCdI3/mKE0pbZbrkNJYfCPmr6bsre4dosifY7xZfUysAzn5BS5DI/cDre4P3ANB45HU847/NGSus8QPBuwDuP4BAZ8ujKGqVMVBthmxlaqa2q4IDwMOMGx8ublyQZSTqQ8CgO0Wilyxqth3BKjA5ieEC8B38gRQhqz8M7HAEEBhxC7mtKxDsPGhB1DI5KuNE7UsgblmZDJmKlqeu6Vh8sZaxGHlo3ZgWIi0/XDWl+njM15A1mya6x71PO5AkxZL8p/rqRYimLh42cDEMADHvUmc7vL5/qhmQYxuq0zinRX+Co3PqjrSlWZv3idGMlwRezxRs5ESUSQs6g8CDOMXqI6cIcV/AHlnGuuunyM6RzWue0zmmd0zqndU77VqRzWue0zmmd0zqndU77FqTzWeezzmedzzqfPXs++2KAqRxyJTlSWstLi9EiBqU1ImwGuCwLKSWmadoihVJAwRxRnFSQt96WsYJp7fEoWqN/qtlJGkcpJYrk6Gkp2SuLUJ53uVzyWLYMgPKVkjbPS3V8reI2x2okL3r7L/W67ZkFum3YW0Q8pbTr3Vk1Kq2BSTbM+tJG/1qBUOrrrUxuHEbTlytR58SyrIQMfuM4NZFHqQ8oEVpli1SLGKBYr9Q8AtkKSLUOSerSaB6niLPPG31vWsn3OAeN01Wd2RQtuyQZyDuX7SxfZ+u1lSaXfpkWVd8yPlqibolbSuaFOJyUTIU8LsSmnUek2predbljC97b+n0um6BmUOw+3jZNMcaqsy3LwKREs1NK+Hwo4mZD0vhI2ZRsoOadtzmnkvGSeLh/YBgGxmlk9CNznnf16fylKY9XqL6LpszytmWwnU9jy7vpWSZKjBFRRbFeyvYe6pqVNSrEWbKRyjyccxvwi3+k41ZXu5/z0jgpWTGbHdVnaKHvQrs8ev412ZSv8i7X9lrOu5ayLilGVordC7HJGKkZTQoaE2VL4bKPSPa5YfD50UrUiObGxSpP21qXvXROM+mc1jmtc1rntKL/zmnPVzqnmXRO65zWOa1zWtF/57TnKZ3PTDqfdT7rfNb5rOj/OfLZF8NP4zhmwCtGK+zsu5GyQCmlemBdOZSqVbiq1p
6cBfDar1Z58HjhpHGAtj9i+bsZ/77M7OHhIS/S5hh74DFpo5AF6MvzU0oNnjfw3oxX2QhBWjBoXhNTquNue1h+Saojl3E3P7cPL8+8PrwvxsDlcs6HlkWGYcQ5n0fpqkuqlh6lrREDX7ahCqh1LAXcd+VzUp3XsEia+0wvxQa2+STLPsCyNpxrDkZDGnvZABCoByq2B7ftbcau996RNOHE4XJf2EpceXPyGXPfzes/TbR+Mz/PwJE/jTEhBQAyoMTsP2UTZZsx6jxdISo2f0tx062q5fRoSnz8+J7pMHG6ucE5X9dGRMGKw0kZoDMCWBaRaibhhBPFO3DO7ilaedK/YiDEsCPiQs5t5L9gz7quOzsp8y+k/pQft7bVbpzazWD7t2JHBYDb57W4VJ5dQPkas8pnZV2uibqMu2DMuq6PSuN3G12KnW7PLJvv8rwypmsddHlaOqd1Tuuc9tXV+eoVX5bOaZ3TOqf9LyWd0zqndU776up89YovS+e0zmmd0/6XkM5nnc86n311db56xZel81nns///89mXW+Q9kuvo59WnqszzjPeecRw5HA4V1Mpixhgr0BejcE5wfqyK3SnqSsntwl33RWw/c87lnoqhUdim/GsyKRkQwzB8tk+nxT8faaRZVHu3GaxDuQJAIIZATLqb42fZs5F2PCX6fr0WKVmpp7jtQLqYInGeOZ/PqCaG0V8/GXD5WWao1snSDjP03iPePTHzbVztk8qmQMSiw9pcp2qR4ZRBKqWEK+vRrLM9S+r8fJM1QdrKcb3AGkN23u3AwtJLs4AHUP92uVw4nm6qw8QYsVi45T9ETYjYgXNo+uy8qdHmLer850prh1qQHtPbsiykqBwOVmKNy5k4xZ+wqLirmwnJhKA7syjWpyQL/FsVKTFGzucHfv/73/Pbf/Vrjrc3zCEwnE5EzQcT5vtQzWXF3irVvWON2MN0ZvDRdIZAycwRAXGIbn6ZEoQ14IeB6TBVm0h1Tf1W8gs7sCxfLbjt7CWDeIsHBR8K7rTXtxusNgPFD75uOgrogpWq1iyohhxaiTEyzzNjLn0t9lV+LnjYzvV6E1zeWTYtLcmVsZb7ClleX9Plz5HOaZ3THo+rfVLntJ8vndM6p3VO+y8tndM6pz0eV/ukzmk/XzqndU7rnPZfUjqfdT57PK72SZ3Pfr50Put89r8Un30xwFSU1v5uSn18rR88Em0gp9MJ770d1ncVKddsiPa5KSEl8IOrE6vRsicU2i5wUVqZbFGSCDUKGWOokXVVG/v1gpc5hhDqIVrlPS15SHXsOpknicae6Tabb+Zv84Fx2FR/9dSd7N5fFK9lEfYL0WZk+GGs94yHiRcvXyJOOBxGxOU+jkjVS8Ii8qmZj61Bed9TIyyZJWVcug0vf1ZGKGJR8RgjmhJjKU1swSrfrDEfFCfWqjRqASxlzUQBNs4QVpw4O4yseU7JJgCq4xV9blFb8H5AxBGjouXAQy16/7zz2FSLTvb2+Z8iInaA3DKvoJJ7+Ao4R1KLPnNtl7L1ATVgEsQNpiNyz2Ix8lUSXoTzfOZyeeDuxS3DdMD5kUTJwHEMzhGSZQ8kFJJSSksNwBMJO3DPOetvKupIWWflcMLi5wa0ghsNTEtpqpWNbr5cAHee57p2JfIO1AyKMvf2q2QdlK/2uo1sUoMTUsEXbIO0rquNiX1Zc8EzoOq6PLPFoVJWW+4p11rmVFMmLdvBfzFuZfplvO0z2/Gv61r/3t7X/4/Lz5POaZ3TOqc9LZ3TOqd1Tnt+0jmtc1rntKelc1rntM5pz0s6n3U+63z2tHQ+63z2nPjsi3WStQwLM+US8Wy/twZavtoJls92xvzE31qpETbVR89olXH93vL3MvFSDmvOsi89vR53+awYyfU1T4lefV7mJWIOYT/bB23pYJlj84IvYcr+nfo5SrByT02pAm4dowjDYeRwPHA8nep6gqDi7EA5HOUgRSglsU2p5iMdlPls42rLLMvV23rlMWaD381fW72Vi7V+Ny7RSqzWQzf34MyXtY4HW2/N4sTFlrdSxAJInnLQ4UZasumuTr/92z4T5XPyyO6b35/aQCFU/wkhEGPMmxRHWY0UI1Yuav5hPUq3r2r/CI5tE9MUJBsoLjPLMnM4HnDDaMQg1icV7HA+pwbzZdY1al2JPUGKqK6IREQU4do/N79TTY/8rXy1m5Tiu8Ufy7NaUHO75z7Rq7gZb9F32by1X+11rr437Z791Fper2O5trXrdlNR5tbOtfz81Pivn1uk1dO1/rp8XTqnbdc8JZ3TOqd9STqndU7rnPaXJZ3Ttmueks5pndO+JJ3TOqd1TvvLkc5n2zVPSeezzmdfks5nnc/+Uvjsqy3yiuJL+aI5YCKo+b5zVsY1Xy7EsHJzs5UBVgBslOy9B+/RGLIR7KNkKUWmcdgpZJqmuvDle0qpRu3a97QZCGXsd7d3eTb7hbOehvGLim6VLc4MbBgGSHtjSSmRciQRMlSIowCHosRgh6i1mQ/12q/IzkhJiOa1oGCjle4ej0ec86QUURFiXHn4+I51XU2Pea7O2WFzBdvEweBGPC4Tj+S/56wINuCze7bS3GqIGZgH70lqpaEhxgruImKRUOBwONgtGYxrdBbACYL1nk0pR3kxvQ0OFk3WUzas3NwcASsLruuQEofDoa6p9555nkmaGKeJJAUAhAiQ1LI+3GC6dI5YZ/oYzG3OnwfuVr60aXDek+K2kZES73VqJYluqJuUlBIaIyRlGLP9JEWJpCQ5qk9O+BBk2A70E81ZAJk8Q5i5XB64zGeGcSAhJHFQIu8pZkIBDflnEcZpQkUIMeEcaExoWlgvnzgcPd5ZCaviUYUYFSHmCPvEsqxoSoR1JYRkZGzq3xFAjLH2sC1lpGXzuCyLrbPuo+3ee06nUwVA7309lK+URZf1KFH4lBIPDw+77AMRMZt3G+FWssxrWXqhtpvM8q4WD0pP6GsCKHMsRD5NE8uy1Gc55zgej6SUHmVjFcwrm6Xyjms76/J56ZzWOa1z2l46p3VO65z2fKVzWue0zml76ZzWOa1z2vOUzmedzzqf7aXzWeez58Znf36LPLWI8SDWW9QPrva3hM2BP2f4RWxCBhbX72zfVZVfQBwjhBACwzDU8rFKJnYzsJW0loWFEi1uxyc1agvUHoXlnvbdinsSlLcFVkrk1N4B5RC3qFZK5weP88Nnsyw+J7pT1Geimvm6lBI464caY+J8PufyYXvvMAw5EutIKeKcrwfOSdJHa1IDxxngLPKvWZ+Pe8Y+ObbMKN77HXa2d9ZIcdJ6kFtmEwtoZwIfvcc7qeQlzkByXVdKH97yvDLulBLiHOMwId6jKnUAIg67dMsyqGvaqFrksb22stneF65pNhrlu33ZoYikyDh4OxBRXD6cLe70XOyxBRLVkgVjtovmPsJ5PcVt9nE+3xPDgpfEq1cv8NMEztepZu8x0EwRNOUNlH1q87S1UU3oeiGNR5wIg5tQN+SyV6k+BGS/yv6QQa4cDNjqIzXkPwzbpk9Va4YIaC1HLkTQ9ju9tsny/GoLs
vUjLc8uGNDquGwc5nmuPluIqJDFU9lHZU0OhwOXy4WUEnd3d9bjNj+nPTivAHgZR91gu20DUwijxbUy/nbsXT4vndM6p7Wv65zWOa1zWue05yyd0zqnta/rnNY5rXNa57TnKp3POp+1r+t81vms89nz5LOvVjCp1vyBHUi6HG1T1RwFA++23oOtcq+lRKKvSaQAyQac7IC+TLgoYhzHOsFKSqpbxFsEtF3068W30bRZB20GxLUeruckctXHs/lcmzGlWlpJdcQdiWZiegq/rw23zOkpEdnKEe2erCtV/DDgvAexQ/SkHLAnzkoancvgH+vY6zoUneo2xtaPKggJyGdIqIiB0OZs7YPqu9SAnpQKrFg+hthhheLAi2NNod7DVZZIq98aRS6EXcsc7clSdC9iiKiQkuZ3PzWjbcOwGzufXZod0GxP2f+eYiTFgJPr+8qBhBnMiw2Ue5Xt97JmSQvzbACuimpkWWZUE8PgOR4O4KxcdxuXzb0tCfbe9JI1bc8VK4lNaUXTiqYRJ5Ck6PR6E5B7G+ehlg0JV6D6lB8WgC4ZBoYJW0/SsublfQX0W8DedPA4+6j83YvbNQ5tn11IqJBqC/TtONu1Ll/t+8tzy7O+tlEq18G2aW0Jq8ufJ53TtvFdz6lz2vbezmmd0zqndU57DtI5bRvf9Zw6p23v7ZzWOa1zWue0v3TpfLaN73pOnc+293Y+63zW+ewvl89+VgVThiFUzReccwzjiKZICCuXy4Wb04lx3Ef2n4qwFWnBG92DyiPHbD5flmUX9S/jLJNOFGXm5zcKbR30KZJpjet6YVJKOwdEcn/JJrJ3NUObg3N22Fiw6KP3zqL8zS1fXmYeGcPnwNRlpykSQiSmiPPODjjzHnA5GmmHHIrzOPEbWDeSkoIkBEFlwxNymayt4TbX3U/Xkyqgk8cXQnhUvrcBvEKMFcisXam9a1kuHA4HnPe4tJFCSvsXtg5cyg1r2W3BQAERV4ZWR69ATAqkXM77BNTv/vRlkC/QuZtn8a0GrEMIhHUBTSRN+LwpcM4T1kBKkSlnGJivbDqvEeeokAIyeOOtvEaaEqqRGAPz5YIIjNOBw/HIwpDJryUHrVkEAgyDJ2nujFpwQACvpBhIaUV0BXcw3eQNljiX362klLMlRHBusLJlb5k1h8OBeZ6J0Q7Taw+8hG2DVzd+xsz1urLmhSyKP5dnlayL8qxyPY3uVBU3eCuvLiXSskXzC2YVcC+/XwN7sbuU0u6gwIKN5VklC6LFmhY3y1zaklzDk1Dn2P+Py58nndM6p3VO65zWOa1z2rcindM6p3VO65zWOa1z2rcgnc86n3U+63zW+ez589lXK5jQbP7lefl7WFdCDISwME0Tw7CVdxbFtD8/Jc4567uo5lTsOm5uCvGDYGWVjnmea4nZOI67HoIyeIjRjJ0NuNuS0+bJ+WtfnqZA0rQ5keSSvxhx44DLaK9JSSilV+fnRERIMRFi4O72hQGm5nl5b2MoTPoZ2YG8yLYm1+/y4JxFWAHiOpNCyGWqPn+NiPgd+e3WSxxShuS8kVL+vXSLld0aZa/+ouS7NeGw+acYtzJFxfpqxlgPDPR5rqqJmBSNK06F8/yAHx3TINvaCIDPB9MZOIzjSNREiokUo5UIO1cB1w50NOAvoCnSRohtbk87UpmvXP3+GZG6VWoA3zIC1NlGwZEQtf6uH84PDH5iGEcOp5NtbLJzn06n3DN4693rnKulk8saiEmZ/MTgTb8hWU/ZsC7M8wOXy5nT6cg4DGg0u9nyhbbnxlj6bNom4bKspAIuIjgHXiF5iGEhqcONA5GBkISQlOkw1M2BOHBp66FrqtsAUUQZBuubWoC5LUcdx3EfVRdfs4kKLrRgWUD0crlUkCwA2m4ey30hBGQVxDuc3zaYddV1ywooa9A+o7zvGphLOemnT592faJPp9PuudfS3l/k/v6e4/HIOI61NPupTVqXL0jntM5pndOemE+Ze/v7Z6RzWue0zml/OdI5rXNa57Qn5lPm3v7+Gemc1jmtc9pfhnQ+63zW+eyJ+ZS5t79/RjqfdT77L8xnXwwwiSoiIOSSTNmi+2tedMQzjAYequDcUAETlKRii5yB1RxHUVKO1lrJGwoe7B2AZgeLqqTcq3SNAfGCSgZZVcjAJt7naLKA2KFc0zQx+IEK6iLoVVzY53K3pMowjiQS6NbzVIo9xgRjHiDNd7WIvBmvgFrPSCNHIYRYx+Wct7LVer99aZPhcL1cW8S4+V5r6YqxJpJa9FNJ4JK1qkzmvuM44f2Ay6XEKUWKXXjvG6wWW686PWm+bD7lq/TWvBZVBWmi76YYUMWJgZmqGsirM5BXSDGh0T5HQDMAJ2y6ZkZqh8NpIsSI8wMxh7bFFQAiZwJYdkHMX+M0IL6UZHqzrzIBJ1XxCQUHbig282iG9o6UKDZVNZUZsth+u1nYVrescMobiqwTIt4Jg3eE1cbrB8eSgs0j68Xsx/6TvOkQZ5kKZf1VIGjEqcNJAkmIKCmurPMMqkzTgdPpxqagEacbwJtd2axsg+SyrxoWkDcqImb43kNMK5oc6AosOHJPVsnmqqBqmRloU0qsedMUE2VJ4jrb4Z4KcV1zZD4vlXOEGOrmqJRtluh9G9UvAFgi8sMw1FLXp6RmDJD9vlmusgEgWUdkUcNCnN9tIguRFLkuJ22zEcrfy/hqNkjz2eMNqvWCbsG9zL/L16VzWue0zmmPZtg5rXNa57RnKp3TOqd1Tns0w85pndM6pz1D6XzW+azz2aMZdj7rfPbs+OzLAabyVf27RJ2FJYQc5Rrxw4SBN7hSuplJoszZuS1irWLgFFPCjrAzQEqYTRSnUQzoNdnheiEGpvFo4EYypxQr/aROvBxEFjgcjvlgu+Jc2WCkGKviRawHZUocjgcDHcllmNlXRQugFq2AiAM1Z7dzxzJoqyD2cEQcYQ3Ww1LqzK61SwEUw/GNBtp1MJUqJNOriuS/G+FFEjFFEhGklOtZmek4TAx+rM8LIQIWzfTe2fjzmDUV4Mrj0m180gyo2J40/xZnrLxRrKLYAxHBsgmcOJzmKH5UUtRKTOKEJM4IPSWco4L9eBhJSVlD5HQ6EFMpjd70pSgx2kYiqZVI4jPQm4Vk0rdrXWWtHNGVLRvj6QhtyQiwbUN1Sir+7O/TbHJa4vV2b1JFMtCnZD1QR+9ZRZjGAecdawxGgqo7oC+LUaLSitpnebkSiajbRko1kpKVwjqEw3TkeLohIaChzqMSffYlcdbzOEazJYcSNbtcnot3Bejzs2TBuYkh67XMXWMu8VQrBRYhb0gsi8R7QcQOpRzHAwklhJXxeLKMkKSIdxD3vUmLHtqMoesMgAKeRV/t2rbZBylZFlEL9CKCF2e2lqxfqii5h3Aux9Wt/LglmPJzAeNrMC/XlQP8zD/DjjhaEBeRXZZF+84uX5fOaZ3TOqd1Tuuc1jntW5HOaZ3TOqd1Tuuc1jntW5DOZ53POp91Put89vz57Kst8spA2tKvdV1Z15Wbmxtub2+5XC4ggi+RzUaR130H62efeVfp
hUh2OFOKlZGt68rpeIv3OUqZ+3ruom7OShAPhwMpJZZlASQr0Yy3GoFCJFYHBGp01v4mkIH0WsQJJFcP23POsi1svhYx9YNnWWbcMDCNEynFDCLySAHNdKt+ZPe5WvTa2MCcrLnADDWXx4kjaORyPqPA3YtbSkQYhBgXnJOqO3EbIZf58xXD0Uzkn/tM2XSsmDM/zA+IOPxo5XooaFQDs5Ss32nTcNaIyON8zjZBGYaBZV5RtRLhaTrY89XKBZNuZX/rupAUjsdTzYBxzrF+obr4GiQ+N7/PRns/c+tTzyygRyrlj4GQo8QFeJz3fPp4z+Q8h2GgLdUs7y7ivUdUSZJ7Aucy3sE5zvNcAWI6HfGjZdisSwSXD1t0Dk3mg6HpN1wi+W6wbB2NRYEG/c6B946QlGU+Mx4POKdY0kDuV5z/cxn5VYzYVa00eV0iOni8dzUrAMBn8KtYkqSSHuz7mhaMastDiw0V/Hh4eODly5c7ggh50zpNE5fLZYcppe/yU3ZhB1kmpuFQwbr0Pi3lw2Xs5d662S0bOpFHQN6SUTt3EeF4PO7KZAvYf81mu2zSOa1z2lPSOa1zWue0zmnPUTqndU57SjqndU7rnNY57blJ57POZ09J57POZ53Png+fff0MpnZSaev9dzwea59CG9j++uvI1n4wTw+qlGyJZGPI953PZ0TgeDg2/VX3Tlbui+tK0i3CKgKlRLOMo/6MojFkYNqeVaOCzuWD3Pa9Ezeg1jpGciRWc/mtpoSGAGyGZ2AuWFQ+X0/zrEYtkrVk19h7k6pFMiuh2oU16lj6Mar1lnTe20VOrPQz6xjncYNHvKuHEyZVoiqj91guxJ55WiDbxqlPEoLW6zTrHIIm7h8eOJ3u8MNgjhiKBk0MiA0EJJmenPOIi1X3hbhFioNS9dvqzsbscMLO4X6OPGW75XvroNdfNu3H76g2c/UsSql2fa89dxiGunbtYW11nd12mFv7bMA2GI0dguaN0mo9Zv3AOA4gzsp9navdT8szY4zmZ8PYzE3q7qP0Ay6GIIhlhgBrCPiw4HAghg9DLpNWL/XgPe8cqiULJAO+WibOOI6o2sbJmwOTXNEX1d+XZalEJCK1hLNsTIveCqiXa4dMmAXTis+v6/rokL+SXVDIoACx3Wt9nNtD9IottutdnlU2n+VrmqZHz9b8vNPpVOdSpMx5h2GtPXX5s6RzWue0snSd0zqn5RuyvjunFemc9nykc1rntLJ0ndM6p+Ubsr47pxXpnPY8pPNZ57OydJ3POp/lG7K+O58V+Uvlsy820CtG1A6glIOVQ6Paa66N/qnn7QH/8edF8eUZKSXmeTZFHQ91cQpZiOyfG0LYHcDnnN8W74lxbJHZqzEZylaQbcd3PYeUkjlrM2/N2Q/FMOxgtz3Ig4HSRhz718MedFS1OkoRe5ZdV7IaNEem/WCHkWmJ3mI9QsVJRhWLECfN7xPBeWeHjTlXkKeSSQUkVUT10Zjb0RfAMxI1IpkXW0dfotZI7knakLZr9CTSEPbmjGZ/vjqMskVoq16kyaxwrhLylxyitbvrn3ezu7L5nyPtu8vG4nokLbh4Z/14Q4hPjuuRXVwTUCUQNUBKEcX6p07TAUTsEMOrZ1SdprTTadlw2M8lGp7HrNmOAdVAjAspraBxIwOR2ve2TN7APSEkVCOa4p64JGe7FBu4WpdrkmvHXwCy/VvJFmhtqowNtqyB601dC7itLabU4seeiFuy2G0QP7PmrZ53xH217m1mQ0v4LSF0+bx0Tuuc1jmtc1p5bue0zmnPXTqndU7rnNY5rTy3c1rntOcsnc86n3U+63xWntv57Pny2RcDTK2UKFYIoQJnfYizks2nDP8pg/ycs7WZBK3yHx4eALi7vWNdbQwhlOjytpgxRpZ15XK54JxjHEcOhwOn06kabVX+bjwA+/FrdhRVclSWnRElTbvflb0TJjV9FUKUK7CEq0W9toFGR63etvGWWWzPKgsfk0WCD4cDx9Opzi0l6xHqcjlvIQQwQpymCedzFDsDpJWPNg6vWssryzC/gJ3b+JNmAiY/fwMORBDvNpBvdeQsSl0cZ1mWvK7HvOahgpLpxnqFOucYxpFx3Gy1OGdrB18c82ecUzIAuSfG+zWpzyUDgttKwEOINapcMiPWddltqsAi4uXzFqDBHNqbSvG5X2k5kC6pogK3d7c4N7DGREipWlGbLVRAlmbzVTYZzkn2jaLPhBM7TNFJJKwPxPWM6MrorGdwCispRMQquUkhoSFCjEYI2WY3sDWyQeQRvogIPmcEtMB7reOyRiGEulk8nU7mI8272k1lwbiWHErGVCnRLxhYNpZDzowp61hKVrcsq22j7L2v6znPc80UaTfQh8OBeZ5z1sxGQGXNh2FgHEfGcazjarMZuvw86ZzWOQ06pxVdd07rnNY57XlL57TOadA5rei6c1rntM5pz1c6n3U+g85nRdedzzqfPSc++2KLvHbg8zxzPB5r2V8BzqKQwZnyr5XeOlkBI3JpmgGfIOJIOfpflOrz80MIvHz5EuccHz995MWLl/W5tohznbyqMg4D3g91nDFGLpeLKUv3fQc1Kcu6oppwbAvVzq8AxzAMW0RTJB8YZsZS+qymlHAiFfjs2r0h4SxKrmpzQxxtKW0dm26AVPVmT6WUCtr7tmc55ysxPtyfeT0dTJ8JvAriPH6AYdgO3tMEIgb8zomBQVI7uE1zP9h8+CAtSArWE9M1pYt1zR1Otiisc9YX9vbFHeI9MSUIAREjEqeOkCQfPmfvGIcRBFIMDKMjzDPn8zmXMWp1xtPpiB/G6iym8HxgGexJSuxwwRQ3HV/baytPRWfbtWjXaHvGPjrdkkUB3HJZihEvIGh1+OvNkohwOBxwSZE8nuuodnlPeb4BhkdSJIbA+eEBklqvXiAhuaQVoJBVzsRZVyRvklJKufzZSMYPg2WjqNLk8uC9lZYCTAMscQVmSGdkeoEmy2sYhpEYt6i7KiixAqpic7ANktnofLkwHI5VbyWTpNhrq6cy5vJ70cc4jhVcl2WpIG+k5Sr4AszzvLPbYmfjOO6I3UDbId5AuZDO8Xjkcrns7i9SbTRLGW+MkXEc63PLmrdzK/c75yoZF719yYa77KVzWue0zml76ZzWOQ06pz1X6ZzWOa1z2l46p3VOg85pz1E6n3U+63y2l85nnc/g+fHZV1vkWdTeBuvcdhDW577aeysoijz57OaXHaBVEsmTKVG6ECJ+2CK4BZjLveu6orCLMBYALuC0G49SI3hy5bxVeS1oNyWmpYS1Zj6wZQns79s/t7wXqOOyzx7r6VqvRT9i7NFeaWPR9EiPT5HIdlihgGx9ZZXcf1aoB/ntSk7LWFLalczW8VU98VjXYNkCGYh1u8E+c9vf62cUOzJdhRgq0Jd1c85KOjc7MDuNsTjYY9t7Sr//sRu/vc0/naHwpJ/o/rM9SA81UwUeA3r5esq3qk2qkqId3rcsM+uyokkZhxEhlyEjdtBetnUjSrOPIUet23cY6ZM3Klf+myIaAymupLSQ0kyMF1JaUA2WLWD
1qXX+khdcd1+aMxq0ZuekGOuBl9f6vdbHNfC1oF3wotVr0VcB0Wtdt9e1+rXrTU/t2rXSvvdLa9UCd4tn5b3X4y7zbXuudvl50jmtc1rntC9L5zQ6p3VOezbSOa1zWue0L0vnNDqndU57FtL5rPNZ57MvS+czOp89Az77YoCpLc8qEbcSyUqNoxfQaJXQKvmRVIcoALWV2rVRvfKeVvEb2G6KKO87n8+W1TDsC7M2MN5puRqVc9Z7slXkk9KATynzrM9uFialxvD8BvTtGOo7tEDb0+/UxihdJqTruVQHyVHIcZyYpimX0LldNF219Gp0WBTZDnVTIGoygHeyN07FylO3l5FiymP/+VIAvNpEWVfRerhciVDvwFezLcRoB5SplagaodvcNmdzeOdZ14Dq5qTS2Eur//Y914D/H7spvH5H++yniCWlkklgtjRNEzGmSmZPPbuMr7U7e1YipYimwLouLOvMui6cL+Yb03S0NcdZH1rnKnCbL/jqg9u785jRR2YqImgy2wvrQpjPaJhJ4UIIZ0I4o2nJQJ8BPIIm0OQgOUj2jKIXi95bH1WfM0+KH9QsjjyQkhVwTYolyt5mDrU6bK9rn3GdIVWeUT4v1xdi8PngviJlU1yev5Wq7zNQbOMa6lc7jqdIo90MlPdN07Qn6S5flc5pV9I5rXPaz5ln57TOaZ3T/iKlc9qVdE7rnPZz5tk5rXNa57S/OOl8diWdzzqf/Zx5dj7rfPYXxmdfbJF3Pp9xzg6our29rcBfjKrt66dZEWWALchfG3xrLZqVet0DMuTn2fMjdnCaPXObsBllSonL5VIjoN77J5VFM57yFWPET+NmHMPjyHuRGALIFr0ui1fnKiVzIQK2yJtvbPPONLj9Vb8U734sdV66n8s4Tjm6CHd3dxwOB8QPkFZCyn0lh4GoUvti2ohz2WBS3JDH4h2wgbmQD/PL7xJVVOQz9PTkoE1X3qLY5tgpO6zQ2oStqU3QOUeI1lcyqZUdhjCjSTkcDkzTSIyJNQSGYQLZskN2jp7BLKk+4qd2Y/KfIpLn2T6nBZWnNhCSyXsYBubzQspjj2EliWPK5cZo3BHgU5uo8o4QIg/vP+K94EV4dfeCH5fVIt/jwBIDOI/kLJMWKA+HQ41QPwU2NoYcSc+bD1I+IHBQRnUcx4EkjiiBEO/xLuEceCYr5c6bDOccSQ3srfh1A1hYGIbEdDgYOSSzk9u7W5ZlIcVUfaaNxJeS0nVda/nnsiw452r/0BanvPes6/poU1meW/QwTVPVQfkeU8RJscMJVa39j8vPIYRaKnu5XHa21vbTPRwOdRzXOi/jKM9VtUMnL5dLnUNb2trl89I5bS+d0zqnfXGKeZ6d0zqndU77y5TOaXvpnNY57YtTzPPsnNY5rXPaX550PtvL/6+9d22yJDnS8x6PyMxzqrp7BsCCICmTxB+k/yzJRJE0k6hdkjKCKxJLA8EFiAWBuXR3XU5mhLs+eERk5Knq6hlyJU7NhsNqqvqcvER4uL9vwPwSg88Gn704xTLPwWeDz35ofPZiBVOvnD5C1kfE9ujXvlDPGXS93lBfVDWkRaiF6vL1Gaq+kMs8l4UGIbSDxkQ8uhdL+eO2bSzLQuyyEHqlilxF36E4vdXqOwf6arzX4OvIXidSfpQyMGoVn4OJXxtjiTKaG6nrqdNFfc4L0kZttrNCNwl/tReZzl3/11bGVq4xVTAIIXbrI/uc6Ai5I64eXKqjU8CpEtuL4zfD3RqCTATx6L8qLXosHYEAvoZF3yFGUsqo5rYelWRjnAhxRkLEyn1msKbkPTwrqVBpVval6+b0ZP7wLNB9aoV2Dj8SfyWaa+A/ALV5Vsbl4iDQH6RHAVP9RHT5+WdCDMK7d28xMx4fH7m7u+N0vmE5nV33pU9sDJF6AKOaIWHPINDn7NJ6srRC/uXAwDLXeZ45LQvLFL0friUoPyJWDm8EEy8rFoE9TaUrKbU9wyjGQthyjNTDXj5fQTNnP+jufD6379++fdt02mcC1E1j/7wa1a/A2q/ftb34oZM7UVbMWZY9k+d6s9uv745lcrDHhld1Q6TWDheEvSTX5yyoZh4e7r6DrQ4ZnNbJ4LTBaZ9YocFpg9MGp70OGZzWyeC0wWmfWKHBaYPTBqf98GXwWSeDzwaffWKFBp8NPvuh89mLAaaaKRBjbCAlnZOb9f37mi10eCQHJfo/rVrMbj8mDRxFuv6mwBQjmv35IdTI214aKxLags3zXLIIKqk8L/s3DXXb35V8pAJrld5ZBSesghpSD6OrUCQCEggSy+LRxtve0z8ad5oKFvW93RubUg8EYR7RRw3LqQF6TrkZk5lheSfWIKGB/rNg1s35AFDQHbAGVojTx/7JRxDE6vmCiEyE8j+MfVz9zSJIcCJHBBMhqwO94SWLDURjKVWVgEkghAk1SFkJtYxVxMtvC+Dr1WibI3IE+vrd8+B6oJwnP26TodnmTsfH+0PxB9PMenlEhAM4CBBMoOjp6FscAKIfTQjC+XwC84jz/cMDy/nMfDr5ONTtLRaiVjOMQpydbx/WtNiu+/y+ISCUnwJAezZPIIoQLIMm/0GR4mCGYbJTsKEF+PdMnYovVY+CtEMJd1fzcvAKml7GnFvkH47ZEdcb0Zp1It0BlvX7EJ8n6aZo//RQ6loJov4IxxL+HtwPgN5tSq1sDKkbYau63fUcYix45weOfnj/zTN2OuRaBqd1Nw1OG5y2f9P9Hpw2OG1w2muRwWndTYPTBqft33S/B6cNThuc9hpk8Fl30+CzwWf7N93vwWeDz374fPZiizwJkayGWibGqTiLK7wudIsoS+hvxIL3tazGUN1JxAHKypOqYyIRwoSGSMqbl2Wpsj4+kpJxOi3c3Lzl4WFt0UTvGbliZq7U4E6PhNKDVHbsLO+EXBZKkQBhiqScQYTzeSZd1mawSFeqGaPPx4woNeJnRIFgHi0XhHVTpvm2ZFsIIpH62iATYq4nwSO+Tm6AeaRUCkE1H0ARFDMthGcIJXtBN0yUvD3y8P4j7+/eMy8nbm5uCTFiAtu28fDxI2/fvGOSQMDIejRWkbo2eEmg7VFrCQGTauByIDzBCJWEJJbS1Yha6WMpcFomljkyTxMTC6gT0GSlf+n+sAbwyQSWM1hm3R5Q29jyhcvlnof7yOn8lnk+YwiPl4QaTNMtScVLH2ViOd2i4BkqwQjRTd2smKrt86iiZq1UtGbLSAhur6WEcLf/cGTrslFxv+wIQiLd9mefKzDZhmrCD6NzGz/fnHh4vC/6i0QECRMpZ7dTAmaCmduoqhKQkhWQCEBW5U9//CMA87JwWVeW5UycZhAvuwwBsETESAghTpxubsjN3idIqW0uNCcmmcrsnYwJgVzm87h5+eiyLFwuGyKBWSK6PTqgByXbhIVbVCYsRLJkJCoiGXQDZkQmRKJnhQBrTsTVGqFXuqzAmDQ5eE6+qTpNJ8yMx9VL15Mmvv72awd+8/LS5eQHNqrlctZkAVYx5sWzf9Qyuu39RT1jwpfQy2YdhUUCE4H06OWwb0435IuX85sqol
YI3Tc2VkrGgb28fprYtq0RwLquiG0EC0w2ISGDraRtN7UYJ27PJx4fv+E//+EP/OpX/wb4n57Y9JCjDE4bnDY4bXDa4LTBaT8WGZw2OG1w2uC0wWmD034MMvhs8Nngs8Fng89eP5+9GGCq0a9artWM/9noKgcQqAOiAMEesbUCcgHvGbq/q0YJg+wLWUvQ6jX1/VbAKKXUSjP9BXZ0QKSNpI3LrHe7loGg6odrme2H8B1KXquHlvFJi/C5keUyn2meCBLQegBWAQAR2cEVPBp/AM6CF82cr9V7jPDuEWcfw9u3b90ZRLhcLsR5P7StL5usZcXVvvdsh7JC1umpf9/+4nppG6lWIqh/m0KJrHr5n0dRpTy36tXMMIFsRpBYNgIePXZYtdb7MkggholpnonzTFb1np4ExGqUmT1SS80ZubZXuTbVwzz3CHZ3UR9RLqvUP6/+s1PDQYc166VdKtbKJrdt43Q6ljaGGmXGMwNEpXuOoSqkZHA17lqqWaPpIuK9OKuNYwjBD4uk9B3tItrbtrXn9TpQ1UI0QjCPlreS8hAQ9vLJRmumvlkiYRrQfCHGE1GcrHpdBomkrFjeiFGRKIQQmUNAoeHPVMrmq/9fr10tNa2+XOdvXSlpH8Gv5a2eDRTbZxWP2rVx972KEf5+O/RXjTGyrmuzoXYwXwzc3Nygplh5RsONms1Q5pNzZo7e1zgSydsFCEgQ8rpBUEiZtG2s6wO//e1/4J/903/MkM/L4LTBaYPTrubM4LTBaYPTXqsMThucNjjtas4MThucNjjtNcrgs8Fng8+u5szgs8Fnr4/PPhtgqpPqnb4H/+8q1UArgjmwHvsa+jvccHrpy8z6d1Zlfh85zKUsVgXBfiw9wDczqsAqHen0z+3KKAVfuMO9cv2c4+9nRtu/3B9xdW3fg9GzKSK132k9FK037v3fDqM7UOwAL3J0nk+JT2mfYyPQmo3QkbFZYppv2nVeHln+ZzTAKExAzZ4wVdLmRBFCaPOTEChnGrquQ2ggFa7s5zDmTv+fkx5Mdhuturkif+Sg6+f0dyR1B7gKKNUGd+AJhCBP7vcDCl3rOWsrA65k7WWTqW1czIzz+exTFpwUEdS0AVEI80Fn1wRf360F6IVIqHoo6+yH6AkSg4O3GX4Inzm4WcayR8g9I0B2e4NG7PXdqurrGqWyBmAHe3d/qMvpWSD+nZZr9w2O5iMx9GD/VMdP51/vu75un8F+TQP3olPXE2Vtj2Rzjav78/0/5VbKFH0jYYZpZluVtF34+OFb/ub3v30y3iFPZXDa4LSXZHDa4LTBaXUG+zWD0364MjhtcNpLMjhtcNrgtDqD/ZrBaT9MGXw2+OwlGXw2+GzwWZ3Bfs0Pkc9eDDBdA/z1hCuofU4asBh+yBZHZ6zZAuLo056rOZNVOS0LIYSDQ6gq67q2cVCeeU0SVfqFqaDcE9b1wvdGQQeOxa1QMy9RLdfllLEoTNPcv/Tp380693G9qLsKKEV/bZx4xH7bNtZ1Zds25ugHnZ3ON0iYyB0RPun1aFKM53qNP0c+B60+HW910o5EHx4eWNeNn/7sdgd202LIJeKqmUBsejIyZgnVxHpZEYxlWiCE6gVeOqxGDNZsIk6RqUR2TZ4ZYtX7M2N/kSAO9uMjfAkkngP79vzm+FLs+IJB65+5rivLPBNC9HnqnrGiltqzU9qYp6nYhoNbzomUN87nMx8+fADgyy+/LIfKCdMyA4G0eelrSonz7Q3T7HZbCaIeQlc3WQCXywVMmEQwyYAV2oAQIwhYKb/2HreZyYxsGUzI6RFNJyQaQRZS9nJuJBAQ5snLwc0MTQlihbiAkp2c1q0Bn0yBqRzE5xknj90G1Eibl517ae5+2F6fkfTcJq+uYT/3HsDrWvjzfC2rvsBLUKdpapkcVXLOjjtweE/77vBsIauypQQCimK6k4dq4n59JK33bOs96zoOj/0uMjhtcNrLMjjtuWsGpw1OG5z2w5TBaYPTXpbBac9dMzhtcNrgtB+eDD4bfPayDD577prBZ4PPfmh89mKAqUoFlz4y9t2AgCfG3n/eR9zaO+L+NyLklLh9+xYryugzGNZ1bQqvJODg8fkxWTe2nrjq735MhwgfT0kjlx6VSGA5TaVX6xUJ+o3HFpwCJp8B1RZGfF6qzqZp4u7ujtPpzM3NDUbg7uFxL4NjB1Ufu0eA+2ebQYxyMO7nAOtTAzV2Mgni62ohej/Whwd+IrWM1ScfBLJ4mWpWZSrvURRRxTSB+UFmUwxM08w8LUDEEOI84Xyx20Qfjec72miVmjVzmFUH3LsurhflM2R9rb9CVGLVryJZ9xLLbduY53PRZTnIzbu6OgBNoZSy1gP7FD/ALpPL9YcoNrAsp7IJKrage+Td+wZ739xaglrvq6BoZn44nwLZxyrTRIxeShqDoL5zcH0EvAeyKdEELHPRDcmBQEKiYTniaxkJ0wymCL4JSwpBvRexmlB7oW45cTqdmabuMMnOFy+XS8s4apiAEOPRx6udtMP72DdBfXZSTwaVAGtpvH8n5LRnENVyYyfidMDNy7q6TsoYUkqN3Pt3TtNEygnLYKqcbv3fqpn5fEveEnlLaL7wT/73/4X/59/+32i+f9EGhxxlcNrgtM/L4LRPamZw2uA0Bqf9kGRw2uC0z8vgtE9qZnDa4DQGp/1QZPDZ4LPPy+CzT2pm8NngM/7b8tl3CjBV6Q32UyDZXcBTePAosuHg6BFS/+ZyWTmd9oifmS94fyDbNdFUJbZx2V76+BxA9U6bc24Hq12P/QDshXDMH3p8P7uLGw6aDjIFQAX/T7nIy/g6mKjPq9kC7OOWRli7ox0BRw+GZ2YECYUQdX9mKL1aMddPt25qtdzPB+s8+d1AaydF8MPfKDODGGuprhGmkiUSBCpJq6EYSMDctZuetdCFlEMGKXYgUySEiRDqoYqlRDb4u6tze4mnO+W1fl+ST82739T0drGTeLnPPr2peeahfmfRRx1fXc/aM/b4TOs2RRRbE6h9bQVyToA5+OT90MC+T/AhkwPvA1zLqq+zaq6JL4To7yt2E6T6b9k4OIXsZGi5HLwImKI5gQU0KnEWJrnFj2ss/UBD927ZsSHGUrJpXpprmsnJ+4s6EfhcQhmLaUZCoCY1YBk/xM/nVwG9j9736/3c3zVToIJ+vafPSqj/riTw7Ia44F+PO9fl9m7Lwct02xq4bavWzbFwWmZ+//u/5puv/0AI369kf4jL4LTBaYPTXAanDU6ra+b6H5z2GmVw2uC0wWkug9MGp9U1c/0PTnttMvhs8NngM5fBZ4PP6pq5/n/YfPbZFnn1t3UgUQdTDeE5J2kL01Xn0dzaI8f98x4fH5nnyZVIWYhi+JozoZSz9uOYpqk5tYi0Q96Ml8ZlbbG3tLF8ImPgQCbleokThkdggwil0A7gykGK2feZBLhj14PurBBF1U3lhPosK0DpsFcI0vZSNcyNb9u2drjaNPtybttGViemGCI5uLNZPV2vgr4aSN/XtnzH0/Xu7aEH+cP35XeME4ISLBNLpHSKU3Hk0DYBt
ZRUpJTnyU6CbgMZ0+zAghBiRGJsBJFNCTJBBQqRBhYhhPb8wO50u9afl2s7uAY756PeZnzieqWb50C/35BIAWQA033zIeIH2bk+DQhuT+qf1O8wQwJYVtdXCGzbiohfs21bi2yHYndqvunQrGh2OzydTm3MlRAqOVTfqkBUSyjNctEDBdgdBAUHW1V1P7RMTgkRdXJJKypKyAkxYTmfUImoGeu2McmMBCFIQIqtCjBPEymrZ5uEQE4JBZbTfIz6YwTTslkwpriXrQpyAOi+tPQ5nKgYUdetzwqoPlgP9gsSD9dN09T8p2IT5d5s2oh1L3fVQ9aRqjLFyTGPQLJHv5/IuiUCQgzCMs/86Y+/5+OHr5ni+D8u30UGpw1Oe84eBqcNThucNjjtNcrgtMFpz9nD4LTBaYPTBqe9Nhl8NvjsOXsYfDb4bPDZ6+Kz79wir06kH0yVEAIeG94VWaOjZhUQrH1uqlhKLMtCzonHx0eWZe6Ioygx54MhVkVWp12WpY2vAdMzfnx0VGnjzznv0e1nrq/XqBkxeHmgYbtTh8Ake7Q2htgctb+uglrwAVTWYx9VQQuTHTyvcCIEAYmH8d/f33N7c0JM+fDNN7z7ybsSxSzGM007mBSwdGOVEnb1aHyLXBan6EsVK6D1uuv1lNV/FMHa90rwT5iEss5aHC0A3mcylHWTkmkgNUqOEQSSJj7evS9EMROIxDBjSDmszyPiYoCWvp0dqF9r+b+VXJMHuB9cLhfWdSXnzBdfvmuguoO5/0e1HE7YNh8FYNV7cQqGiD/v3bt3LMvCb37z1/z85z/ndDqVst2IuAFyd3fnkfZpcrDpyif7MfeSUuK8nBAJaCElU6VpufS2xSLRypbOhG8/3nEqfV11u+P2zUQIRrYHcv6IciIzgyxQ+ySL4C1GDdPEuhb6NyOVDQ4GmhNZM/M0cz6feXx8JKVCLtPk/YpDIITI+w8fmKaJ0+nUMKUnsTrn63J4n5vPcVmWhkE94Fe/qt/FGA99bVX3zB5wvDydTtzf3zf/qj5X8UM1YVmx4L1pJQhEYZKApcTl8Z4/fPPb0gN1Jcb0X2qefydlcNrgtF53vZ4Gp31eBqcNThuc9sOSwWmD03rd9XoanPZ5GZw2OG1w2g9HBp8NPut11+tp8NnnZfDZ4LP/1nz2YoCpL3d7LrJ8MIzgJWe7URvugQ3XaE7XnlUj1so8zyV6KWgpIeul/rtfiLoYNWLaXnYF9s9FCmFfoP4d/TzbdxWYzbMgMGtzODqg918kOLBbcUrbX4iUaG79zPAsgQgHY6B83uutJzQ1L4mrZajrujJF10HtxSolIutGpIg5kHpfSSl9WaV7374m1QGqMV9H1OsYUHMQrz0gAdPyDO9q6jYU9/JSEQgxlEwG2cEm+DWKsm0XtvWCpkQMgXmemQvQWCUog1KtiKnbkPOotej5/98wb509AIff1tk9BmvJAqngXvt4LsviejQHPi+79cwDB/n6MvVepFoOzTuf2rzrOxtolV6iiGfgzKeFWA7Y670jl/6oT3wASDl5Bk0orl03cGWcYFjauFwu5KyoZu8Teoosy8zN+cQyB9QSDw8rlgNxMeJSMk3MwbxuPoq3+YGWNXvDjClGB3r1LKMEXC6wrpdWfqqa3FZMyTm1TVjOmXVdm11fl/T2pafXa+l+tZdF9+tZ/b8+V9UPA204JeIHSl6Vy/Y6rn1UW9l1sYFs5q1lCYQgJJT377/hz//8n3P38VtSekTC+l9ss3+XZHDa4LTBad9PBqcNTqvXDE774cngtMFpg9O+nwxOG5xWrxmc9sOSwWeDzwaffT8ZfDb4rF7zQ+KzFwNMdTDV8X3SFAXa4btQjKheY8UGfCJHAGt/tWfQQMv7/e3KVvXFrpNWMwLHg//A77cC8s/B+nFhPi39vFoPSZEG7DtAl/HQAX3pQQniGQpBeDJzofj5ToIFXutXhzt6Q6prUn/mec++SCm13pA5Z0Kci5HRdOxEHApYlrEg7XvqGMww9chwb+iHRa26Lz1Z65qomUeB63cN6MJxbiJtYxBC8MyGUMpL1UhpI6UVUyXKTIxTOWjPy3jbMkrN7iil02W+R4b9/0heeH7vxPvmZ7/PzMgpkfOeGdN8KQQ04zYntJ61nhVSLKd7fl3/5XRCBLaUCukJIMQY8J6jNJ17pHtm3bbmL9cA1I9ZRMiqEAp51AwVfKPmNmNYgrUQlkfyY4usn04npihsSdF8IesMcSZwYgqgWqZVbKe8uH7omSYoIZRSXsuAoZpZV226hNqTdAfoWMq4Vethhn3m0g7S1xu/w/xreWuHJXWjWu+tz6x6bOtQnhvrZqaAeS81O6H2w8UoGTONVf0dAvcPd/zlv/nXPF7uMcuIZYZ8XganDU4bnPaCDE4bnDY47VXJ4LTBaYPTXpDBaYPTBqe9Ghl8Nvhs8NkLMvhs8Nkr4bPPBpjqjxiUI9KIEfyAtR04DTfKfegKO0QfHyx71N0XJBIDpSQxcimfZTW2bePcjcfMiNNEnOYSxdcy+aflgL1U5aeUfER6LB/rr6vR1HpoVu15asUSpTpqWQQJoYxp4rJlyBnBD5178vxOG7VILhA6XZVvKolaKl95j0gf34aZcXNzQ84XUsqtRE7N0G3jZr5BJGLkNq+deMt7CmlXvaoqERCt116Buh77LVrJUAAHoRACAuQC0Fkz2fayZjNDpUJ10SeUDAIfkGBklPXxgW1dywGCgRBnpuXkOu9UJSFg6uXEEot+yqFsO0j8bYpQio797/b4K13BE0fexQAl5ZWcE2pub35woiLl+TVjIpsySd8LOCO26zGnjfuHe37x93/Bhw8fuL974Gd/9vcAz7ZYZi9XXbfE5bJ6iWT0jIztkjhNp7ap6TN0gFYa3jY9KJkMwVcL3Cwe7h9I20bODpzzsnB7eybME55EYszLiW1bydmY54koguqFy/03xJuJOL0jxMUJpfi0iLDljFQXoRzmKIKqZ5jkvNv/cVNagH7dkLAQCvBfLhfADxqsGUj12lqaXjeViLhv4/2aa9ZSSolUD+ws61UJrWYQhBAOtp80c3N7AyKs68pcMjl6Ym14GKPzXVIwL/t1l1RO5xnVlV/+8l/x5ZeZZZlAR+uF7yKD0wan9XoZnFaUNjhtcNrgtFcpg9MGp/V6GZxWlDY4bXDa4LRXJ4PPBp/1ehl8VpQ2+Gzw2Svjs5fPYJLJD21LiThNBJQQhCjF2BRAkKlkEbA7cu1rKAX8PUCc2VLCVMsi+uFTW8rcnhfMhKQeiV7iDBIgBkwVFcgYcbnB4kwWP5xN8FK9vK5+OFWcECmFnwWQhS7ih0dw0Uw0CEQsGSqFMIgFeIy8+cFYoXwOglomlYiioWRTmCM5uBMYeFRc3MmkzNNLcMu/kTKWor/mQoVIKhiKO1hKW3P6LV2AelCboiXq+O7dO6bofUJzsL3/pcnRoMyKbsxxO3Tlrykxy0SUyBQim2ZEfXShRkBzJm+pRZNFIhkjJSVoJkyBGANbVi6XB05vbpjnCQmRSeZmrFkTMs8+ngr4VuafjW1b
CQF++vOf8dXXH0mmbKoQIkE84joFIZuBBGSa0ZLNEgill6YAkU/B7XPSb146Ryh8US05lM9k12sla6M5fgPCnDGslFy6/sW8j+g8BUJYuFwuLPNCOE8o0TMi/Inue6V3LeZEIBiixpYTZrCcToCw5sxj2ngXplL6HVETsp9/h4jw7t0XLMtCCIEtlL7DZoQYMbw0VEJoB8wh0pF6At3YNi+HVjU+frgnzov3Vl3mUnY+Mc0TJj53J+dI1oCaloPwDGMDEiF/C3LCwowA0xTKfZkQaZsyiZGUS8qB+qYrWKFe9ZJOkUDWjCYvV61gXqP3NVrf7Lr6n6rr2nzTauobuVQj+0FQU7aciFMs9qosxb8wY0srZdtbbMR/izhBVeyAvfQVaH2dq+ScsWzkLRNDQLOPQ2Lir//jr/kPv/kVb94tqH4kbwlN4/+4fCcZnDY4bXAag9MGpw1O+5HI4LTBaYPTGJw2OG1w2o9ABp8NPht8xuCzwWevnc9eDDBZMVQpEdkYAx709QPACAL29IC9eqeVv+tnmBWDMaxECpESIauljOqKoQFkaCVy4BFKSj9Mv03ApJSwVueS9k530Nrb0+9S1T0roFzf+7aU9zsy+ydl3dqzQilDbcC+37zfU57bnJTuJeXPGrQ1M6xkX9RS4PqU+r1apmZO1J91XcmlNNENymGgH09vzGaVAPc1KV+UMdVyVieSwxLWW7LuwBdqKeYOkjXivW0bxtlBWSpQWdObv8P2SWJgvjamCtEzNJqe6zMklDUKmHqmRQWpOuk6vk8G86+kj+b2v4/ZAP5+6XSyE4C1+VWAl3ZB0XMh8KaBmoUhXgY5TXPpZ3scdAh7GThaDkYs78opYXhJ5JZS6QUKIcRuHR2QtYDOsiylfBViEAzFTHw4phiyZyoIpfzYe5uiCWwlbcn1XzKKQghM80ycTkzT3JWz5mZvWjaGXpI5lQ2PP9vyCmGFUPvd1o1iKOWaVfdh7z9sdjCdikB1Q6PZN0Rx8c0IRisFrhlDfXmumbWdqYh4T+OyflrWysqa1kyFUHqsuv9au68XEZq/Ut4RCpG29b2yOT/M0P89TxPZMsmUnFZ+/etf8etf/4oQQHP14e+znfm7K4PTBqcdlrDeMjhtcNrgtMFpr1AGpw1OOyxhvWVw2uC0wWmD016ZDD4bfHZYwnrL4LPBZ4PPXhWfvRhgSpqZ5oU4TaiVbAKBlEpJmnlWQS+ugP73/nnOe6mj9w7cFRCKQ2lRoP9Qon6JQCSUfoopZbKW6+gW5krq4tVejY1szBrYH0AQMN2BtCo9BHcKMWvzUS2Hn5X7q0MdgfKpXEepe2BxYyqLXXtBVj1qJSva+6Zp4uPHO9bLI1OJjobST7b2VazZDM5V1pxrJ6PDYDAcEPqvazQZtcYHWbV8HZAQD4+qjnS5PKL6xkGA2K1HtY1KtkUHpSx62zYqWG/bhkgpA4wTSevQ3UE111LZyH+tXIP81bcciONAmPv9/bob5jorgFmvqfPOZb4VwOvDqu1fv1/VrwtBvGesGeu2EafI+Xzi/uGBnPwwymmKzeZFKH1CFRGY58nHococI54skrGcIScMUBGmcpCjmJLzRt42VBM5rwUwZ0KI3N7eMp9OxPnEPJ+hAXguLmfN/93vA9M0Y1pKQ1NGbUPskWATQRayAjIR4oylXNHS4T4AumNHD9R9JoiZeRl5jOSMZxdoOcyw03Utxw0iTPOMxHDl13vUv65LzpkgXsbal8ZO0/RkHA1PxDNfRKSMKbe/KyYBrRRWTJjixPnmTNaVdVM+3l34l//iz/nLX/4LhA0xbeMY8nkZnDY4rc1tcBqD0wanDU573TI4bXBam9vgNAanDU4bnPZ6ZfDZ4LM2t8FnDD4bfPZa+ezFb//mD1/xk5+8492bW26W6KVSbnO7McpTpziAfAF9Uz8MCyrIC7V/ZgOlYvj175yUtHnvv3lemE9nj7B1CyASqKG7g5NVRXeeKN01DVhD80D/HaQZVY1EEhzk6mFfdcFjjIQoXC6X3YHZgehz0hvCcyLUKGNkSyvr5YKhrfRU1f8WvIdpfV7V52FecHA6Cklapys1Q/G+tx6txg0+lki2gITdSNUMUWVeTgXsBaU3+plvv/mW29tbzjc3gDbSfQ7os2ZySqzbowMByv39A29u33E63VTNHvSVc0ZCYHomGvtfK1bAvG5cPLPCM1y8TLXqVtp793e7PWTdmr2z50SAGTEEcjK2LTGJlwLHGP0tbqwIJVNDFZWybsW51nV1gAreE/Tx6685nU+8W05tgwI0vwLaIXO1bBOKeQOY95B1flAul420eVnqul5IaQNxn7i9uSXEmRCjl0hLAAmopuKT/txLAVFf81B6qtYdBAXoAiEYa/6WrA+wvCOEG3+OCVEibiXiNuQnETJNE5dysF/VsWdSHNgX1UQgEGJApoiZ90TGjIg5uZkVEAYxvzbESAwRVWO9PBbd7SQshXjq/rW3uZpdsNuRr10q4F59pP/pN50xRjQr67Yyb5HLds/l8pHHywfENoTE4/17QtiYIyzTVfrCkGdlcNrgtMFpg9MGpw1O+7HI4LTBaYPTBqcNThuc9mOQwWeDzwafDT4bfPb6+ezFANMfv/qK03nh7ZtbQNhSQjCmGF0hYY+4G9YI4Fr6yNoBaLUejKX736peRmdGVj9E66aWrykF068ireLOJ8VZDu+mA/uyMM+CgNQCw+e+2gGMq8WrZaV1PuZqaOD1KemNsYETUqLOjqpGnz3g7wixgLgI27YxzxNoJm1rec6+Jtrp/HpuUv5jyK6fck+oWCRCSdTw2RQClBgIZhAglLVyROwJ2LNDPt594HRavKdrI0Mfk2oF+V6nimZjChFVeLysvHnzM0KIJXvhuuy29KsNx8/rGL6PPLcmYK0n5s527H/7xbteywao2p0/x6rGqWBv7IezgRNq6PVtNUumAP3+gmIXmW3biKH02S1gF0P0sl3Tll2Rci7/9mv3kmhKdgle3hwEzUrathY5T6V3cQzCdFp8XWNgnk9lLL6xqDkwu86PGTsGzNPcKbtsvOq0MIQLaontAvPiIEtQpji355vkA9DCvrGpvVuviSynhMSlvW/btuZXtSeqUEpycy4V+EJOyUuBzZimWEBeOuKm+PyOa7kD8vqOHsxjyUq6vqe/DmCeFjQoicTHjx/IeuHxcsdf/vJf8c3X/5mcHpnmyBShoBNDPi+D0+pXg9MGpw1OG5w2OO21y+C0+tXgtMFpg9MGpw1Oe80y+Kx+Nfhs8Nngs8Fnr5fPXgwwffXNN/zZn/3MTUECKStBzI0pCEG8pMvMy1Z7NKkT6R1GVUsksRx8VQw05z3C7EDnz9ScyTn7giOl1O1pr0h3OKlY097ffmqksSOIAkfF0JvlPAXEAm51cbQtoGGEw3vcuKGifQPtfqj2dEF2sqQbC6C17+qOJdWoBVjXlWma0ZR4vN/qFHZwOJDrPg4z20HkaixtLmZYeKJpJ3eLEIwwecQ15dI/k0CYQsVtRISHhwdfwxhx7OiBby+9NQrIq5N+mCI5G+v
Fe4SKBDRnLyOsTzEtoPW0TPW7gPz1WvQ628m3XGd1lDWDpmZqAFZLe6XZLw3g+zmz21HxBTMvbYzT1Mojy6Pa9W0gBZRNzaPM68rNm1tEHORaVo4UMJeaSZBADAkQYtFeOfTPrsDINLGtFy6XSyu7DALTPHnWSnCfD2FqBOK6BBNrJec9yFfQnWIsG7sDNxYbU4IkVDe2LTOFMzZNBMnE6YQaKEaWstczOYxbysan/l3faWbkLRHDTCWflLZ2b7XNqm3NGQmCqX+XsmdGvHnzpvgNDbCz5tbbuW406vMc1/JOChXoS8/efnz12mma2nOmaXISUeObbz4QQuLx8SP/6l/+c7760+/J6cL5ZvJyYk0tS2vIyzI4bXDa4LTBaYPTBqf9WGRw2uC0wWmD0wanDU77Mcjgs8Fng88Gnw0+e/189mKAKU5nTCYygbicmKboB9CVCQC+kHhEec8S0O5vw3KmL0MFj6LOU2TbNh4fH7k9nYgxIBLJeWuOtiwLEAgSCNNUIma7E9doXQhetjbNME17iZ5HmusyerR/2zYkBJYYMZGDg4E4yIGXtdW+k7K/b11X3ry59ahsWahasrdtuTl8teZ+3u0tzwBRrzM6o1rXtUUo+3k/PDxwOs2oGZd1Le8QTI04R1CfM9nH7xFpKaT6FORDcJDOGGgmTvOB5EQKEclu2FqixFiJ6luJwJaS4nfv3nE6nYqxmzuRgZnPp/AS27qxXrwccjlNnOaFKSpqE5qFKH6wmRa1qmUeHu453ZyJYdqBWeRZ3f4Xi1rTUzMDM7BcyK371hx/VFOzCxEHfhHY0sY0TWSFh4cHbma4XC68f/+B/+Ef/SNiLAfQQdns7BumChKBwLaue2/OEAAjJS/R9Cj/TJS5AUgFn2maiDEe7ExVuVwe2LaNy+VCjJEYI7e3fkhivd6vrVRTgTQwhdpz1QnE2H1dCwjGGKmH0u1iDewrCaAZLKHAtn4AYJkXchYkTIQQWdOOPdu6sSwLKSXPqugA9rCEqujlsfm5aS5ZF4F58rVRBRUIYoXsEo+PjyzLggT48O03nM/nNocQY1mHTAizk2BHMHUcdXyWczkodCeoqfRRhacHYl4uK5fLyt3dPTEIIpm8fuQ//PtfkrePLItheuFxy4QgxPC3aPM/YhmcNjhtcNrgtMFpg9N+LDI4bXDa4LTBaYPTBqf9GGTw2eCzwWeDzwafvX4+ezHA9PX7D/z0/Qfevn3Dm/PCRPZD4FTJGSS41wVAi5PXgYqVPqJWDhUryu/7nm5pB3NVHzB4eVw1jtr30wFKgNAAqxmLFsIpkbcqbqB+XXlIW4Qg0g6Jswpg9XlSjda/M9wI0rqhuYvYlr6Iff/FBjTioHsNOp8CoT6qrarlYD/vFbptq8+rRCxdf+5Q8zTBsnBalvLuZksNgKHylEdSQ5i6cezld0xCKORgPth2XYvYqjn5KeU+KTrby4RTTqQC9lOJ0uaUcHPz8tSaeVGf+/Bw74edlYMdfRkDMXpENRbbcbDZ9eXEc9TlgTD/q8UKiVmZo79Otd9wVO3i5b0tc0Cr1huoqWbUskf6y321J3B7WskAqEBc/uHkH4XL44W0rdzc3DDPM1vauH+45+b2ljhN5dneezjnjGrifL4hhIBZ5nLZCgEk7u/vCdFhIE4Lt29u9+wJaHPe1bwr+wDbLaPANxI5O/hOYSqZR54J4vPg6Y8aYl4yG0QxeyRt8CDGtHyJcAIWzyIoNnDtSxVX4Cnhx5qBI4FQ1CkGmryvqpujklSxECDs5c+HrCSzVhJbicx90Ymsbsjqd4+Pj208LbuijOm5ElUzK6S7kTbP1DBJ/PEPv+O3//HfYrYBiSAJadhVCXjI52Rw2uC0wWmD0wanDU77scjgtMFpg9MGpw1OG5z2Y5DBZ4PPBp8NPht89vr57MUA08e7O7765htuzyd+/pN3GJnQDnZT0ICIQoy7M/hoKXqjgku/AFCj/H75PM9NiVUhABLc8UWk+FuJRFfHKkbSO3QPAv4+B4P67PruME1I9L6Egv/HOt89/F3mkHPey17b4uylnW3Rupd/KrLdE8P+b2vvooC869XnUUHebCc0Ee9N2fRE9zxrFNd93jmImTuwCATXk7T1K/rq57M/xjMwykchBF+TOhfdbSF0pcn7g+ygW8/uWDktpWfqqmAleh6qifrcXOXWSPtz+7XnwP5TZPvMNNt4nXSpC0Fbd+rSewZAO7yRoz9oQTRfu1qaXQ+dDIe1q8+09ndFQ9djyomcldObUykVV7Zt48t59oMRqeCkqDqY1P7CKSUul8cW6U8ps0wLIUYCMC+n5q++ZtLGIaH4Yd1M7YZO3RhJ0VUtOw7TPrdGWkgxgX0TZmaggqggmlHZMDW2zX3VVS4Ic8sm2XXzdF2r3zU7lNDKaIMEP2jTtLhZT8TaZlI3cm2TVMA7V8IqczHZ8a2Wp9Yx1AyHekBl3UD1Usdadb5tG9u6lhJ+w1D++Mc/8Otf/xWaHegF709boedvZU/zd0AGp9VnDk67eszgtMFpg9PKXAanvR4ZnFafOTjt6jGD0wanDU4rcxmc9jpk8Fl95uCzq8cMPht8NviszOU18NmLAab3H+/4q3//a779+iv+x//uF1hamaPw7u1NuaIHK2l/t0WA1gvRzKiHVQEeVRQIIp4tkFMD+wpaghDnAIQC6nu87LjAAYidw3QofSUhhNZ3sEa5+7nsz/wUOEvrWVgjpn10sBJNELxv5DNj6IlvHzPlcDxtYCB4BPR0WgiyRzbX1Us653kmpUTaUiOyftjmL2vvMffSnSgaaEGhw/rnTlLBP63ZDVLWobJPM3fxkl4zIxbimeeJqRCGO492evbfOSe0HGgWghDiXgYZJBIjxDiBCQV3vB+tdYT7ibV+DuS/r0gZqmWP/ocQ/X0dCZuZg08FMOGwFlpJopBaVo9ep+QgPM9LVXl5npNAb5LVrh8fH8FoZafZrGQ1wLwsDjplfV2nofX6XNeVu7u7VoI5zTNf/vRnhK6EMu+7Ci8x7wAttGXr9V0IOPj4cs6tjHZZvITzWD5aCDzW/sa5+INBDkgG8obECyYbqiuXLRJR4gQiC2bBs4bavU99qc69/jsEir/6OFKxudPpVCblegsBsimWjdP53G2sIG2XsqBaxg0QiHNs86iZBBXcP378yM2NZ3Hc399zurkhdhvFOsZajl71l1L2cWCIKb/5za/587/4P8HuEBIhGDG4T2TcH4Z8XganHWVw2uC0wWlNK52GBqcNTnsdMjjtKIPTBqcNTmta6TQ0OG1w2g9fBp8dZfDZ4LPBZ00rnYYGn/3Q+ezFAJPEyfthhokPdw/8/CfvOM0R1dQiyQEIZiA7OFajFIFcQL4uRAVz71EqHaBUwDGWZWHbNp9kcIN3kKqZBuVS9W6UQUCmqYBjOfyv2GKN7Pef7aB47E9aF9yNEWjlhAKmeOntvkg5GduaES+w83mXUl5DkKdnwDXDrBFH1b1Pajn/zEFYjWyZbU1s6+Z9G+eJOEXuP96R0sZPf/IlOW0YEOcFkYAXDge0j9
Kq957VOrYCUK7+3ZErMcOedVFh1Pt5ClJuzGURjFo07P1VwZin2Uua8xnSRggT8syBeAA5ZS6Pj8xTbP10VZV12xDJZDVuJi+XzJYJMaLZ/lZA/FOya04xcvmt+IbDbaICfF3PNhwTLO6R5F3q7ke956cqBAeE87mUUE4zEEiX3K0LxCB+gJ7C3d0d59OJeZrJWAFVYzmdUKwQpPcJrcAB8PHjRwdtgXdfftH1J43kOh8oB1v63FLedp2Ia+aZ/Q9QovN4SbpvQhZuzzfk7GOapoCY8PjwAAin5VwO7Yuo+PzXvFFLsOMkSDAsGCk9YrJgckZibvsFkUjNlkh5BVHMcvlduibLnn1TI/UVhPs1UlVySsznE1JAv5JBva4+p/47xkiIE2p7GWotY62Afz6f2zpM08S2be2aGH3+IXjGgZcVOwjESRD1kvR1fcRQL0tei3WKedaIKuieCTHkZRmcNjhtcNrgNJ+La2Zw2uC01yyD0wanDU4bnOZzcc0MThuc9lpl8Nngs8Fng898Lq6ZwWevk89eDDCtKRFFeLysfP31e376xTskTJALAIYScYze97MGIV0Zhoj3CfTD96AeuCXANIlHA1VRM6LsWQZUpfpqouzfeZQVKAp5ancdWHffVnDb4cmdN4QA4nFuxMmk0ArUdyGYZqJInSCWDTEhENDswIx5L8d6OJ+KEso4rL25AinU1fFD6roZaCaU4eSUaMW5ZqCKZu/JGkNAQ0BCRMKESSxgL6iVHrTqa6UlcyBIaFkEDtxlHR0DWuRYxDVYPxekjFEKuIcC7oZUQixMJWalr+nskewwFaIISIuOupZNHRzmafLelLlkCETPHtnyhh/SVzYVwdhSJpcD0yhr2gCobiSqvp6T59DK6vCtmZBZWdfah1bK7FvJpX9uJTPAgTBQy2z9XeU/5veqZgTzgyYf7wkhMs2LE2fxn358hX49M6CUSIYQiVPEBB7XFQRO5xvUIBS9btvKtm2oOdBKCH5g4RSZFi9xdV8MzfbqhqjqsI2i/JtyAOA+tva1gw2GZWUKkViu9Zaxe/ls2zi5oTtmWPXZsmEMQjBD1OcSLCM5gawE2bwvrkSyeW9XUEJQRDYI2e1RxX3NwBuGSnu+mZdN+ycVqygHQWrLVPFNpeNGaBtRKZ/7Wgf2zSumXqlsZdMjnvFB6Q9bS12t+J7lhJRy2EpCtVQ2iI9f84V//1f/jj/+8T+zpQSlPFXNSAU7fU3G/3P5LjI4bXDa4LTBaYPTBqf9WGRw2uC0wWmD0wanDU77Mcjgs8Fng88Gnw0+e/189mKA6f7xghag+e3vfs8//Ae/KGVXsysqCmGOMHn02hVqrki8/Czl1ePsIbaoWRBjmSKPjyspK5ohnM7uJAiqgsSI4HHcUtVKFLzMrzieO5MbjTZll8wGswL05eArX3sCCgEUJVsiyNRAuJKT+68CugNHTsxxAYWUMrq5IcxhRlfFkiETbuABNHiJYhYpYF89AkSstMwsxq07Lan5hCVGxECyMc+T93JEXBmmBVy1HUYnMZFLeacgZDW2lDA1ljCj6qV7c5x5XJOvg9NJIQfYMyfcOULF7+bVheQUJ2BzQhHK4YUGQawcqubZC6fTmbpM0esFUZXO+A3NynyeSxQ5sywLy3lm2zLr3YqJR3+9bNbIlkg5c57PjZh3Cqc5njwH6OAZJ/3Goq6N1bU3RCvgl2dZreo1TFPbsJj4eKwAaig68nHVjAJ/hZNEJmCcTgt/+OoPLOcbbm5vvdRRPPpstcS5bmbMS7tzSsQQPQIdI2bG/eOF5XTiizdfsOWE4v738HDvWQQizKcz59sbYpyQOJUsk2rxELKWcXrJZI2WTxXoKAdKhkpm1xsv28EuKzfnMybCtmWMLvPGMhKn9vZawo4ZRkaDwmRMEpDsGQMSjFnA0gb6gIQzSwwoAbOJbCtBFKISoqGyuW+EGcsJNUGI3mNVhBilRfIR3wB5JpP7rmomlWyGaZpaGayqOElLKf01MC2bOtOCNzAVG287EFWmIEBk2zYoB12aQMobZhkjt3eZGTkr8zwhlsiXj/yzf/q/8fvf/5qH9YHTOWAa/J1rAnN0uc6JGvK8DE4bnDY4bXDa4LTBaT8WGZw2OG1w2uC0wWmD034MMvhs8Nngs8Fng89eP5+9+P3Pf/4Lbm7fkLPxH3/7Ox4fN3+wxLZ4UoDMSvmlHyhVInNS+1ru/UPdfmu5poPxNO1xrmvnlPJZ/bQ3Kn82bRwV7Hfg5vjcokSzWuL6dPpmtFLSZVkw85K1GB1A+5K1ZtyfEMMju+3BT95V9NDKeZUQ4HQ6oZrJXalglcvlQoyxlfN6yWFoZXWh6Fpw58ylL6VnTAi56kaE/WA4msYsuBFq+TH23+3v8h1C5/A9XRrbtvHw8LC/q1znPSMduB8fL6zbioi0npQpZ++NW9ZsWZZmL62nZgfi/RrU7w9R2W6d9ojv1Vo0stj/XQ+MqwfL7YcZ7mv31AY+bQvX94VQD3zzuYQYCRIaKdT+pjFGpsl7bdYepvUAQ4DzzU3RXTm0Uo3L48rlckEVpmnh7du3THH2jVYBquqLufQ73f1Jnox3B7tuvF0/TzPv0bquKzc3N+0Z/n04PLMeGFmf1/8Inm0Uw+xzDN6XNEQwNlK+5+Hha1L+APJAnB85nWFePFvFLIIuwAJ4n1gDwhwb/tSxX8+tYkzW/KxPuwmHNl+Hk11n13prh+a1EtTq41b8xm1qmhZiOLGtVn6UnGC9ZC6XRx4u3/Lx7muWU+Dnf/ZTfPMpYMF/qD/Pl4IPOcrgtMFpg9MGpw1OG5z2Y5HBaYPTBqcNThucNjjtxyCDzwafDT4bfDb47PXz2ctnMBleZjgLSOC3v/tPmCn//T/8BYEdFMFAPZLsZYpGpjiKGXQGUSWrloiu9yU8OpFH1qtChB0oyhXtWf0zpX6nBnKMJIt4FNq6hRQJJSrePaMSRZubXxdDJG0ZMSFOU5mXT90qEEqHIWYeCQ7+oXXPx8p4iuOYGuZnj7U5aBlniO4YoRDVtm1I6aFZiS7GyDzPB/KlkGzTjysSU/PyuALypX1p02m990qpWHlEDZBWne7f7qSMwbYl7u7u+OLNW6ayvlvO7bGVQDEasB+/88PMTqdTWTtAy6FiIvQHN1an6+9/iYDLDcePDvd2Xxf7q31r+3n3xOLgcXT2Jy9QX2sHtNA2Dg0wi3J9I7Lf5u9w3S3LQgyeRXC5XJjnudlCCBEtfTFvbm6Zppmpfl/nXefY+mcKUrMwCgHVv3vS7MHsWtd1/rXHqPcxFkL0smmQg221zdv1pq5uGgOYKMECoEiw0lt3I6U79NEI8wNxeUOUydcoCMEWVLKXP2OlHBdKXXQb7/U8emIKEhoe9LhUS613d+pBX548p0rtuWrmh4gmVTTvm1/KWovEtoFUVVJa+frrP/Gb3/ySDx++YtvuEEktuwQEsWpzZfc15LMyOG1w2uC0wWmD0wan/VhkcNrgtMFpg9MGpw1O+zHI4LPBZ4PPBp8NPnv9fPZigMlUmeLkkT2U//Q3fyCEw
+       "... base64-encoded PNG omitted (ISIC comparison figure: ground truth, LVM-Med, fine-tuned MedSAM, zero-shot SAM) ...",
+       "text/plain": [
+        "<Figure>"
+       ]
+      },
+      "metadata": {
+       "needs_background": "light"
+      },
+      "output_type": "display_data"
+     }
+    ],
+    "source": [
+     "# Run example on the ISIC dataset. The first, second, third, and fourth columns are the ground truth,\n",
+     "# our LVM-Med, fine-tuned MedSAM, and zero-shot segmentation from SAM, respectively.\n",
+     "demo('isiconlytrain')"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 5,
+    "id": "9d115dc9",
+    "metadata": {},
+    "outputs": [
+     {
+      "name": "stdout",
+      "output_type": "stream",
+      "text": [
+       "cju5xjn5mm78b09871spyqhhr.jpg\n"
+      ]
+     },
+     {
+      "data": {
+       "image/png":
uJoU6LN2bhMZw2oZS790rX/iVuPPK1oQObvX/omzl6+FYjEmBmFUigRiDGRVEkp2gLcGQf22saMlckS9aii4wm1jQ/EnGnaSNMkXCnI6Jwz3mxcr8uu9mbks1JVIxBHLvzWsX/2UiAXKXUbFUkZVBAczgkxm3b60h8X7bmP/+9Vv8n/+OAbuH99F+txzD96+w/yrc96M69/ygeJOROqYH0GhKqyYq7JAMd548/VnFARUEoRxG6CWaRDzqmPcOyicyicth1QL0ZT2CKgpNOmhBWl1T46QEloLrTC2ePJ1KkhHlxlR7PB/jKnofSxtgUwBIeQ+tAKa6ZT35uDXLRcGbYYgzz2MmDayYVp3tuiOZTNyMmCaSmmTQpy1uRmXnnW/2HX+DDOK0oiRj3pMU28ZzJZpvVTYowEnfK9z/tF/vS2V3Hl/c8A4Hv+8h+xe7wG2Oblyy57D1/3zL980mFaLmXgB0wb5PGUAdMGTBsw7dHDNOc9k8mE73z+H/LuO5/Jn97a1dEVbj+6n9f99vfzI6/+NZ55+u3Ak3ufNmDaII+3DHg24NmjhWfn7LifmJXK1cfh2avO/A3OnXyMX7z2hxcUxd47LleHe8dHPRqxNMqE4IiRkw7PxHvGk2U+Ob0UTTNiilbzq6vLOKrx3rNzPOXCXQd6PJu1cP2DZ7OeligKgQoc2ljio/eeA8AsjawfUD5xcC+lwJKNSdH7LgPL8KxMS6E4KOd4Zh8qpSgh3/rit/OB2y/htz7y8i02QTjGDzDiFezhO02PTlI8O6UdTJ9KOqP8cN/r4l+CGVLAYwX0xpVnFAJVCPhuool5uCVZBFCOiVbUIn5yxhcqsU2IshABJL1y505X6f2zYsbXOYfrfbJq1yxpohR+R6uoavdMGmmYEXxAsUWhF0fW+aJbU6b4aUEpxteXO+gmUOzSWjvwyjlha3uHd7ZwtCxKOy7GiHeUYwQXPN456lChqbVicOVenXG2la150b13pGzcmVaMNMy95Dkj2i1ihSbFnmLPlzTbTQNdHrDj+LSoLSt6KoDMw7IgZ8SHfqikPHtOmRQjsW24Z+0Sbj763JK5JCyFo5y7ciuX7b+BJX+YlAS8RX6FYFMypdb8zznjXLC+EkVzxDuhCp5cGu2kgKx0qqKEEAyQyjiRLG1WFFADAB+CgZzzZhhlDnCdYewhTSClbGt9zUiwgng5ZUIx9GO/zrNP/wTPO+MWrn3wXO44ejrvvefZPHXPbaxUx3jZeTfMwREYjUZ9X/Z0DE4gzbm2N80/nfPjUiLn7Bnt9600DYLNsy6ypAOsnDMuz8FMXJk3TvCaUY1oSozSlMvO2IPGzIFo175/bZ1PPHgYXyZeKnqlRU8QsQWTlugBNUAa6BEG2Q4yYNo2wzS1eznnHhbTnK9ZS7tZChsEF58QTHtg7XRuOXwBGxsbPLBxVn/pMyc3cdHOj3LRrmtYGi+h6kkpPykwTQS884ivcApBlStO+wR3rp3Nepxw3aGn8OF7n8Ki7BmvcsGuA7zsnKvx4cmFaeVDBkwbZLvIgGknJ6Ztl31a2ypX3/85pxymXbbvLo62S9y5egYfu/8pZHWst2P+6vYrePutz2IWhWefdvOTfp82YNog20kGPDs58ewJ2aMV23Wi944pCSnPX/Ef/94xg3eMxhOqqgbCSY1nq+0uUmqJ0Rx5tPZdaAKhqlCNtEuR0WjELFYcngVgjmda8KzNgSPTat4mMVQ4NrO68t51Pb5lDjCfI918cQt4pmQku/7cy06/h5grbnzgHD561/nsXznK/uWjfPyeC4g8CyGQeD2Xn3Ujq83BkxLPBgfTCUQWfi7O/61DtYl7s4yrZFPSygnLdc2OyZhJVREEtLWJpK4rIiZoyrQx4bxYZAGwNFnalG5pAVeu/6yuKoJ3pNxS1lw9v6Qvx3nvEc1INqMTnE1CJ46cWjpOzSAVbduQU8usbclV1Xvqq+DIWci5M2zmiY0xWlSD97jRyKIYskUF9IXXcqZtW9oUaZoZMUaqui7P4Khr3xtHVUioGfPgCF7wAsF7RnVNO5vinesdTCGYs0lE8GJpqordb23NIokX2wFKFaoymJlcIi+cDzgvqOb+GbtrqSiucmiy6AsA3RJ1ZdcG8bmkSVrEQWxamqZhOmtoZg1/8ckv5533fm2vN+esfII3Pu3H2bV7NzGOmM4aqvGYqh7hfBcZB7HQEYxqj3eK5hYlIWLtrLpCdGIg1PHt+hCYTCpSi4F4TOQmlnYLqOC8Z+wrQCwiRRxVDbPZbAFIu0iyjOBomwZSogXSSHHSLWqsDV6EOgT+2fP+lLfe+nx+7iNfgCL88rV/l3ff9Vz+8u99IykrHe/veDzuddp7RyhpxhChPEs3jtqFWyxMzn7RI9IDuRlyCOU4V+ga6BZHWczpo/QLk8VIEpds0ZFTZv+k4stf9VxWk+P2o5aq+r5rb+HX/+r9SF3RtJk2WW2pqBktUXiom0cyFGD8TGTzQvJTHDjIIA8jA6ZtT0xrCoWAc0IID41pTaq47cjFXLTndnb4IzzemNbOIh+863P42Y997ybtEZRXnPHrXLTrWsajMTtWlogxPekwzblAqB0+WATfF170Nzxz/01817v/VVeOte+VP7zpJbz7ziu4/pu+mVDJkwbTyK7s+Toyhk9fBkwb5NGSAdNObkyD7bFPO7Ze8VMf+29spB29zrzijN/gol3XPOkx7XPOvoUr9n+Sb3jrd7MeR/1M+nfvfiOvu+AD/PoX/jDj8QTn9Em5TxswbZDtIgOendx4tnWPpsqjgGfSO2/meGa203nlzmPnErPngh2HP8V7x8y5K7fwxqf9tyfte8ete7QU25K11pJVmfmG2WzGeDzhwekebju8kxeeeT23rV3AAxsTokZwQpeBBSfAM4pzSQqFowqeedCEdtOyBEz0eKZi47mIZ87z7HPu4hlnvZk3/sa38JpLrubzL/0o/+R3/zlJPS1P5wBv4ftf9f3cdd87T0o8GxxMW+RTDYuy4ODXhUJwXYomUAGTKrB7eYnl0YjKO4IIvtBROhG8OMR78+Y6RxVcyf0ufJ4yL6pWOW/GHqi9Kxyn1hBxXaPK/UVxhSvTi7eEuSjUwVJXUWVWonwUi2qo6wrVgGZT+p47NMYyMawdks0oztZnoJai6L1jMpmQY0Lz/EV6VDUu0awE53FBygLR0hxdiWDrwYyxRUuIPWMXaeAFRsvL5r2fdzwpJbz3tLHFe89oNKKqKppmBihnnX4GAE3bcOjIIXJMFknlnRUJLfftAKkzEF1EXS5pvW2KaDZahG48unYoxiGaYwQxb3HbJqbTKU2MbDQjfviq3+fB2TmbdCgEz85dK0AmppY2NSyPVsi5NWMKtHGGkhhPAuIzoRLqOlDXAcjkFHEulDEXvK/YiBtG2RAzdRiTEXKE1LRG/1CeMUWoQ40r3LIxxjIu5rirq5q6Nt7e2XRKShHvTQ812b1D8MWQZbTQIySgzYnKB1594TU8+6y7+Zdv+zqmqeb2o2fzst//NX7mtf+F551+PYJx3opITzdh3LiWkovb
rJBOc9zju2LpzHlaJk2xcurDqZlboKZqUXaJctrgoEb8qVtLJy5hK2av9QYrTFGUJAFJaLJfO55Tzsle6abtjrRbF7L2VCoYtFGwulSJvYuOQMTqjCKmdrbLvB+ln14aMCi8UCH7wNb3J4bxZs18+kWKbYSB4WSdLM2mTdxkozXQGfqtJpNst8Snhf4b1DsPnqlguaqhn6NYSuqpJypG5qJpMJ6xtroKZEfb94nHmU7O62ODFPkCoEK56WhBgT7aIbwvAXi6WF3jmPF4fzHnVCokR/KMNcuOAJdU3uOnCOnK2AYNt1VM7hvbNNZJG9vgAiakUYxTu8BMQ7sippmfnwuVfwb+79Xs53xwCYhTn//av/LbedeJzQBNykhuBR78jFmiver4q4+VAK/QlRlUwiKzSTmSl052BtQpybR8QQFowtaKrKZG9Q9GY9lqwQglmMBahr8/bwDoZQ1aJkU6LCUYkwEW+WfpsMtIcZStzeIu7ukhZzUtgmxgjJPDPUeUt/0IuTL+GtIuRkBSabqmZnsUtcdnTzluBsk6aqpEXL7tYOVTNjtjnBNzXOeSQrO+2SaR0IwTwgNGUz3jlhNpsBFkKcYjRPupTolpY/uFahqj31pMEKREaUflNJTwXrq2Jra69HiAs4r4Ra8akjKSxTLyOCCzX1bMbpU4/SdYmYlYVmvu1bz/HaJ87wf/+jo/2IsIgVf+6nvpPvesNv83Uvezv1cpedcxfQ6jRXXHWWiVQQJtaXXMK+xTY+qcyXCx76+ZV+mmTYRKgWD4qi7K0YZflTHtF7R6WeOlfEtGcsxvZJtZFpI9M+3Uzrvar8s8y0f/7B7+d9Z17DoFwwXVJVnqqGUI9Me2EyTTn+kpfzBZ9/HTe8/jT/x48d5ZFHA8sY+Naf+o6nZVrtq6I3GJl2mbSRaSPTni9MS8uM678lHYZxZJo94/ONaU82MS2S0rlAfegIp089SlNFmqxs58w1n/96Nq+5geBrzrxvnfaRjzA/9SBLF5gGR135kWnPgzbybOTZ84lnJC3lt/ZG7/TCLiPPnkueiTtQZ7Tffv9L+c33316MSxfTD+rQ8ePf9tPcccUjXDk7y+kzj6G7ma1H2wPLs2dsYJKLftZLXVM6ss/F4BJXCgYML0JTVXjnTMBKGKv9nG0AimVWVfZ1wvJ2MoSX9nkRY2xxgGgmFQUrCF78EHYHpgh6ZS6yyoLZ5zbtX+sVhoggwZ48pzRY/mLXmeU7rcLMhEHX9jdd/SUwmPz7Z3YlZDJjeSD7l33feykptxLeTJU453DIEMKmWcGtFmc/8q6ATHIml7yJWRVJWpT3ak76/+eUodofHreaRsU7RxUqQvCgGS35NPvXVS0sVsmWA9R7ROyzclG+WgxHOaUhD2s/FxRADpuIPXPunGCmfLMia7GYX5y7eZjDAov+vhaSC06VDGx3R/jQ1quH9wWXueP4vaxPBXEBnCBOyHvGtc9xK+VnRFDJxQmgeBT0z1QWcRYpIZJa5nV4KHvCof/OwJqx3Kr95zY1JZEn1NW+5xQfEOdxzkNVW+E+Abwzaz+gOdOmaOGsKZFDGBRuHgZZVvIgUjYtsMrHa5FemrIBlNK3nMgpE7uID8Ujo0Api71bnG1OTDb6kHbLwyu9KtMMKoOXzfB8WFi5cx4tG5ReKFT3SMiwrGydSvkMcSVMuZer8oxJLfWDq2piVrqsJIVOlRuubzl03YLXvnrOe97XsDt3ZHX88X038w2veJ95BnhPbDviYgnLDo/gi1ysurIHQkVZg6mA/ijVe5U8Kf+4rIZ4EL3e2CrONsju4q332D7RNjJtZFr/w6ebaQPbniWmLdOM9z7+Ku4891oe3L2Fi5tz9jni3Mg0XphMqzc3OXTyEMeubvn8z1vw3jtrPnZPzR/fdzO3X3Waw7MdXnLk8ScxbZW+qO/TyLSD2kamjUzrf7jcmbb3nHapNjLt+ce0KsCrbrqPux65gse3NwD4/Q9fx1a7xZHDNbNmB+9bOoU2K5Ojxzh0zbWoeq7eqTifZ+w+5kklisPJpc9pI9MujzbybORZ/8PzimdlPt534Q08vFid12458gifdcUDNqYjz6xnhWdTt8uhyrPL1ZcNzz7eGW3W7HBud8JHHrtmGNsjhxOv//w557Y8861ddPtxXnPz3Vwxm1NpR7tYsBXzgebZc0O7S2l/9kNi7++G3w+j/xQ32PsRe5Tw3o/VT+C9T93293DvnZ4uN/BTtr1aqr+jrgT3U257+7RXci/uxse/0dO+unesn2p8P7GnefLnPP046L6fnzTn8tSfe/G1T3udZRh9mn5QgLK61/77yZ7L5KLf7LlW9mxS9sydPs2fJ11T+vGk6/bKQg9JipqTPausB92T+rL3z6We8eO0i+dR995jPyiedOdy4d5bXOqzn1mf9ozvUyyNS93vUv++4drID/2PT3DVlZew1j8DtXCxDvlUVtLez/4kNNPYnmkbmVZuMDLt433OM2HapW73qTDticWV/G/v+lEe2N5vXBJ0SDG7+uXItKdtz2OmgQ3RX/0r5/imr98usqH8y7e+hu/+d3+amH1/RHr6Ho1Mu3zbyLRyg5FpH+9zPpNMG64bz2m8UJg2qTu+76t/jZdf99Dw+v/0s1/GP/hPr0NEufmKxzi6vv2k+4jArS/quOZk2j+54znt+d9GnpUbjDz7eJ9zEHjWa9uM48fv/T/5nSe+eXj9T73krfzXn/tr+z5r5Jm1K2dnuPXwAzwJRuUGB5FnT3c/EeXG46c5sra977rbb2v5xX/3CD/8Q6f5/r/8Ab79jb9ME9qLP+bj9+gzyLNPoQbTpW6v9CFWvbUWMOt2bwkUioUfauc4sj7j6iOHqDobOFesinXtqV3As1ksx94Kt4lYCFsJW5xOpwQqtOuIQI4JlzNVHegNuFU9dA9LTxnI2YqvNdOJFUDOqYSYukEq5os5Td1QVxUhBEIIRYFYqGEzqemTsgZveRi9iH2IZkQzXipLaamlMMvQj9IZbDychBKm6Un0qQCsOeftC4CcyTmRYocGz6Se2VBnHUKBe1CGktsUVea7c7pkxdacdwRZ5XVV583C6j0Oj6iNYVp07O7s4vcU/etix6JdcGHnAl1cUtUVzkMuORydCJNpU+5tBfFyNmO4ijCranI0C/Ky7ei6NChjJ57aV5bDtCjpnJQUO1KK+zYE3gdcLSxjIqNI8EwmNVIFupxZLJYsl92e8XB0baRtO9oYya6jqitCVbHY3uHHPvh/cefZVfRSL971pCI04KqABEdynuwcuYRJU1X4qkZKfkpE8K4hxw4Jis8ZLV4A6hzUE2hbVDoUSM6KqZITvhSk6y3EZIoVviphsPa5fm2GqqDiVvlKSyHCamMTS4ZrUpViArGCdL2sgVIfOUqlGe2WxEcfoTv9OHlnl253F+cqXOyQ4nngxIorkrNZ8LECcpUPdPOWeHaLYyfNO041o10id5lOWhbb20wmDYSAqhW5074fvZLqxRaKRb3XGZYrtG5MnnxdgXe0ywWqCeeEytfmDUPxDNmT7xUV8zzKSsyKE
/sEnEdCILuA9q4cVYD1TbjyatKjj5BEyRLY7VpSXUNdQ3py2Go1mzE7fJTZzgXizg7NTgunzrCGp/JlHXQZQQgh4GdT4mIOUSFDTtHWfum3c1gO4tgNIBYRXBa8WkHAwWFAxCKl1DwzQvCM7VNtI9NGpn16mZaLl2OoKuq6/tSZtse7bG/7ypv+mG++/XdYm4mlfB2ZNjItWb9efccp/te/8i7+7j//QubLike2jvNlP/73+V+++sd44+SBgWkTt8qPLlQj0y6LNjJtZNplzrTxnPaCY9puN+N7fvrrePzC2nDpv/zeX+L22zouTF5Ed6YGnRCpudDOiZMJTKaQ7N6TI8c4dvMtbD/6EGvTKbNm45LntJFpl1sbeTby7PnBM4s3Mlm5WKqlcrh65NmleLbYnnP2sQVclblcePZ0Z7R44lq+5Qe/jwdOrVhnnXRQNRBqqvVDbF59DRf0ETYvI559CgYms1I+tXVPV5azEp7oxBSiA3LMHNrYYK2pIKZVtTgniHjmiznZOyrn6GJENCModV3jq5pQFpuKNwulrywPpypKBnGI2Drvi1mJCk4cXUrUIVB5V3yibIHFlJCSP1I103adKXcqnLfcqapF8LHwz6o2pZlKrs21tZmFrFLCbcVCz0QZ8rfayBW77WD1VVPYTodwvF5se/nV/nq1haCpzzmrwzruwzVdoIDW+hpzxmXF+RWAoSwrwRZGzpaOM5V74Om6hNCWe3R0bSTGhJRnySnSLpc20M6Ro3UkaSLmSMpaZAGyiuUezTaGTtwgO1XlLEdtASa55H9UxamNew9gyYJmC4PtYsZXFVVdod7RpkwXI/PSJxFH5T3LtrX8u0UpxxjZ7g7za/d9Cx+78DIuxKPDmLz46P187lV3EbwpbxW1xe5Lob2yiCVUECoQy1uKlEOOCOJtWGOZGBHB45BQYyHHJi9O+jBjMSW/R9ErQnKOBEgISFWTQk1M1p96MusnnUQmVI1JVgYRTySiCFLVdsN+zl22sY01bn2H0Cayr8nirVheuxg8E3IXQZNtYETBhoTKVbQZ0jKyuLCNIGQp+3uxee66jjpGnCriLKQydR1teUSnYqG1vpcby03bdh2hqkte41D6LCbLgy433eOKQsyacaUwng7/M10iYtNHH7rrfJHffh24lfeFCJPZjHrWoMslVVXb+7x/0i5AnUODp65qDs/UNlbb29S2UEsvyziWjYCU+a5qh2pGBk8W9jtziNrnlrUjTiwk3dua1oxVeSxj8kl5Oo3tSW1k2si0TyfT+nzVlpbgU2da13b7JFZQvuS6P+RVV36Y49PzOBdszkemjUwrebcfO7/BWz9wklgMTuvNkj/9ue/kxVe2bO5hWr96+yLQI9MujzYybWTa5cy08Zz2wmOaiufxC+ss4+orqhOHdrn2ipbt9ZYL54XZ9BCT9Sm62KWpJ4Vp5mAz2dzk+HXXce/DD7CEpzynjUy7/NrIs5FnlzPP+nnTLnJqfj1/fPZr2Y3rADS+4+tuexs3HH5s5NlT8EwSpEXH4sI2XCY8e6oz2v2PTnnb26/i3seOMF+ujDV/8mt3+IovWxrPfKBeW2PzihPsbOfLimfPyMB08a0uqeR77Qorq2H5tRMIJXdncnDk0CZ1zrTzHSrfP5/DOWE+n5O8Y3M2pYuJnKJZXENNaGpCcQ/ous4Ug3f4ukFTtGvNXwHVPBS5887jnKPtWpqmoZ5MSF1LFRzkxGJ3B7IpT1Vl2bVUdUWDWlE1McOGkq0gmfdURWnGaNbu2dqMNrWWrxSx5y7rN/hegJRMHpS8eEeKNl6qjuCt+N+gkHPJGUo/oaYoNaZ9llgbcy1F6VwvJ6hAlzI+KJX0kmRLxftgFld6WIkBB/NgaLtE1/XWX1OiOStVFUCtAOIyZ7zz4Bydsy+4Yk60qSXmZEoFR8rmHaDZBDW4QNGPNFVdPEUwS2rvYaCrhdIDlmxKP8ZIlxSqgKsDLY42dixiy267BBQvDqjpug4B6rqmVejajtPba/zM3X9tj2Qr6/Wcz73qLv7M7b9FcBUqHsQUFSEgPuBKsTizLle984ZtUuoKy2hrm4p2MS/QtHF3vjIFHtsCM6AUZCSvFL3mkjNWYalKCJ5qOiE685YQ55lM1spwJDRHcrm3OsG7ipRam1dfUfw5bBq9MgjOdI2wqWioiSVftW1UylBn82CxzYYWmRIqF4g5EdvI7rktUzbB49fWQHqFHWnaljDNiHdU4ohta/AAmmqCryoDkbbmIZGUxWLJtIxzqKqVLsnZxqtfA9iYqyop5326yFIJ27j24EVNH5iSl2HWs6ptbroWVZjN1jh09DBusaTxtd2senIEU0KJAnUTWK8CHtALW1QwrN2Ebb5Sn38WcN4h3hNjBymtiiKKzYulmi1CVcDa5/513uTI9MdqEzUeXD65NjJtZNpnkmmS+od9dpi2XCwvkm/la2/+LU5ubCGYd6UWPTMyzaTshc00uOfRI/zCW64dPufIbJcf/JpfYoLiqQemhcIYLzIy7QC3kWkj055PTBvPabzgmLbPK7y08zsN86WycaLlofkam0fWuPrkEWRnh0loICk0tnanm5vU11/LnX+UmOf0lOe0kWkHv408G3n2fOJZTmbYzF3H/Vs38m/u+95BXieh5dtf8ZvUE0XFjzy7BM9Innah7Jw7b3J8GfDsqc5oDz4c+IVf3mCxdOxtX/c1c77hTy7AW+2rZm1GfcUJLmxFtqOymT0SGjZDoJKDy7NPOoLpSQp+eGGP5pE9vyod7rrIpK657uTVwyAIwpHDRwerm3fK2rTGo3TtgrpucDmQY2IRM3HR4WNZEJQw1hCIzg+Lw4sUa3wmRfvSxTeeajZlETsWqSMvlW65oPZmbZ6urfHYqUdLSJ2ys5jjQyDUNZtNTfAVKUbmu7vE4mVQF0Gs6hrNmZ35bnEMNT8L0TQs4HpQ9EIqCth5jw+BJGb1dOIJvSLpry7P5VTREKzAFlA5T1M1SPFcAIgp0XUdF+a7bO8uDB454atgAuYcwfVF7QTBmZIWs1QGhJwsPHS+jMSUijcAVMGK5Tn1VK4it5l5u0vEwuXEOVxYlHnJZEmoM8HGB9p5JDgLifWSqUsYp4gwm61Z8cIUQS10VBUqBHEVWRPJTKcGdjXhrtcmuCqw0Mxuiux2S9rUMVkvYbyqaJeoqprUdSznS9xkgsY8KJy+OVF++Ev/GVdMn7DPEQtJlrqCaYNrGlvwocQ++2BeBTGbsjZXGSsaCIg4vFrYrXMOdZ7QTHA547pADhHvhCp4yIkc4+BRs+hasz77QAoB30zQ6QxcRTUNOF/Bmil6LzAVQb3QJSUrhGZKU3VWUC9FlvPdYeM1m9R0yzm565hOZmat31gnHD8Gjz/G4vRplrErcx5KKHQE54aw6JBB2kibIucfP2P3PXyIa2+7jQfuvofUtQSUsL3DTALVZMKR6RoXLuyw7MNgD9nmIDhXPAoqNCfy9gXmi12yZurKVJQVU1QrJIiYfOVsQlLkoffayFlZti3BW5h18IHgXPGUSRYO2i7t/Vjord/e
JW1dILRLZDeQfSAvl+jWDoQprIeVbivtwnyHx7fPcuVaTTq/xXIxZ3c+Z9oAdYWqcub8FotlIrlMtVWTBVzw1M7C3K3EYSr97lC1jVFW20xq0RMiDqfmhWDyVDZrg3cCY/sU28i0kWmfbqbVxSPPKfjngGkIbKzPmDQL0DwyjZFp+5hmF1+0NmGyXqNb5/cxbVK+vIvdLouljky7DNrItJFpzzemjee05z/TuvxkzfWt/+eftJRDImh+LT/wZ97B3/imt5N35+iZLWg24FBhWgjolUd5PO7S7Cxp3PKS57SRaZdXG3k28uxy55mmzDRUuIkybWoublp0z8izS/Ps4fNXsH32KDtHztp9LwOePdUZ7YtueYLv+OF38Hl/8zt47Pz6IAN/+a8e41/+TMtv/srj0AHeo0c2eSLucOaBNe5MmxxxG9y+/mE23ZkDy7OnNzA9pTZ/mpcvVvDln8X4x9p0ykte/GL+6nd9F7/8C7/AI/ffx4XTp2lmE5w4Njc3+cZv+Hp+/Vd/mUcfehCXlWVOrM9mbGxskLOC83YKBt74pV9Gypk777yTRx99iJtvfhG33XILV155nD/87bfw0P33M1vfBODkNSe5/SW38+b/+GaWMTNbm/KlX/bl1MGzde4s733Xu5gWhaOqeISNtXVmsxkpJabTKd47lu0Cr5Y3tR+DY8eP40S47757qWvLy9j/h1qoam/tsyHqLY2OygdUs4XmOU8uFvpcBrAuFnaw8EynkFRIPhG1BRFybxkungmhCizbZQmJzCzatnyqp5pWg6IPIRBjJOdMzBbaqTmTMQtr1j1eCmUinXNYXlYTxhAqUso4BVfyc6oadHqPiewiHjEwqdIuWnKdBkH1TsqatTDSvDSFDybUffgqYJ5KGYKvaFVZtB1tSrzsq76Sj955J/d88INU6zUULw4nNtreeSpfkVFcVTFtJk8S36ZSmlpAKlwVzHugqqCqUBcsVLU8Iy6A86RswFYvRGFQRiCEyaRMi5TcpWqhm96jMSKiKMpiuTQPhBIGS2338c0ExCGzdWQyQ1yNlwpxnk771W3W/5xLxIxYioEsmSxY2GxVG0AF1Hl8NcE7M4JEr4jzxq/1TWSxRNreIwRyMiFwMRalmnGYV45Xx2JnF4Cwvg6HDtOsr7O7vc3OYsHabIO0XOIVnA8lf64f1kHUjOaOdpmpa9v0NZXlD+43fgApZ2KKeO+IsSPlSEqR9bCOKf6LtFEG8bLHX6D3KrCNVe+hAOZVIuLwKJPg6dolZ8+2XOgS+ugpZm1m46QH3dz3ETFn2hR7/Y/3tg5jbIvHUfmyrmmYzKZUVUXSbOs1W+i9Cw4p85hSRlFiGweAmQpYKXWcIL2LkLNndCoE/ZSynb5w2si0kWkHiGnaf7lfkoZ/Kkz7zce+iT987EsGsb1+/QG+4rrf5NBkQaiCeSmNTBuZVpi2ebJ88dDu90JToEuJ6iKmpVQOaCPTDlYbmTYy7XnKtPGc9okzrZYFN28+gGsTW4spj2wfu2yZNqkzf+1r/4j/+I5bufP+KwHo4t56D56f+71b+MD9R1ikxF/+Mw/yms+9n6Mv6utbeKT2XH/zzUwunKWNi0ue00amHcA28mzk2fOYZwRPXiw5tLnGxvKiujuIPUtwL3iePdUZTZwn0HKFvpfTy6tp48Hn2VOd0WIKPHp2wv/8zW/iF972cn7lPS+z1zphOc9w4QLsTKANuMpx/c03I+d2yVun2D17AnFyoHn2SdFOn/TTngPqoNVXrwsWAVf5wMmrr+LmG2/g+muv5dZbXoQuF8zPnePGm28mhMDGxgbX3XADJ6+9luV8zplTj5BTYuPQIW666Wbuve9+Dh87xtr6BgBXXnsNKSvndnZ44OGHmKytc9U113L02BGOXXklOzs7LOZzAI4cO87Nt94G4s3rqW44ee115nXgA75uqOvGQhgxy/1sOmVS10VRuIFeQwhceda19XWzVqopS8GUkxOHRZdZ+L5Q8iIi5jmqlutTikKzsNVM1DRYOr0LmFdCRlVIMdPlSOdaC+nco+h9FXDBU9VmfReBnE2wYhfppCPXExCbpj6kLqdscEHQLpG6jkQuOsSucd7hnf3bUVLWacYF84BwfuUFEbMgmoqStb7FGAnYGLbLJewJ1+1D8RQlxYjGzkITyYQQ9siUNSdC8IGl2pf8uylxxa23cOrx0/h77ibbkA9QdSJmrXaOqInH22t4ePHi4X6zasE1649Th/I8DqQcXNQHU9Le2ybDlU2sN0WPCuqt5kHWXPJVCorgS7G4YTW48opzIM6KJ+bObMnOF0iAhArxAT+ZkPC4yQyaKWhAfAMidL1nn7OQ3uyc+a+IoJjnQll9eJmsdl4U8zRmlc7ZvF7UC6yt49qWUKDati0xRQTFx0QR5SL7pfhla7lyc5cAx2TzEMuY6HYXpJjJMaM+Q+1xVbCfsRRzFI+b2HV4Ebz3VFUgqdo4uj7nMQZodPAi6D1+QIrtUQpewTk3KHkLT/W9wwFSPqMfa+uGFeQLXmhTpu0yy2UkbF3AEaiahhxP7JPBmBNt15Gct7B05wz4WF9VddAblbdimqJir+Vc8pu6QYf4EMk5oQyBqkP/RMoqErEHLGkkpE8nsX95jO0ZtpFpI9M+I0zr83vzqTPt7u2X8+6zXzyI7dHJeV538t1MqiMj00amXZJpAN02wLFhTtHVet7LtL2HqZFpB7+NTBuZdrkzbTynPQOmyQXCJBJyi3eVpeu5TJkWfOYNL72Ph84e5sJyxn2n1rkYCHfed5w77zsOwG03XGBSneLVs1MAhEPrVCeOcvX1NzK/r6M9feGS57SRaZdPG3k28uxy55mIoCrErmV9eoz16Wy/kEvPMzfy7GnOaF6EkLbQ7uhlwbOnOqOJJGKX+epXvJ8PPXzFYGACiMvI6XvPks87RDagOszV199I5z/Coj0NzQbe64Hm2ccxMMklftrz6Re/KrZYQ/CgeWVNAypxXHX8GF/71V/NbNLwF//Ct/F//NAPsTmb8ND99/HX/8b3MZvNeOKJJ/iRH/1h/tyf/mZe+cpX8OM/8qO4pLzkpS/jT3zd1/LX/8b38UVf/VW87vVvAOCHf+zHWF/b4M988zfzlt/9Pe598BFU3sXH7voAX/c1X8OLb38JP/+zPwtAdoHJbBNfTy0Ezzf83h+9neViF1Li8NETnNraLgtC2JxtMGsaC5cTIEVyjOTYkZNShQmTspCD8zgRJvWEWTMhpcRisaCaVIQKslgooYIV9utaUhchKZLdEOrovJBjLhZGMf2GkmNH6iIT39AuF3RpSVom1tamSCh5T4GJF+pQUU3XmM2mpJRYzhekLqFJ2d2dD8LVF+4DSF3iwta25aqM9pyVD0zqZsjhurY2JQSHF4WcicslOXasHz5E00zx3peFZ0q9bVvqxgGKaibFjqyQYmTZLVEpIZzAom3JogbBlJkAThMaI/V0gg/BiikCla8MiAo7MRLV0ZLRuubqm24izxc8fOddJqMKaKKua4iRHDNe4P+95y/ynx76U4Ps3nHsXn7gdT9
DM6kMXLVHpjU5BDQEgq8t96kPWGJNINRIVVNNLATbLMGA2vg65wZFr6q0u7u4kutXnFnWu0Vkseyopuu4EOiLGDbNFHWOTgRXTXHNFFc3tG1Gqgk5KzuthWA75wiVYzKboiV6M6rD17XJj0am9cawTNN8zu7WFqmLrE2nJAKiGS+KO9JQr61THTdjykLsj25vU7UJjxIEkhNUHU49TQooSlhE5KFTbL7oJrrJGue2dlgsI9OpeUTIdErUSIo2Tm1smUpgWtd0riPHDkmJuqnIIoO3h6mWCu8FD/gghORRzbRth4gvc+IM0E6YTSq6bklOfb7gipyVLibEe6Rp8CUM1scW7TokdUCmWpvgq4btJ87hU6I7d5Z7H3qEdn49MB203nLRsrOzQCabxAwORzOd0SXLIauq7MxblosW5xZsbBwu85GJKQ+FBPsQYvGV7Rto0cyQrrbXt4qUDYqtmxCCha+rklL3JA09tku1kWkj0w4O0zTbobIKgUkVPiWmuYskugqBo4c28E5wwY1MG5m2j2n3PfwIAI/fdytw/X7VJ/5JTPOl/oAqI9MOVBuZNjLt+cm08Zz2iTPtzGPwR9szqgu7+DYSJF/2TPvzX/lR3viqM/xXP/yGp9B91v7ev3k1v/IHD/BPv/ufAXD8xbdx8xu/kBte9jnce+YMj997zyXPaSPTDmIbeTby7PnJs265IC6XxPkORzZmHI6rtGiDSE8bcs0LnmdPd0ZLOuH+rVeiKGuXEc8uPqMdm2xzx8lTnJ93F+k2eOKxXf79v/ggL7n2YaorbyFufgk3vOxVPLh1hu177+EV1z+E7CzI8eDy7GkNTP0XFnvsj8NPZtnS4Ze9RVhzJsWIFAs5mLV6EgJHD21y5YljTKqKxnskZzbW1rj66qv4h//kH1sIJMrhY8doZlOWu7vF8ud45NFHePvb305oat575508fv48ADu7u1x1zTUcu+Yk64c2UYHtnW1UPK6q8HVVMg1a2o/5YsGy60g5s7tYsrW9zcte/jJy7Hj7H/4+TTNB6gYUqhyJXSTFyPpsQjuf08aOnCNrszVUheXCcn8+dupRs6x6z6EjR+naljYmnHMktRyM85IDUjCPgdnaGl4cIo5ceVxwRkVnwtCXD6ucw4WAd56mmhRvpM4s8jFaQbqSG3HZOfICQrKcqSLCZDIlh0xsI+2yY7FojcniiuJxxNSRo4X/9dbMuqmYThuaIrTNpMZ7hxPFi7D0QmyFZm2GF4/gVl4HJYdsqBxtO6frlsWTQgHPdG1qxfdMM5JNiMxbIThqHC4nsoh5ROBwrELktchczkrsIsvUgToL14yZdrFEUVwpSujaDtGMAyt+rZl9GxknBrHKrN1SB2QyMaUQAoQKnCfjSIWqkjLOJcutmvq1IGgpNhdTZuqKpVgVX9d0pcied96KMLqAryfU0zViziTtPUJqXFVT+QDVBFc1iK9xJFxoEIVqUhSEgPOCD5V9vpqVH+/KI3qylMJ+qiQc2QWyz0R8UaaZSKKuBLy30FZgcuI4aGIZW1wbbY0m2zQ4CdQiTLoWRWB3zqMfuovjJ06wFiquOn6C7sLcLN7eQyV0JDotxS8rj68rpK6YNPVg+c+5pHISRyxePYqB0QfQbEUBq0apqgZxFeBJXSzq0Lw1UClWeE/XdRY6rcq0rq0UZ+8xhJUizCnRxsh0bcbsxAmq6ZTl41vEnW0mMeJ0PwAmVcV6M8GLw5fQ19QtmUxnoGZk2jq/Y4b/nIhtR5I0yDlAUh3mPKmFdrsQLNd4NiVOcsUjZuXFNHg0qW0c9aK+je3SbWTayLSDxLT+gKaqnzTT2jzlf3zPP+SBnZsGaf7uz30Tn33FPSPTRqY9JdOaLvKPfusr+NhjV+zTkQ+fO8wX/x9/nR/9+p/hNdd9ZGBaXcbPiTfPw5FpB6KNTBuZ9nxj2nhOG5nWM+2aw1v8+He/hbhcDF9Cby+X/Oc7X8Zvf+COXjJwOTNdWBrA+sIWPHoKkcwk5ac8p41MO3ht5NnIs+crz9BMCI6jx47y/b/73bzr7OcP0v2Vt7+fb3rlu6hnFRrcyLPnKc/2ntHO7U54+8eu5Px8zsNn96dLFFWabsnDD22yFisOz87AmS0mKVP7dd77wPXctvkgm+HCgeXZ0xqYBLNfPfXrst/FoBRJsxBDG0CAE8ePceXxYwSUaVXTVAEtGwXvhMmk4cN3fQhVZX19nS/5ki/m/PlzPPbYozRNzfUnT+Kc46Mf+QjBeZ544nEWnVnODh86xNpsxiMPP8RkUuMV2q6lqgOhCvjgC0AwEJFLbk3L1bnsOqZrM1MCIUAXy4JVlm2HaMI7IaVEzpafsw6BygdSyuRiBWyXlpPUciJ6UrDiduqEnNSKfA0TZcUABVe8L6Qod7VwzD6EbRhXG0wRU/guBANqF+lSRNQ8EAC0hUwmpWSWRmfho/tC5bPlGBUyc53Th9UqZpXuFVaoKpqmWXlLBIcr3Q1OiN6TnC26lO1+TvpZxwrVST/u9oW792KhtE7wRTkOGwlnnhMepUaQ7MkidEQLx+uf0fYDRfih8p6Zd2w98BBbD59i58z5IQxYCwgkRrxAlprfP/1GHt5deey+4sq7efkV99GHA1rOVwxM3qHOFeeSEga6Z0mUGS0hioKojX2/8FLOg/U3q1myEcV5XxQ7IM5yo6oM8qTiwQUk1EiowQVUbCOgYgvf9zlZURuIsmmw/LuOjOkDLUDs5UnFWbG+wFDIsVeP6f/P3n+H3ZKdB53ob6Wqvb90Yp/OSd1qhVa0JVm2ZNlgDNgYg8E4crkGPNj44c6dhztjmAszxsBchocZYOAyXIxhMM5gGyfkKBtJTpKsHFvqnE8+X9p7V9Va671/vKtq7++c1umWkd3f6a63n9Nfql21aoX3t6relBO23A9Atb6BOdpgmgW56ehyAinFDtF81n0RRFKmubSNXNrGWqjKWBkjiMngDS54pJjIc5fKA2QJSy1j2tcvEtGQTqCEgermMmXRqOVSwBBjwTjE5qE/VPnpfZlSoK4fN0lauC+XDrGaVJRsNBep9Q4/qVmb1nQSITbUxvKG2x9hLUQ+fUZzgn/qydt51ydfx5//wo/rXBNtV/ABg4aHq+5RxSwpkU3GON0Q6X5hZQEUTwHn1UtC/8kAwCHWtsx7Y0oItllVwqNcTUamjUw7VEwrD2iYovv4PTANx4O7L2eelhvUWzbPc9PmRfrt4cg0gw+Ok8dazGwN0x59UTNte9fwkcfv5oHTp7iwv8mqtNHzoUdvZWc+OcC03vPN2GK+Gpl2KGRk2si0FxrTjLPErht05YH5PD6nvaiYVpnES69vyF03RHJcnO9z7KH9A/PCGVgzeu6qXcDFCxAsIUYm1kJOVzynjUw7fDLybOTZC5dniXlc54M7f5z3n3ktj8xuBIQvvvXTvOGWB3nJyTMYtw4vUp692N475hzYj7DbJNoDNQZVj9VGOBpaKpnB3i7s7RJiZOoMyc1xJh1qnl09RZ7hs1qoRMqFihLtU0L1GQjtysnvecldvPbV9/Jb7/x1gjUEY2
jnc7q2QXLGOcfp00+Tc+aGG27gbW/7Un79l36Rh+67j42Ndd72li/hscce40Mf+hA+BPa2t9nb2wPgS770bWxsrPO+3/gNpnUgp0TsWqZrU0Id8METKm2J92o1r+ua2KnFvIstGCGEwNGjR9ndf4ocI5Izs90dplWgrjxN0+CdwVvHpKq1c4xBeoigVnFTPAfECLbyZISYS2iaN0vFbgwxJlyZwKkUrsOoxdS6MCiI2HZFyQDOYryD5JAu0nWdcsJrO0yO2KgeDVVVEYK+7Na5Vgp9FUuuSKZpGkiav7OaBLwNumAkU9cVk+mUtVIwLmcN43PWqCXWquKLKSFJ56wvyscYg7eWlDWzowBdilgfcM7hbMDiViZp75miuTErEUzOJO/I3aJ4qPVF2lTxJnS+rVU1a3XNU7/7Qc6cPcv5c+fwRbHlnOnaFmJH5Tzi1/jn9/29Ay/jvuned/GaU4/oPUpGcsLm4vVgrSqUHDFU/QrTDxbbQc4admickGNk0SxDSGOMy3FMiaZrMcZQhYouRs0nC6VAnnoh6M+2KGQPzhfvhILnrEprNdRRfTGKBwGa6zfl/vf6kqlsBTQXaFUj1msfxQhiEWPo2gXeyJDutVrboLKWtari7KVLNO2C2CWm1mBFcAamZcyjZBb7+3DmDFQBuobKgTUZSOAM9VpNlbXI3/a5S6SuhaQeCsMasJrbNA8ejCUU3Hokt7RdJKWkCl76/KeZIW9qGdesO1WMcWUTmkEgNlp4st+lOqfgS1DyrxqMgypYSC3EhvXJOt/25vfyq5962WBg+vkPvomPPXon3/LqD0HskJQQAecD1lpy0hysFoNGd8eSgrbkL9URG3SoWAvW4ZyQoiDGFA4sc6Mutc1S/1pzYFs4ytVkZNrItMPEtKyKNhtDtr83phm/fLhaned9M0amKdPqOnPPHfvI/hTp/IuWaSlGHju7wb9651deOW/K1WofkRzLQ6syrU9RrEyTkWmHRUamjUx7gTHNB8+sM3Qr9iVnEt6W+h4j0170THMegs90UY8xGGpTEXxk2i3g/FlYC4RuzpQMrdZ4kZFph1tGno08e4HxLARPG1tszpyfn+AffvLv0WWNprRG+PY3vIMbj+2Ss3tR8+xzfe9oyCTJzPd3r3meDeM7zExDIHDX8V3aUHP6/A5sXyR0c7Zcx80nHoeuO9TPaM9Sg2mUUUYZZZRRRhlllFGuXemi4T0fOkLcnwEv3loI/+63/xjv/vCpz/r3W49d4J3/3f/K0bDzB9iqUUYZZRSVJq3z1z/8c1xsrht+9/9+y3/kNacefv4adQjlxcy0b/jqp/iqP/Ux/srfejUpwYcfv4XX/X/+Fj/9nf+S1x6Lz3fzRhlllFGukNu3nuBdX/8t/NVf/7t88Oy9ZDH8d2//Nv7Ma9/Pt77xPc93864puWvtITb8RfZSZMfey5zwfDfp8yYPnL2Ob/+hP493CbDUteHj//6XBmPPtSBXNTD1YXOfTTQ0a5mP0WAwohbLl919N1/1leohefrJJ/jYhz4MKanV0loq73Fo9GEInq/4iq/AGENd13zogx/k8cce49KlS6SUOHnddezu7uK9R4zhxltu5uiJEwDcesvNLBYLHr7/fsgZV6xrkjPeOYL3VCXvri85H8kRIwnJkWY+58EH7qeugoaTSiJnNYuHqs/92f8zavk36i1Dn/5Kz6rFsZxltpiTS4iiGji1AKGZVEPfGWuRFLHGEpwnkzW8zmioq105N7mE7GVRCyrqOeCqgES1Eq/OOhGQJFpgL0NKQk6ZlDKpywS3DMXTlItqhQ4+kGKrheCsZzKpCcFhShisFukD7yyxa8kiZIT5YoG3GhabNKspzji1wEvCOk+ohBg7MGqxNtbhi7dEf+7ekyCnCDmTjWiorzEYBFM8IDRFgkGMEILDGQ0T3Tt3AZk3TI3rjcoIhhgqzTdpIKYrN57OuWXYpzNgXQkxNJTOoMuCL+G8g6XZGDKG1LXYEg4sBqR4F+TyGVO8cYy19AXTrPcEayjuAQgGX0/wpZCfdR7B0MWMMbG0y+Kswxi7GrVIFqOhl8XcbIrfgIgMnhzO9kG1EJzHVgZsxqRUvA8ESyZLRY4NTaMPKy5m9Q6aTFi//hSprsg726TtGcZkrIVQIsucWCRB3N+DVOGsAe/puoY0M6yd2MJXQdsuOo/63LldjiWkOxOjYIMWpjf9QBaPDmMrjNf5munDry0Yi3XLe9Sx73WU4Lwj9OsXsFlYiWwdQom9D7ickNkM2gZHxplMajXnscSD88eghUSNtaSU6VLW9ZY1HFYyB9Zm3zjJgvQ6s3dMcbbM/+LlIMWHQDJ9wOrqevHOEbOojrLXEnaePxmZNjLtMDGtZAFH0NznnyvTPr37Wt57/qvoso7DkXqfP/3y93Dzkd2RaVcwLdI04NoXN9PmTWDWTob1cu+ND/NHX/4Bto5sQWw5Uu1w09Y+xEzqzArTitdjZjlQK40bmfb8yMi0kWkvJKZhtG7JbnecTpZ6amvasbWWAD8ybWQan3jwCB984hR9FsWYHef31+maltn2RZoc2TpxFDOfE4whWI38OPicNjLtsMnIs5FnLzSepRTxzlJVnrlcx7/75J/l6dlJ7Qvgq172EV51w5Mjzz7H9457cpImrbNIkXZvTq7Nc+LZ8c2Wm67b4cFHp8RDwjPnDrIhiWVnMR1+9ovM3/2BV/Km6/e59+g+x05chEPOs6samC4PUu0btBq92oeyak4+/VvtPXfdeQdf/3V/GoAf/aEf4t3v+FVuuu4E891dqGs2phPq4NlHyDnypW/9UqoqMJvN+O3f+k2eevwJFrs7WBGm0ymTyYQqBJoUufmmG7njrrsA2NraZH9vl7OnT2PIrG9ssLY2ZWf7El3b0nlPKGGc3hqMJCwawueMUAXHmdNP452DrkMkFYUiVCFozk6rOQedsWhJuV7RGy2KBZrXEnDWsVjMYQU41hi897iqOhBQ1pmEMwbvHF3J/2jQ61i7EsbpPSkmkmiInjGaq9JUamTJQ5tXxiWrcs9Jc7qmnMvPGT9d17EsSqEPP62rwCy2WGsIwVHXFS74oTCYoLlQnbM0bSaXazZdi6ks1rihFdaqYss5a05UB77zZRIVRR+qZXG+opgkZ6KUPMkGkumDMAXTbzxMXhqYvC8qyrBz4SIAU+O0ThMaAmp8wkiBR0xXzGvrHS7oUsgODQe2jj4cEGNJqc8WugwnxahKbdqOYJb5Q7Gq7a9Y47YoLmMwzuNLqCkCCQihxpYibQIkSlhqylgjWGuwJfenkZUw6axF5IY8raoRNNxTpISV9+vXaBi09WAzdJ3OSREkd1hfEbuWrlVFX6WMx+AnE9avu45UBVoLs72GLGAlE7zDUHL5GiHP9jFk/LTGeMu8bUiSWJNUrq25SYP3WGvIOWlBygKmpot463DOE1b6WnPGBoyLGnqNQawruWEtpt8bFcWt4b2qeEOlGw8nFisCXRpGR3IethEheGxMyN4+0jY4Et5kJBYDU77cS9DgCnAwljYJKWaMsSV8v1zFaJuMW
Yb294mF+zWg80OI9MtkFVll7IYdjOAFnM1auNGuapZRPpuMTBuZdpiY1j9MZ4RE/pyZ9uD+vfynR/7C0Edb9ZxvuPc3qYJXHc/ItJFpB5kmuS8/rfLyU4/xV9/6dm686QaknSHtHG/KnFhhWk4yjO3ItMMjI9NGpr2QmDY+p8HItKszbWexxfs+eh0/97s3cLlIt6DZuUCzc4mJNJh2RhDBMTLtWpCRZyPPXmg8S6WmVmtO8GTzCv5/H/1mwFC5jmPTGX/63vdz8ugcY8PIM67OM0uitpGJJBqOMM9btDli9hcg9jnxbGPacmKz5dNSEYXnnWcheDaniePr+1zYXwOu5ERKhh//pVO4Nxzn1tes405cgkPOs//qFHnWMkwUBBzw8pfexWJ/j3/8v/0jAC6ePcupE8fZmE75tV/+ZSbBc+89d/PSO+/gfe8/z0c/+GG+9Zu/iTpUNMx58pHH6OZznLFUdc0v/uIvkmNkfTrl4lNPsT5d44brtQ7Kv/2Bf8fZM2fYnK5z4dx5/sTXfA1f9uVfxr/4//4z3v3Od+GB/e1tALrZPpUIm1VFconjx4/z577xGzh36TxPPvE473/PewiIAgGD80KwlmCtFmrLUhSo4KuAkSXoYteRRHB1jfMeazUfaZsXOOcxxhC75YO9sRaPwWQhtx3TqlareckDKSsbbG8s1gomqWXeOYcxnkjEmgpJkRRbQEFjjcWJjk3KmaZtEaMw9qF4U5QVYQys1ROma1OOHDlC17W66XSWLjZMTY0voEzGIjnRxpamnYMR6mmFc4GN9U2c88zn83KDalEXVMsY66nXprSLBoGSo7UARKCLiZQ6ctICbCL6c9s1tF2Lcxb1PQFfckZiIHVduZBlHYgxE3MuhcxKLlTJxXpvCSX/58FJrP/EgDi0poV3RBEt2mczrqrAW7I1w7mTCDklupyQGHGUwn8bW0jO5JyH3LCUXq8mcfjeGN3wS9kgpSwQS55aF7Ah4KsJ2Xr6gnJJKNZnM+S/NMWbz4WgJxZVis55ha1kivPaAK5B+zuw1pFTJMaOnKFJQhMVql3MVAibzsH6OtbAxBjmF3aQeUMm4kQ3CwgYEbwxuOBw0wkEz6xt6GJL3tnDTqf0jdk6cpTFvGFnZ5d6sqEbAgxUsGgjsVeWoF5zZAKOmB0RyNZh66kCLmc60SKuxmg7XD3FukiKqaw9zclahb74ud5jyhkMGOuoMKS9GXuXLiGxxcRICJYkXVm3l22oRGibBRPvcTgcZii8mXNGijKX4gXknAOB3EVs8AXK/U0q1GPT4ozHOF0cfSFMARzLjRECLulmc/SL+/zJyLSRaX9QTOuKZ2A2kMtG9XNh2jMV2nTW6qQ1I9NGpl3JtLhYHJgv3mSmtiXuXSB3LcSWNgcqGw4wLfbRuyVf+Mi0a0dGpo1Mu1aYNj6njUy7GtMyge/+ga/h/O7a5TOk9HPCuw4rZmTaC1RGno08u5Z45qxlsdjjXz/4V/mV039+GI8vuOkRvvcrfwqCgA8jz54Dz25yT3FbeJyq03dyKWcWbcun/R+iDZvPiWdPX2x4+MkF1cRj68PxjPZ1b/w4b3npZ/iOf/3NpHzlc/0kdLz7r/8DjvgLmNTStf7Q8+zqBiZrBgUOBz0IepG8bOu0rrjjtlv5uj/1texcvMgvv/3telBKmJzYix0L4O477+A7vv3bedev/Tr3338/N193kttvuIm1tSndiZP8pW/9FnLX8uTjj/Gud7yD0089gckZiYnaWY5srHPy+FEAvvHr/yxd2xKw/MgP/hDN3g4Xz5zhG7/1W/nwb/4mTz3yCJNi4Q0CtC02RSrvWK883f4eW3XNYnOTzckEktDHXDsRKuc05C4XS25v9TNGDYGl40MVNPTWO7VeG4vtR8eUBVbC14ZCdM4PA0lv8c1aWCxJHiCSisdpH3qbcyamRBcjWfT7tiiI4A14h7MWYwzOZbwXvFfrbYpxCHc1KESMs4iBNnZgDd4F6lpDL2OMtOWeU4rEGOm6hq5rCT5QhcB0fZ3pZAIY5gtV9FLaHXNS7wsLlJDKXrGppVSLtOYcSV0ip4T1BuMsxnhM9lS1wXtXQo3BOkfO0BGJXYdkwYpFklFAxkz22uYsGUkJa4syaNsrPWSsXRYr9Aa8x4agCzF48B5CQKwD6/QrqpgTQj2dKsyM1aKAxeJvxZZrGfpFFKqJWo99IGcNj8wiWOMR40j9krUlBBNDzFmtzJQ+K94Y/XrU8NcCDmcQ0b9b09PWgF3RJ1mwBRhYV0KuHSZUpNQiPmCrSTm3kJLRUF0bMGGCrEU2Tl3P7Ow5ZhcvEnqPaJFS5C9iRYYwU6Px0sz39qgMWK+h4y5M6XJmtlhgw0Q9K6zXYoqifZn6vkbXT8pCZx3ZOvA1TGpMMeakrinXE0zXDUBRrwPKpMuA0z4crPKqxKy1pNipvkpZiwAawXiLN33BzmWYdz+yIVRYKZ4IzjFvkypngclkMkTepZRwvUKg77I8hLVS5kJKafBKsdaSSIOyP/BCWfQzBlXVozwHGZk2Mu0QMa12tY6LczjnPmemXRGhbsBYg3FL76KRaSPTVpl2udJ790P38pd/7L/DOce3vP7X+RMv/wDBV9jMAab16VfqyQTvRqYdGhmZNjLtBcS0D5x7K//50W+iyXqek2s7/LU3/zJ3njg/Pqe9yJn28cdP8h9+5162ZxNkhTmrMpkEJrWjW3RIt4CcMIaRadeKjDwbefYC4pnkTNe0xLbRNGaspAq0EIIhezvy7Dny7OzFlnMXPVXXl6sQck601j5nnrU5s79oMGF6aJ7RbE4crff5H7/qZ/mJ97+JTz190zBPXnPjg3ztve/h8fNrPI2u3/XJBCuGdTfj5uniUPLsuUUwLefp8leX/S54x2RSs76+pqGXbculixcBmATPxKvVrIsdXdtgEO77xMc5e/YsU+8589hj1HUNCFNrIQSmzmGzKpf+Abn2ltnOJc489SQA08qzHjx0ic26YrGzw9OPPcIdL7sHGztYLAhlRXSzfc499SQuJ2ob8CKcfeoJfF2z2N2jcq6E/2mveW80BSX60sbZpcfw4D1cOtgbnULW2RLKqIvPGjOcbxmSpiFnyaQVjsoABWdMzwT9S86aT9U56qqibVtV1jJ8cggRFTRkDvpFZgZASM56LweCMAxJMl1KLLoWMaJKwejE7NqWXMCgiz7SdsVIYw3WWXzwGGeHvK99/1D6yBjFoQztLTl0zdDooV6NiGg4pgOx4CRgUeNSKOkRRCClErYqCUmC5ITJlhwTOSbisHZEF3IWzs9P8OD23eQSx1q7jntOPsXGpNXwSYMCxjvwDusdOA/Ok0suzj68cHVdhKpSazSAteQsw9ivKmNBsN4vrfklzDhlQWwB/aqlGIP0Sp3yGfpjljk0D4gxGOwAUlPy12qAL6XPGLwRdEw0/Fasw/iAC/UwtwyCTR3ZJMQIyXS0SZgeOYKbL8jzGc3u/nCsK0pXUkJSsdCLYHImti0hZQplwHtMAWkq92yMJQJuOsU4r14dQEyqEHPO4LyG+k7X
8GsT3VhlITdFjWa9V4mxKHADJaRc11vfE/3GS/9nrYY8W2Mw1pQNW3GcLOmm3Eoe4eHDxpJzREQw1qg3TGmDKuCy6bgsbzSiuU77dVGauDIuvWLXHaP0Y290Xq8Ov+WZJsMon1VGpo1M4/lnWmU1NYF6HpYw9s+Baf31D4hV/TUybWTaMzHt9hMXOL19hgfPnQLgye2TPLmt+eBv2jjDjZuX+LJ7zuo8vIxpZXoNOmNk2iGSkWkj07j2mfb07CZ+9+zbhjufhpYvvu0z+NqPTHuRMy1lx6ytP6txCeAjT91JZRtu33wUSZ2OQZkPubxAH5l2DcjIs5FnXPs8yzES247liFw2oZ0defY58GxvvmBvV6BZ4RnCNBmqa4xnlz+jTUPmzXc9zK996t4D3XvzkQt8xd0fgWTZixVdrljkKSZnFt6w6dYx8dKh49nVDUwiy0ndd0o5oXOlwFfRSJubm2ysT7l4/gLf/6+/n9y1w6UthuAcR49ssdjd5dGHH+ZvfPf/wJG1NbwxkIR/+vf+blEKMK0DVjLWqDHwxLFjpKal6ebU05r3/cY7ec/vvBtYCe9rI0fW1tk5/SQfPH+a3/7lt7NmAxMgtRrGefrBBzn/6CP4HJkYkPmC97zznXQ5YaxlUlVU1oERjDFMgqNrG3JO1JMJBlFrX84aLmqtblxACwki+gLa9YYQIVdVKeKn4a3D4FlDhxaNUy+DhHWaTxTbL1btW1fCXkMI1FWthfNiYhIqDZPEqKUVcN7jnEcyxBQxkvFW08u5ELCTCalYfnMWui7SdR1NjrQ24QwQMyZFLNC5Znip7rympcMYQgg4rzlHuxShazFYQl2XewTrDD7b8pJdiF2r1nYRYowEk7VPUUVvoNxnRXZgEXzl8YD3doge6bpI23b4khs6kohRwxhzinSxY7bQMVervSOnjt9+6sv5/ge+d5je129s84+/+odx02pIZdYrHpzDVhWECnF+yLcpxg4WaFVshlDVpKYdFvJ8McdaS1UFTAlTLppX79UYEoDVAm2LmIGWyjpcMWR4DE47Eec8GgRuQSjKW70tdA1kLVKXcvEg6Ver1XVbLNSU5Zz7scSQczco0JRhurZB9hVd0HH00wk2ddA10M3Z293n6bMXuPumm6jEMPWes/v3D1ebYKizIE0LO7uwuYHLWlg2R8H6gC9zhPUp0xDw9RrN3gKMJWGYxciJUzdgq5oSMUs3W7CYL8gpsrGxznR9g/WjRzHr6+Cc3udsT+d1jCzOn2O/aZCcqQyINeppgSEbIZPIouvAGatrzzlMHZBkIKtXRuoayMsClX1e4F5EhDYmbPGWAsipLfWXhK5tdU0WYOm8UfDkElK93DyCFYv3nhx7fatr52CuY1X+howtD4uXR1aN8llkZNrItEPEtK5ZK5tR7Y/PlWmpPVgTzlA2l86OTBuZ9oxM+4tveQ9vuv1h/vp//IaVWaPyb977lbzj/tfx0b/xPz8D09SJolvMyNM4Mu2wyMi0kWkj00amvQiY9gV3neN1L3033/y//3HO705YZVc/W77zR7+Nb/2C/8I/+7rvI+eE8xXWB422iHlk2mGXkWcjz15IPOs6UtewVVdXrn+DGphGnv0eeSZYDBO4Jnn22Z7RLndkP762x2tufoSc4dHtm3l6/3piTpAyF7uKS7PbePnGhUPHs6samKxz6okvgnWuWPr00ilGQvBMJjXXHT+OSB5CPJu2wQPr6+sA1M5ReUfbdaoMvWVtOmFtbQ0ngo2JtSoMVjJfFB9kLJlmvsBhqCeal9PVlYYQotY56RI5G9qmoekaxLCisC2hhLsZA0YywVq6+Zw0X+CcY62EZea2Q6JgSlhY03d+WdjSn8NZ2rYdvIRBi+31FvKAIYloKGmGpmlpY0eoeiWoyqfriqXTeR0w53He0XUdMcZB0a+trWGtxVpL0zRDwb26riFGxGakhMM55zTEr2mJMUHOVJXXQoJZFX5VlYJuov3TxQ5B6Lqk+iSqpfeGkye1SF9f5C5r4cAQAtO1mhA83jtSErquwxiLt6GcOxNjpHJeQZei3rc1WCyVr1h1l/DGkqyG/jnUu0H7xiFZ88L2/ZFFwyCna2vMZgsykUhSj4lJwFaeuFh6aOScqZzH24OLQdA0ASQtoQhgTVVcM6ToSFXQvqqIxiHGFcUCVXAY58kCXad9uDbVUFRjLS4EnPeqXERYlPR8pvzPe48Llol1+Lomy9LzoIuJTsBkCNP1wWtFew/AYMr9OOMwVpg3DcmIbhYwOK9hs2TTp1hFBOrgQRSxLlSkrlM17TxiDNlYclnnbTaYbPAm0JnI3ATmBPbEMNnYYGIt9sxpPXfXsbOzC7MFbczUbSKkjK8D06pifzZXj4ihKKHFeo+vBekgoqGpkyPrVEeP4SdrSAFf3XbUsxkPfeYzJBfI1YSNegpW7xUMTKZ62hgJkzWs84jzVMESjIBEskRS7tSTbelbgYiqHGcdQiSnVApwujKRijLuE+H2nxTY6TrWrWI1x4716WTIk3ypbYv7Ql56FIh6D3T95q43XhndOHrv6VIHorrHOotk3dSueppko14P3jqqFZCM8tllZNrItMPEtLziDTgybWTaHxTT7r7uPP/XX/hR/qef/Woev3T0wBz6bEyblGLRDjAj0w6NjEwbmTYybWTai4VpRlr+8V/8L/zkb7+Un33f3TyTxBiZzWZkhCMnN5hsHuXC2fNMjRmZdshl5NnIsxcSz7oYySlz8sR1TJ+sD052QdPzjTz7PfHs9iOPs84F2gsXOLt4zaHg2d237ZPnC04/fez3/ow2GNhVVnl26w373LV2ifNnzzO1BpszuWnYdP7Q8eyqBibJaq/S7wUphkmdMMX6mDJVFTScMWesoDkynR0UvURVXJGMtQZvvd6cMTjvqapaO7UMWeUcXbcgp0TG0MSEQfM3Tqc1zgdMKCFsbYMYg6sCHkvMkZQTbVILojFCLrPcGsFZwTntIIfBm37LqgNih9jRYskrCjaJqAHZlDCyEqo/hJRZMwxCF5MuSiULIoLkzOqUkdT/3KfhMgSnFvIck8Im91b2kgvTaMChht45NN+q4IzF9/lbvaYRiIuWjHosZKPWUzFqxTTlmoJOEqxo7tWc1VVM9L5vuOkmLly4wO7eLkOPGFUgzusYar5OBY81y41AzkKOWUNOUybFiKQEWQoQvAKl9J81fTimLjZnjBZEExkUZSz32IdtmhLyKKilvEtR2+Qtoe5hlskp4bJo8fODM1wXe/GOWP5GF6YtIccKeFUmxpphYWJ0Duti1E1RKl4m/TGaw9RouKa1aHhiCd91DmfBWo+rKvIy+pgm5ZLjVUBM8Vpx+jmxxTuh9MeyG4f7MqZ4G5Qxs+Xa+qNdruO+LWVyZlFPB1Pan6NFjMMEBa6ppviNLTrvqVxNVVVsXHcKROgWC7pFQyybN2M7TBcJdYWxjm4+Q9oOKl0flDVgXTl/0tBcX02wocZWNbiiwFygErWdL7pI1XWljTpvFP6leKGleF9QUtWJKsuyxkEVqCw7oRRQzIiUMN+iB2w
JKV4alg4qfkHY61pCHaitwTqPz1r8LotQea9ePn3/lnzgZWkWRJV1XgCg4dq2fJ+HNqpHSBp+xIjqMWvwY3Lv5yQj00amHSampZxXniBHpo1M+4Nh2jQkbj9+ka98xX2cvjShaSPvfOi1xOzZayb8+/e+ja955ce4bev8wDRXOrRyHmfSyLRDIiPTRqaNTBuZ9mJi2s3Hd3jjS54gtRGD8LsP38zT21sHZo2RspY21jHHj7D35FO4qjrwnDYy7fDJyLORZy8knmWjXFmbTvD+GV65GzPy7PfIM7dmMblmsrhAWmSdw88zz2ZzT5pru3+vz2jPJD3P6iM10xNTzp7bw4eKGjAmUWV76Hj2LAYmoW+V5ESWMiFEG5qzWotD0PAzIxom5iQzrSpOHjsGwGxnh/nuDm3XsT6pCN6Rc8aHwGS6xvp0jdQ0IBq2WVcBFo6UIjkn5s2CHDvIiXD0CKGeEKZqCW5yRrK2a2Njgy4mFl3DbG+fWCZ0VxS9M4I3makLeOeoigVfF7beq0XKpNcFYJ0DZ4eomXI0rtKid314Yh/SFlMipg7rHD4E9bwq5xs2zlKs8kXpWnSggrVahM9YljZD7dNswbqAdbo5UtCCw5VQQz238149CQBxeoVs9SU51i6VTZnEDrVI26xF3axo8TvnPLffeSfzpuHi9rZOcGtwTi3k6kFhSrdpSJ81boCT6krROZE1tDZ3HaSMcwrZ3Cvusnh64Dpr8VY50KaoEZpSwpL1aF0lfWQTquznsaUyFSFYqrooCBFyytgu4czlDy6G3urfK4J+A6PeMx7p84pKOda4oR5P1tOXzUrQwoQxav+Yks/W2EFfWOvIZAyae9Q6izEagmqcxzm7VPSLhfZt77FnrYYGC5hSJFD6UH8Rkigs+znjnB1y71rswYe2/iGtbED6uS5SvDWMzjFAi9oJ2DpgBML6JmsnriMFkEmNc4bjN98KwGJ3l/nOHmk2oxPBxEQoYDcY2tmCPG8R32jfTyNYpxb0ohOygRACuIDYAKWvbfD4GoyvaLvEou1WHjYLsPqdqNFihyLqHRK7jEyDgjFn7WedaABDXmEtaJcxxYtEvVo0/63kPqw1MfEtixgAQ8rw9L5hGgyVt1TB4Eq+XYsw8RWtZFIBSU5RdYx3BGeRfuMI6t1QdIotqQBip2tENzN5KJRZFgrWWVy2uHT53B7lmWRk2si0w8S0rj+3MZoC4XNgWrsIZCbD3A42UvtuZNrItOfENHLk//am9zGft5y+0PE7j76SmD0XZ+v83V/4c7zs1HluPXppYFr/ID8NFcHFkWmHREamjUx7oTBtfE4bmfZcmfbFdz/Ml9zxaUyG7/mZP3LAwGSNwTtPNoI5skk+eYKdrmEaApXzI9MOsYw8G3n2QuJZMhZrioHpstRnGDT168iz3xPPIreyaJ/iSPsk6ZIjpuefZ6cvrJH2EtPcILb6PT2jXW5kulZ5dvUaTACyPLFZvWuBu17yEl7/utfy9X/269hYW2Pn0jY//7M/y6MPPcjrX/MavvPbvx2Ayln2Ll7kr33nf8PW1hYO4YlHH+V//Qf/gJtvvoXZ/owf/eEfoW1bJGcWs32+9mu/llPXnWQx3+cH/u2/ZT6bMZlUbJ04wRe99S3cefdLAJhOptz3iU/yUz/2H/i6r/9znLrheibra5w9fYb3/9bvcP6ppzm+eRsA+7uXuHDuaSbra+S2Y75YUDlLZetSaCsRghsWS2/txTnEGrquwRgN1Qwh0DYNXaf5oqVASnJmMplqyF+xKk8mE7zzLGbzZfeJkEVBYq1l4ivSoiVKQz1dw7CESM6Z1EYkC9WkLh4BlsobbHaYmIklb3XbdcTFArFumLhYgzi3zMFZrPNScqHaYqWuvAMykiNtl3j8ySfY3t2h6TSv6E033UhVBbXMFjjGlIhtR3bgXB7SGwTnWasnLOYzrKhFddE2NPMFi5RZ7M5wRsM4RYTUdargvSMRyd6QrRQPCKNeE8PIgORM1yW8C9S1Wse39/aIbYOJLSYXJWcMwfqSd/myVQu62FcL6FlHxpCSkBYN2XmMD1RTDVfN1g6F9VT/F/hLnxdYPRYwaNG3tqX3UMiSsVbDQSUL+/MG7zzBVzgLMacht3BVT8Bp8bsuJbCx5N0tnhtYTK8EEfXQ6JVmb96QvpUKmX6DpnMq6fy0rjy0GcgGa4TM0otFFa3F2Ip6vaLe2OToDTdg0gITNUeqv0Hnx8bxY7xkY4NHPvAh2sUcg8FVtW5sUqTylnZ/hxwbAHzK2Fo9Buq1DXabfeZdQwAoXhzLFKAG6ypOXXcdzXxOVdeUyaz9YnReA+SY2dve0TBpH2guXcDHgHNgHAXEy+NVuQokKc+yRosixoRQPGySNuStt3+UV/yZj/BdP/1d7LdTnto+yVf/83/JD3/b9/OH7/oUpKh9iXqMhLoqXkQybCIwBkfJpWyWWjWl4p3VbzwB4wzgcHlljgJ9MUtb7uMAAEa5uoxMG5l2SJjWpwRQB4rnzjRvPX/9vT/Aw3t3Df3/l7/gHXz1yz6AVggemTYy7bkx7V0P3ss/e+eXMe/05ckNR87xI3/xu7n7uok+1Bam9V7FodaHl5Fph0hGpo1Mu8aZNj6njUz7XJh2+4ldbtja4f2fuX7ltXjpDqsOf22MsGhgfx+yYK0bmXYtyMizkWcvEJ45a6h9xZHNDTViHpCiL0ae/Z55dsRPefVr/hD3/cRZ2u34vPPs1XfOWM8XWZx9mgefuo2Y/ef8jPZC4dnn7E6xetvWWUIITCZTJtMpk8mkWJQ15HJtbY21tTXW19ZZW1srllhtoIhQTyZMplPqejLk+jRWj6lCRV3XVFXNshiV/s0HT1XXVHVNPZkQQkBEc0v2v6trVd5YtXL3xfF0rq9YIGUJr/78/b/yy+G+RZYK/eDwP3M/FaMsfR7LXmSw5Mtlxy8t/L11d/hMmSjD0WY4CrPS9sFLYGUCrU4QnrXlyyv2FvXlaYwqEqOnWf1bMXceuMyyyNzqUctwPMkr30sJy1v5uT/fgftY7RORoZ/6vpLSjj4MUXqL8NWkP++BfurbICvXH/53sF1XiFz5nVx5hKz8Wp7hmGeeZctPLO/bLNcW/Vy48lw6b/X44Sy9QqG//eX5DpzE9NewCoeVtWqKh4opIbvDBLns3ob5MPzLB6z1yzF7pp41w1oyl98cl8/qFUVplr87OD3NM59LhKH1z3AdbxNrVTNcT7DsN2uk7C5rxHIslgfLM06ag/P3yjtbbePBNi/H6Jn6ZJTnJiPTRqY9b0y7vIXPmWkwS2u0eRnBVPuOqW+Xumtk2sg0vfhVmXb3iaf45te+i8pplO7OfIN/+e5v5NELNx5c48/UuJFph1JGpo1Mu/aYNj6njUx77ky7OJvw2IUtBPiqV32SP/v6Dw9//t3H7uFv//JfYN5VKzd3ZSNGpl0bMvJs5Nm1zDMdioP69sAfV1s68mzlBM/Os3mseejcKdrkOQw8c1bwLuNMXjn7Zf1x2Zjfed0ljm/MP+sz2hVyjfDsWSKYDl7W9gML3HTzLW
xtbjGbzdjZ2WYx22e2t8/x48eZ7+1SVRVPPPEEABPvme/u4Kzl2LFjeGt4+vHH2d7eZlJP2d/fZ2NzUwvM5cykCuzPZ1y4eFFznQKT6ZRjx46ydeQIXdtx8eJFABazOXt7e6rMvWc2nzMrn4kpkQU2jmjodNMtyAiZrAp/mKRmUFwHxq0AySBLgyYcVLiX6bqDylGV7hUKk17h6XcIBxQeqOUvl8n9TJ9lUOb6X+910J+jt0iv5sQUSois6fNAlvstIOr7RFAviEUp7LfaH9pPBnOgR5azpW+HNXZQ5Mjy64F2WOlpW+7NDPeQs5Rcs+bA7/s+KA1fLqLLoGhE+8YUDbbat6udefkCW/ZH6ftVZSoHP2OGcRuGo9zKVRbdoJhXr1qudfk9DtdaQusA/K+YF6s3sXpJc8Wx/fG98h9GwAgHdMYVD3SlnVmGvKJYDW/G2uL94DTstz9WR2Joh+SVudMDnpWXYJdfjMv+ULov56TeFGVHJUnzeEvS/Lf2snMOMJNhKA/2X7kfGXrjs6D8mUDU36zRvL39MfmyQ575dPJZjzBG8x9frh+0D1bmyijPUUamjUw7RExbuc7nxLRnlKKrS1qCkWl9O0amXY1ptx49y9GXPcF/+MiX0qbAfrvGf3j/V/GNr3+QV19/emDa8lxX3soVtzgy7Q9QRqaNTHsBMG18ThuZ1t/3c2DavPHEWDMJHfdcf5b9+fJ11pM7x/j1B17Fd32xRWJEuk5Tg/U3OzLtEMvIs5FnLzCeFSZc8exmVs498uxge58jz9oYeHz7KMkEjOued56BkMXSxADSG2L5rDwDOLI2p+umXKQqvXJZZ5YxR+Sa4tlVDUz9CfrLr0/XCN7jnOM//9zP8XM/89P803/6T4hdw+OPPsZ1J07wL//Fv6Cdz/iNd76Tv/JX/goAm9MpG9MJm5tH+It/+dvZ2ljne//W3+aHf/hHaFNkf7Hg73zP32E6nagySIl/9X/+Cx579BFOnjhOR+aNb/gCvuIP/SHCpOYXf/mXePsv/gKgeQy9dRw9foxjp07yG7/92/z2e9/Ld3zHd/DEhfO0KfKV3/gNAHzgPb/NJx/4JHVsmBhLVXuqqsJ5nSw5dn202nDjxoIWbIMoJUNpylRVX2iuLPoMQqbrOqxrwWr+1JiEtmmJXUcwpYCZZGLXkUuuaWsisetomoacM5O6pnLLoWlTVDhJwmhyUR10axGxpJhYzDUMNgnEDIumZW06oaorpnUgtXNSjMQUCc6XRQdiNSwPoI2RzbUJi8Wcvf0ZH/3YJ6hCwJeckd5XeOeJXSQlwZqSrsdYVXox0RYwJBvJPtI1TWlvhlLAVUymWTSIpxS0A2/UkwSExf6MxmZM5anWpzjnh8kOqmB70RyhXrODGjfktewXgTMWHMQYrwjnM8YQqgqcQ4snao5SrNUQZWs0X+mQMzNjjCfUmkqm7Tq6LmLaDslgjMN6S84ZY/SzQ5CgUa8x4zyIgZSwTjRU2bsC7Uwq9+htKeJnHVEuD0PswbMEpDEWkb6A41IBKFeK98cBZW91/PqNDmD7XKJitLAmYL2AZHAeSWr5NwLzWYsn4hHs+rp+rta8v5NTJ+l2dpDFglYSkrR9k2lAUkfX6P1MJSEpklpwcYo3ECyaM1eJfFDRG0AywVm8gW5/l7CxiTEecqbZ2wURUtsy295m3WacgSo4XMnja60hxqieRf36w2mRwZRIKWIdWCu6vTOCsbq+VTIxdf0WaRBpE3QCzrPbtkOe2uAsGSnhpyv3wcrmrpwjpTzMnUH3ivZ/zpGUVuaw0aKXiIYX+zG193OSkWkj0w4T02J5IMtSIu95bkyr3JXb4p45NoSRaSPTPu9MW3T68NYi2JFph0ZGpo1MeyEwbXxOG5n2uTDthusvcsepS5jU8Y9+8Vv4qd991dBvX/Py3+If/4l/zqJpiU8F/E7LpjhcW7zXR6YdWhl5NvLshcSzGGPp32qombU62zWN4MizFwrP5LqWeVvxySdupbYW56/OsxwT77/vuI6LVVP05SafnBNd15CajvjU2WuGZ1c1MPVqzFgtMPU1X/1V3HzzTQD8m+//Pj72kY9iMUzrGm8tkjUn53xvn3a+wNtekemk3drawgCL2Yxz586xsbFOmNZM19fYm+/T5QgCNiUtHuYCVVVTVxOqqkYy/PRP/RRPPPUU7ULzL548eRzvPEQt6hVCRT2ZAIZQ1Vw8f4Gf+6n/BMDm+oQ/++e+gV/7hZ9n1jQkLKEoGIOuP2eXuVCdcyQDUTKLrsGW/JJt20JU5dWvnS6rgrHWkrqEKYtYrZ0GHwIktZZa56gmNXtNS0odixxZdC3Wam2FtuuY+GWBtMp5Uk4k0bz5fR5LMCRj6VJHEzUX6qvf/MXces89fN//8c/YNJHkN9icbiCxwzgtQqZ9p7Mu4RCbNJej82wePUY1rzDogkIM/dzavrjNZFLhnSEnLXRnBMhQUqCqMgByFmLWwnJdjKTY4Sz0oY7ZZoL3w2KbVMVyK0InnXpwlP60TvO6pnLu3jsAI6SYaduOtouD8l61cmt+1cA/vO97+OSl1wxz+8/e+z7+0N2fwniPeKfKENS7y+jPFKUvxhJzRkzC5ojJumxS0nGIXYexJa+tc3Qxlva54m9hyrh7jNFUaiYLzlmMsaQshGBxwWJcmVDWIWjBPqHArRSKe6ZQW0FhZo3VfyXP5vKvBz9inQOxS6Uz/N0sUygAVl1htE+sFmPNMeqGyKgCxzhAMM5gJhOO3XIz++dqds+dpbOGmCMuGiZb62RnkHIdJCMxkmOGtqMSvd/5zg6bszneBggKVbIe2+ztapE6SexdOMcRA8Z5Uhb2zp5VxZgSNnWYrO13VcDmhOSkRShRWPWe/jmVInzFU0anV/neGWC1L1HwXSZVpaH13hoWIZCzFvpsU1eOF5AS5uvcwfOtiDEltJ7ei8kgJl9xvDFoTTLrdD5cTqRRnlFGpo1MO0xM649VXeqeM9Oc91yhQYzBWAc+jExjZNrnm2mm9F+TEt6OTDssMjJtZNoLgWnjc9rItM+Fac4YnDUrnXIZT8gEA3mxoM0WnzJ1GJl22GXk2cizFwbPPNl7/HRKWJ/iXBjONYgBY+3Is88jz/bPn+Elpx7Dm0gnFU9295Cd/QPj2SOPTfG5ogrd5/iMViLcruzmoT+vNZ5d1cDkrMWQcd5z8uQJ7rnnpdx5xx2ICD/5kz/J0089pZA3FmsMzhgshhQTKaUh0sQYg7OW4D0P3n8/OUZySsQYmdg11jc2aJpWI86AgNVQRREkQYqZ7YvbPPLwIzzwmQfYOnaUO267HYCTJ08w25/xxKOPqwU0a+GplLJaYbvEIw8/AsBL77qTW1/9SiZr63RxlxwTYMhZSv3Q4j3ThyIao8pVNM/qtJoSY2Jvb0/DT/MyRFS/apifRtoIYtTirxZkyPThpJkYhWpjymRzk7WtLR7/zP0E43HeECWRy7kMDKGlksvPZQZKzmSjneZqzTV87KYbuP1Vr+Cu19zLYraPEyEbMM7igCCORKd9BSQDXZewVgjOl
Ws4gq/KgsqDBXo+WyApsTat1brZK1MN1sOsrgrtgmEu5KyKPZPUI62umEymGiqMoa4CkhJZEiKGLAlbqdeK4WD4n3oxZWJMdDHp1y4SQlCI5IREpZO1umA+tfM6Hp/fMTTv1iPneeWpJ8BM9aVc8RYwrsDHWn1hZ/SriEJKYCiwmLMMC9CWonXWOVLub98oLNBNhGCw1qj110qx/uscC1jVO73iLdfVeVjOUXQuRnQTZpe/l967pFyLlfBUheQzKBZjAN2slCMHiAxWdusQSluchjB3KZV70/zCQ0FOkyEEwtYWdYp0qYNmQY4tgmArP/QFKCj1ewcx4sXhgdQsaHd3QMCtrWtfx0zXtPicKAjUwnT7e2AdMQvN7k6BfMbnqBtMEaxRxZ1FkJRwrrycLfoppwQ54UQ3Gka7mCGE1gi9u7fkROrilYpVNOQ2KgHpF68k7dz+BZ+1mk9YPUd6WPZzSMPXrTVDbj0NaV+J3uvnBGb5wtDoPB/l2WVk2si0Q8W0ss5zVgeK58q0Va+wpZSdaW9oGpnGyLTPM9OWfxqZdkhkZNrItBcC08bntJFpnxPTRLCSyZKu2A/lnIhtS7ABb8BkjZLSoucj0w6zjDwbefZC4VnOmeAck7paqXu3IsaUVKUjzz5fPEvSIpOzxLggJourwtAX8PvPs9m+xQs4E3/vz2gvEJ5d1cBUB49IYro24TWvfhW33nwz1193XVHACXLGO6dWOAzBOtbqml3tVbXQAVY0FJEU+Q8/9uM08zmbG5vUoWZjbZNTJ64jth1kwRrLdLoGUYiLyGJvxt72Lh//6Md55MGH2b60zdu+9G287nWvK52Y+djHPsb7fvM9SJdYzBdsX9ymbTrIBiOOrtWJGGMGsdxw4y1c5EmanR2qqiKnjoxQeU8VwjBGXddpeKMxHDt6lFMnTjGfzXl4f0HwFTF2xNgv+qXiq72HLHRNxDiD8x7rHZ3VIsqxa9ndn3HbS17Oy77g9bzsC7+Q//N7/o6OWFXRWaEjay5UIFhfJod6ZdTeIyK0TYNIwlWBja1NAKpjm2zceB1/9X/6bt71a7/Oo/d9htlTT7PuHLVx1BmMnwxKAWB/MVdlWFVcPL+NN+AJrNc1MXakku5rMVuQY6d/d5a+WJoPlY6vGGxZxc5ZvHPs7y3QEFv1JhERjLVsbGywtXV0sP47DLFriKlj4g0RQZwq35Sz5reMOp9SjKQu0cwbmqala/Xf0ePHkZxJMdI0TXlBop4fl+f1FtGwXkRw3mFKuK73NZF+naml35Sc1b6uiTmzN9OwYB8qfFCvhBB6gIDHKYhSwrilRT8lSrikRcpmJvVhvEaBV3Q3Yq0+UFmHF0H9ejQMNqPrxBdlnEXK3O4Xvin32CsJCrRWlYEc+NrPhmWNht5rA3D6o3WWLkXmbUtlPM5bvDdI1NOYovDTdErtTzI5ssHO00/S7u8hqYPgB0UK0C4arA84b5A24q2lFoPrWnaffJxqbZO1I8cAmM/nNIsFW5Ujth2ShOA9i0sXSr9Ad+mibqyA2pd0UQiuB2BKOr/W6wNzIcWo7cu5hDyXnokaeqtt7udex2I2X+k/laZZsL+/ByQ9V7lPb23ZvFAKVWq4rFhbeLAcF3G6PUgJUk6aS/jyPMorm1ANYz0Ymj3K1WVk2si0w8S0VJjWdZHFvBmZNjLtEDNNj/fW0Ns4R6Y9/zIybWTayLSRaS86puWIxETuOh2bFYltx2xnjxuvv4FqUmNsYNFF2nZB17WMTDu8MvJs5NkLhWexaXD1hFBNsH3Ux6pYiwlh5NnnmWcPPT2hne1B6jhy5Brh2eoz2mX7oGuVZ1c1MEnquOuuu7jhhuuZ7e7yM//pP+G9Kp1v/sZv4nd/932841d/RUNSJZNTJMcOK0LlLNNi3a68xxlL8IFv/sZvxAP/8cd+jNg2XLpwjjZ13HjqFPVkQk6J2fYubbPASOLk8WN0s31uv/1WXvbye7jvvk9x4vhxqrLR/Lmf/Vkee/RRjmxtYq1hfTLlxJGjTILHAkc2N/mKL/tyAHZ3LvJf3vEOds5fwoplrV5jf3ef6bTCe49BNOywDLa3juAB5ziyuUmwnnkUmtmcY0eOqtdEgZnQQU4gEHOmqiqm0wmdWVoWvVNLqg+eRhKves2rueHuu+DIJrYK5E7zdVZ1BX0BQAy+CohkUvG+6FNuWQs2G2Lbsb1oAGi6CPWUUAXe8of/MLfddDM//a/+DVvrmxgyi8WCi3s7at10lnD0KIvFjCwZC2zv71E7x9R7Fl0sIZ86uYLzmCzM9xfUlR8mWGXD4CVByVfqgmM6nXL+/FnquqKuAyIJ6x3ee9a2NqgmdcmFqpZ+UW2iSsWA2BI22RclpPfaUEurxWCdB5PUwp4o5nYZ2oFcVtytF6tWfjFmsJ/r79V6HqwlWocLHhM82RpiinSloJueOtOH0IoUpV2s8MYplKdr62o1zpn5YkFGLcka3Zkx1mgeXQFZVcRiAIvzAWsMKQk5FWVcXA5ijCs3ZAaYHlDoxSuMy8F2mRz464rOsH0MMqVP1aVClRyJLiWy9MrH4aqKOJngNtaoqsB67jAXPXG+TzJG11lpX9NE9TAKNUbAiOCAqbOk2JJnuyxS2RzFiMmZ6WSdeW6JKYF4PLV6cVgLdVXGAyZW8JKxIj3/Sj8IKXdY64dRl5yQGEmSwEh/i3Rtgy/5U50vobvBUQV/RX/mHBFJeO/p2gZE86hahCzqdXSgbIqIeiKZ1XOod4SI4KxDjJCNFhHsx7b3XLoydPXq4zuKysi0kWmHiWlDorvMyLSRaYeaaf1LPBmZdqhkZNrItJFpI9OGX75ImPbI2SlPXqjo2obzO5MD/eScpZ5UVN5jJJNiiyMhOerL05Fph1ZGno08eyHwjJxx1vJLT3wVP/OB7+J37v77gzHwChl59qLn2eozWs4H619dqzy7qoHp+LFjTOpKX5bmxIXz5+nD3h544H66tuH2227l9a99LXfceguSM//l13+dS+fP085n/Omv/VoANtbXcQZ+7e3/mUcffljzBS7mfMVX/CHWt7aIRvj0pz6BtRrmun3+Ine95A5ecfdLuPn667lw9jRNM2d3d4c777ydY0e3qIJOwJe+9CVMas8jjz7CbH+X6647wRd+weuZhMCRzQ3yomFvdxeAnUvbXDx3gTRvCCnjSu7FHFVN+ZJ/1K5Me2fKompaLs0uMtufUYXA2mRKqit8Cb/b3d8jd60qJqPF2qxz1FVQhZ2FbqFeB9ZYal+xv7vHYm+f9S4yqWoEi7cWZ4yG+Upv2RXNtamqTUNXc6bZn4N1aiGdqaJvz12keeoM9cnjTOs11qbrBFTRdCnSSWT9uuMYYwjTKXe+8Q285zd/i4tnz2NyosaQy+SsolPrcJlcdVCTcs5C1ylwTBYa0yBeQ4N7y6tkzdnaFxDLohPZeo8PQUM6JQ/lzDIZ4xzOoaG6vSLOgi0eCrZfFVGQ2BublvqnbVvICsT+YaUPXb7cM664ZYAtIbsDF0pYp9X7tKJ5M40xxJL/NYRQTqELzzkNpTUi
BQKGPh7TFMWWCwRy8cYzg2LWTYB6WJheG6nSX1EMgnoMOOdw1gFmRZnorfTKvr8PjBmuc4Vn4Mo3eqnVz2nIJFyuQNSDIYQKV1danDJlcvE6AA3VjKXhAvjJFFftk7pW2+lDycMKuYnFy0EB3+d71bQHERMNUkIwvS4crCScUgFHxkrClOqLlS19gBAKQOkVvTUYLA6HFqxMSNmcOAPZGqwsgdD/GzwxbN8fyzyxq2KdxXuH845YnGz6/uz/63/OxZui9z7K5eBc/pZzxmJBlscP5+rHXIZZ/AzjNMpnk5FpI9MOFdOKnpWUR6aNTDvUTFuqkDwy7RDJyLSRaSPTRqatfurFwLQPP3YTD509wR975QdJcpAVptRAiSni0JfJEiMuVLiRaYdaRp6NPHuh8OwdT38977vw5Ty6ewv/7L1/nI+cvu3gZBdl2sizFx/PbjyyR2CByR0WYa+ZsLuY8EyIuFZ5dlUD0/WnTuGsZT6fYa2lWcyJKWIw/OZvvJsjW5vcdustvObVryJ4x9nTZ/ixH/kR9rd3eN2rX8O3/flvBWBjc5PFYsZP/Psf4APv+128Myzmc9721rdy4tRJLuxs89M//VN0XUuKiZ0LF/lTX/MnufPW25hUgd9416+zs3OJJ554lLe85YuZTity0mJ7r3nNvRw7tsnu3iXOnzvD8aNHufGmGzAS2Vpfo9nZ5anHHgNgPttjvjvDxo7eX9PhVNGLqGXdLgthpZRK0SuY7e0z250Tu8SkqllfWyMbqIuls+k6upTwVdDF5TXf4WQyAaP5XNuZttmIofIVp594ksnRoxw5dYraeUxl8MZicsKKhvj2A0xRPtZYnHPklGm6bXw9waYMpfjg/MnTbH/qfq57/WtJKZKbRG0cOSVSijSSuPmWG7DOsX70KF/0NV/Fpx9/lEuLOSnDfL6gi5kOmFQem3vPBQh+ijWaOzZKB6VIGTmTqprgPKF4eOSc6bqoyj4nYoqIqIeFrwIYowUKe7EUz1qLRNFifiKYLFhxmExfMgCJGrbaw6MP32wWi8FC20c7iQjpgMV9WBlQwlCzHljaraGO1jpS12FSRpwUr5EE1lHXGurYxUhMiaL2izIHrCGLetxh+7yrOn7q2CDY4iVhZKkUrTEanqot18+Ift97LWgxPYcxlkxazo/htno6CBpq2yt/ueI402t5luGj/Sf7X5heiecMov1SVTW2qogIXY5Dm3PW3LdZjPZbF7GhxlY1pl2Qc8KGgHMlx7fs6xiiRS5TVEiTM7a0ySa9fq9ETe5wRpQNSNmIRCRlfEm+bEBhQC6bFsE5VfN9guYsafA2sUZfpmloey6bNYNxtngCrPSdMUMhvVVx3uErj/Oedm4ZUKr5PQr3VYmbbJACf10baRhxKfNIx6so9xUPg96zYciNOswbyyjPLiPTRqYdJqb1/ZHTc2dazI6dxZQs/ZoXNqsFwaeRaSPTKMP1eWfa8mFiZNphkpFpI9OudaaNz2kj0z4Xpu0vKn7rgZfwq5+4hzff9WliPvgqS50mPF3bQbBY48ldR7VuRqYdchl5NvLshcGzxE8+/u08MX8JAN/7zj93YJ6v1S3Tuisv90eevdh4duN1e2xV+zjRqMnHLpxgv107kN5w2W/XJs+uamA6emSTtm3pOl0EMadi0cw88MADOGuoQ+AD7/1dJEZi17K/vcPUB9599gwfes9vARCswRvDyRPH2Fpfo/aeY2tr/OgP/SBJEvuLffb395eNz8LP/NRPUIdAcBZvDZNgme1e4pfe/vNa3LG8wO26jj606z//9E9RTyZU9YS27ehmDd5Y3PFjgOYgDSQqZ3A5a2icBesM3lm8deS0nPTBV+S2oe1aZnv7ZW9rVQkkLYgXi6K3zhKqCmO1+KD3vig0yt+NhneW/qPNfOYTn+DiubNceuQRdp5+miNbR5isrdPEROXDkCeUMvipeANUVbGWhkDtPK722C0998Mf+zhPPvAQXy+Zhx96iCcefxwWLZ1AmxMLyVRHtrDOETY3oPJ80dveyhe99S3ccdc9/E9/7f9Bs7PD0bV1Fucbjq1NWSveEl1UbeuMoY0J5wzeqQVYc3FmjC1WoC4WFZHpugaRSKgqXPGwKHZ1tWAbwBiyVaNyiVMt+U8jRjI5LXNCphRJXUfKmRjVGu1cGPKfIhBCAbaUEMHLRJCy6BWcWjzW4OoaU9WIDzSzhXoK5IyTjPcB45Z5VHNSmE0mwxmJxXMiZY2cDW07WPKNtVjrNaySFWs+RpWfWWa0NM4PizeLKlzvOaBAlmGksBo225+zn3yqJJbpJwZFiwwQl/7oAgpzQPWDkAfvjBACMUMyBpzDllQ7JidE1BMAIq0krAG3NmFiM3F3V+8lS7GQq+dA7CKYFsmadnVaBWLOWOeoam2rsQYhs9jdxzqn4fHGII3C3QhITmqhF6HJSYtFIuTU4awnx0jXtcSc1GOo9JWzCtSYEl3X6tzxHu88KUdVtKV4Y04rXi4rYi34OlCdOE63mJFLQcaua7HOFY+kUmAQ9VC6Ih9veemXYyzhv2YYEzso8pUNTDoYRjvKs8vItJFph4lpuWw2c0rPmWmf2n4d3/Ohf0WbNRXI1Ld8/5/+PrY22qK/R6b1bRmZ9vljmnVl7LJgw8i0wyIj00amXetMG5/TRqY9V6Y1HXzXD34DZ/c2SNnwbf/2LxHzwRddIkLMQiLhbUZsmfdmZNphl5FnI89eqDxble/5hnfyujuexjk/8uxFyLNPPXkjJ9cucffxR2jaGV3cRCTjnV/pz76fr02eXdXA5ERwGLJxiNGibpryCVxbEduGdrZPMBZihBSZeouXhM0dVkohui4TJTOZTsmxIUvEG0s730ckY7rImg/DZDMYHAaTBe8tJkVyzAgZTbuaSAUKXdNgRCevc4FJXbNmLa017JNI7YKd8xrG6awhOD13cBZvHGIy1grG6WQKrsL2i0kgdom2aRHDEGJpnWfeNur1WY713hOytlH1S6brWoxfWhVDUfQpJ1K2TJ2l293j9MOPMDGOkMGkTF2UfK/os0G9UI1CJLVaBGy9muKMw1tD8KqMZ4sFbbvPb/7S25nPZuzvz2hme2RZo5XMPLZ8+jMPYKxh88hRXvnAw1QZwtoGYWuLzeuvZ/PoMU5uHeXJz3yGdSwTq+3usipm5xwGR86ZVlRhi/Va6K5Rj4Y6eLyD1LUYcRjvWJtO8T5oHkrr8L7vP6GLHalLqnyshnDGmNmfz5i6CknL4o2pWOyNMXRNCxgmVUWTMzklkKyKAMC5Yk0/OLf7cZSS49sYp5ZcaxADYgy2qsFX4DxJTCnE6PrIXUzx6pDSJt0MZaQvkmf9UNhPjOCcLD0JvNNw22LpjzkX676e3Igr/DOQ+82H3oeQQQzGHPQ66L0IVu916aR8paW5V4rG2DIMQtO2OOuG+gwDjgz6xqmYu2PqkCwY43DlgmIMxlUQEs54gs0YEh0RTGY6rUn7s5LPl5Ln1WJyxqSEE6P9YRmKS1I2UiXNcMkLWkJEc1fArt4jrt80iLCYz3HTia7lmDFOYWB
FQ1ZNNhqyChinln3JkJOQjLbblXQMIsuONGJwZgnYXpwUT5eYdFOSeijWxPICz1qtfWGtXlskD+0fuhgGj5Pei0THseQL7nOEgM6vyyf2KFeVkWkj0w4T07qi38rzyXNjmvG0eaXmgIEqRLxnZBoj0+D3h2lSHrRDqLEmjkw7JDIybWTaNc+08TltaOvlMjJtybT7ztzAT77nbi7M1kjFqNSmK19jGaMv7o0LpX6XRoo4Y0amHXIZeTby7IXKs1UJXggeBDvy7EXIM3EacZaTkDshdZkU9RltuP++z65Rnl3VwGRFNCy55BHUfIJ6URc8bdfQRbXkl8A8gneYGHFGCMWEm6VY20hI6jTvpQ9IjHouoVjtlo22Ral560ix1XC0nLDBDVbTfgb0neqtpzaWNe/xOdMYLay1mM0AqIKnnk6wGJzVvKcp68QyVr967zTXqEgJmdRBN6UwsnUW5x1tCXf1JS+mtQ7n7DJvIRpWlnOiDxe0vlhok/Zr5RzStuxeuEAwVnML54wLQS2dVi2h/YI3qEUxti0kofKVKl5jCb5YLlNHbBc8+KmP431AxKiyDYEoQhsTZ86cBWNYLFpm585jrdUQ0xCYbm7h14Ujx07w8KfvJ2FUcQEpR3IWxOqETyKIqDXTieYCTUULOmfI2ZFj0qJ5QAge65yGaFq1QPdWduma4qkiOOOH32s/W8gMY55XFkaOSfNTOk9nHZIzIgx9bkQGZXtwcvfpYlDYmN66r8pSAFM8B6QoOGv7AOeijIuFV1DlnUSVPaLzyTqdS/299B4o/VLtNzYYQ5aMETssXEMp+tYfuCpls7W6xvV7059x9eBhXV2ZL/Pg7wUpxSMNjlUvhaJ1CkhEzPI++/+MwswYB9bjrcU7gbqiazwET5hUxPmCJLrurXNYsSCCybnce1HwVkPEe6r2aX16xWhElkWBSzLcfo0kgdhFpBbos9UJRdEDWNVXA8NUe0kBRs6qVJ2xmgd3ZQx0LT4TNA1WBGLSXL1FJznndU2UOWuNuWIcLh8Va/oMyDKAehiHlZ9t0WHLMRrl2WRk2si0w8S0/tz9wn42pj09v4Wn57cNc2qzmnHL1nms0TRoI9NGpmkvPHem7TVrPLp9fEi5GGzHPSceZ7OaX8E0gzKt99KEkWnPt4xMG5l2LTNtfE4rdzQy7VmZdnpnk3d88uV8dhHuOXWa245dHFI69fPEWqtpqEamHWoZeTby7AXLsxV58sIW1x+dccOx/ZFnzzPPrDGsr2nyvy45Fq0d2vb7xbO1KjIJqkdyokTL5SHq6GCPmWuSZ1c1MI0yyiijjDLKKKOM8vzLv/zU3+C95942/Pylt32C7/rCX1CvPOrnr2GjXLPynideyf/+a3+U/pHjps3zvOMv/HW2jm4B689r20YZZZRRRhnlucqPfdv3cc+Rz7B/6fluySijjDLKM8v/9jNfxBe/7An+/re+8/luyote1uqOL7zrEgk4vb3GJx87+ft+zZffdJ4Nv8ti9/f9Us+bXNXANJlMoG3JsWXe5WI51r/V0wneWVJVMdufEUQIojkX16c1kyrg+owuVsMzd3d3SHWN1DW1D2oZzhlJkVzqe6oFXnDO0wF7O9tsTALBGoIFV/KBxt6CjRYucwK5adm/uE27P9MiW7mjDo6tY1vaDjJWErlp6VJSK7lkqklATGbRzAksPQlm2/vErtMCeNYO+U8TQsISU8aVwqRt09DFli51bGxsqIVZBElR+w0Zwicla/GvjUmNyag1MDt8FXAhYLwD6YtuQagCMSVSSqS2o/IBHxwuG7z3ZISu9EdwlrU6UMkEFwJRDMkabrzjDm6856Vc/7J7+D/+wf9Cs1iws32R//CDP8BkY4P1o8c4ecedfN23fBNrW8cwoiGIO088wd7ONgBTgShCTB0T74kpE1PEO4uNEXFCKPfYdR27qWWyNmG6NqWeTkglZ6RxTttd5gsCpUwd1lgm9YSYIs45qqpivmjUQ6oYjmOMWtQtJVwpjpij9o8Ui/IQ5psSi8VcvVlWRIpHr1rzlxZ46zz4AKGi9hVYp/lPrSd2HTEl2jaWNTBlY229FIsLxJRZ7O8z3ZjifKXnGqzGxdOr91QQltZ4yRhnIaViyQeTBGszTgwpJ4xxxYJd8qWKkPtFg1mJqpFn8Bi4UnrL82o+VWMsdV0dOK5tW3JOOFeKPGahjVpE0VmDt64vx6fOhT4gREQ6utjRtQtmzZzUtqxtbmIn9dAnIhGbgASpbdQhAEjG4kOFcW6lfQKiIebO2sGLQ4tkavh51ywAzRHazGdsrE+xNlB5p/lXYyJ2kbqqsJah3UjCiOB9pWmBSk5eSVqQT7LmJwXIMT+jV0q7WDDb3kH29sld0og7hCa2OrfLPeecdQaIep4gMniPDB4mcrAw4pAj2pgriuqlmAfv9FGeXUamjUw7TExrOk11l0te72dj2ne+4p/yRed/i3/+if8RgN967OU8vnuC/+WP/iRORqbByDQ99XNnmqQ0zJXS9Xjv6ZqWGNOSaeVemkWLrUamHRYZmTYy7VpmGozPaZ9NRqYtmfa/vf3NfPiR6w/c94n1PX7iv/lXeCKkTI4dd2w+QY4RI/quwAaHdx7n8si0a0BGno08e6Hy7KAsmTPy7Pnl2cVW+I2PTHC+IhEYFMPngWcnpzscO3aeB87dCsYO7f70k8cweYPcniTHli55vHX6PHZZZJCy6Nrj2VUNTF1OREma57H8kxJfd+8rXwk5MdvZ4dMf/hg3XX8961XFkw8/iCmK0ZbFdurUdRw7usXepR2ObW2xPplS+8BTjz9ObBuMMaQUh+saLCeOH2cyqXHGsHvxHMROixIHN3S4ftMXrbIghhgTSTLWWTS1osWXPKFGkm5gzPIVsS2fFxFijHQuDoo+5gRG89MbZzUnqdFQVYwOwLKDRcNGS/hYPwltKUyZV+aLMRqOVnmv4XPGYqSEcZZwwJxSXwcNH0JRD1rczjunOfMNuCpgJJOiHuy9Q7KH6LDBI2LwtXD05HFO3XIzN999FxvrU5wBi3DuqcdZX99gtrPNbLbPl7ztbRy57iTZT3jZa1/LAyKcKw1ZbO+QUsZaiyk5HwFOXneK1LWkGMmxK72hfVOFgKtqfF3r56zm3RQyXYxowTcp51UF7awj51KAOmtOWfIytLAPCdVaTEPAZwmXLA8gVr9eak7wvjOvYh43AHAm8cW3P8BNRy4tlbAxYC0GSCUM0qDhphSlYoyh67pBEffXc85p6Ggfrlhy2PZFBS8XYwylA7SfStipLddcKsGy2MvE6cOVe5GV/5uV/3/uYhhiOTFF8S8na86ZlDT9kkZ9lqsJGE0aW8ZASii109zJWfsqpUwXE7HrVIdQivQd6BOGSFgRIadItraE6/apF3QFuFCV0Hmja7DkkNU+y8PXnBX6FCAguYSipuF2pdxnLgrdWQdO15UxggbMolCNcfj+mcJNQdsyhLg6vZeu6zSbRAlXzrkvMrns/9UziV6iHN/3SSq6tw/HLZ8rIc5j2oXnLiPTRqYdJqbF1A3a97kwbbs7zs
X2xNDvE99wcnKpGBDMyLThcyPTnp1piQ88+RLuO3Pj0O5Xn3qYL7ntk1qDAHOAaX1x2iFF8ci0QyEj00amXctMg/E57dnlxcu03Sbw/odv5iOPXs8TF7eG9tx749O85e4H+eI7H8ZJJMdEt5gDkLKUtF4d2elrZO811dTItMMtI89Gnl3rPJMEqdT8+WzyujvP8Oo7zqsRceQZPI88yxku7Vp8sEzqyLH1BRf21sjyX8+zGGG+WPa9c4lja3NyasEIdmIgwqw1pM7qM9pl86Y3bF1rPLuqgWl3PiPlRCyW29Wm/Ymv/mqsCKefeJLH73uAN77xjVx37Bg//kOPkkTIRgcF4LWv/0Le/EVv4onHHuPmG65nbTKlmc35iR//MdqLHZV3xBSHG6mqinvuuYebbrqJ40eP8Cu/9J/ZPneOpmmpwhRjljkjcyk8hliMc+TSucE4hEzGLvNzGoNzHl+UuTO9Ys2krBOyMd1wbue1gJwBXPAktEhaNZmQcg+X0pHOF+XiUXQYtb56WwZP0Hyw6o0hAlIUvcECWnhOyh+7LpaJqopec4RaJvV0eImAtbhJjSFhG1WM3gawQtvNEWcRo/kmj5w8ztrRLcy04vbbbqZdLEhdx/Zjj1GlBbIXOb+/y+yxh5nWNdX1N/Pat7yF2DSD8nxk55MsYktVB2Izx0hiUgfuvfdVnHnqCS5eOM+li/vaNGPUc8JXmFBhQ4XzHmtATKZLDYumGyZpSonJZIL3lW4UMEjKtIuGtmn1xX6/JLLmiBURYko4p32h3hvFOlsU/eOLe/hHn/7nw9ytfeRvfvnbWdsU8IGMKfDRIntd12luS+8Q4zC2vLDJmbZTiHmv81oVU8L5QEI3AZPJBFuK7DnnFQL0w9Xn9SyLWWeD5m81BuutvhAqc1gVP1gUGtaqh0kPgWUOUyjZW68wfDwXr4KlNls5pyzvO+c8sMCUNdRXv5MBttof3jlysqSkmy5ExzZ2Hd1iQdupB4iI5u0N4jBWvVa8cYhAbBsF/UrfGaPru/ZayFDbkwZPFDFm+Rw63JbmYMYaSNrWFBNZEhJ1PQKk2Oomw1f6Mq783mAxRl/J9frPiHo5XS6ueMfEnIfCl5TPmZJDGTSfrxH1fOgflpe9v4QC2DK/cgGmKvtgl+fWIon6IDV6xj03GZk2Mu0wMa3rIv3Kfy5M+/EHv43fOfOlw7x9/fWf4Tte/3NMqi2Mr8C7kWnDGfTryLTPzrR/+94v46GLS6/wb3jVu/muN/2C1jSwHoGBafpAo22x1o5MOyQyMm1k2rXMNBif00amPTPTYgePnd3i7/6nt658Qggu801v/CB/66t+FYlgskVIdG2r5xbBe08zm5NcxDmPC35k2jUgI89Gnl3rPMuWEunzTC/hBW8z3/ZHPsZr7ziLFCPSyLPDwbP1as5LTp7n/PbNpBT+q3l2fnfC4/NjrK1njAhTt+Cl1z1B7BqcMUyqGmLDw+eO8vD5o6TskWUuCryNBH9t8uyqBibrHTnpRExdx7d8y7dw2223ISK8+13vYvfSJbrZjDtvu53Z/j5Ptw033nIr+7s73HzzTbzlS95c+jvx0Y9/gle84hV88MMf4uKFixzd3GLr+EmcC+xdusj23j5ZBO88Jze22G8anj53lseffJzHn3gKJ4mt9fUS6mVWipL1XzWULpXCW3pdIXUd87l6tQRnsZWjS4lJXeFDRWx0UqVieZzNZ0NnH988xmJ/Rtd1uKDF9HyomEymdDHTdZGuWLyNEbyxeO8HZY1o8TFbTKWpK96jV4SWadihMQ4xhqZtsd5pXhMRZnt7hOCpqqqE+Kp3R5MjtmvwzlLXWn9BgicFz3y+T5szbUo0TeL4seNsbazjJfO1f/KrkRTZvXSJn//hH+TY1hrGefabxH/5hZ/n+ts/yS0vfQX3fNGbec0XfxH3fsHrAfh3/+Sf8vRDD9HO56x5R+0twXkWsWPeNrRdq4sKMNZhvMOGQDKGNmVqm4mxKxZfw+58rhMfgw8VdT3BWkvqIu2iY39vxsWLF9lc3yjeFmUJi9GCaDENll1X+mCxyHRtS9M0gD6IXC66jmxpa1EGlJBZAUkRiRFCKZZnLSkL3ofiSaLLJmdYLFqqiSdbA8ZS1VPmbYsHJt5fNs6mQEw98HovExGjELRXKmUtbOmAUpSQYo3uLeTLA/lcvAkOAkDn51BUT/0WAKiqqtxD8dexjuA9yWgIrT5g5OU5nUWcw2SH9R7JAecqkm2ZL1rEWCgGmmg6smiopRGj88VavLW0sSPlTCyeFM56vPfMy/j310spYY1VDxuvkLTGsDadqudDF0kxYp0WC/WhrM+c6HMuphh1PriMpKTQ7I8RMFkGj45+m/FM4ySiSr3rlhsY59ywJkC9LT5b8UOLIZOLx0Esm4Gl94IxBr9i3FrOh9E77rnKyLSRaYeJaSFVgMF5hw/hWZl2+YbOWkcIQXXJyLQD7RqZ9mxMy/zdP/yD/NpDr+HfvP+PA/APf+Pr+ZlPvZl3/KW/g9Mnu4FpsXj76rimfpBGpj3PMjJtZNq1zDQYn9M+m7zYmfZ9v/4mfu0Tdx3oE28zv/M//BPuvu5cKW6e9IV5Fki9N7lGBPRpjRAh5/JCdGTaoZaRZyPPrnWe5ZSK3r9S1998bJt//M0/zdGTgd5YOPLs8PCsWczY29thNjtGlvrz/oyWcyLnSGxbXWshqPErJS7uTvnOH/t6Ls2nfQfzk9/893ndDQ/CNcizqxqYQlWRW8HlzObGBk89+QSL+QwR4TP33cf+7i7SRe666VZ2dnawCPV0wny+z3R9nZtvvhmAp596iu3tbabTNS5e2ubRxx7n/GSNL37DG7h4/iyf+vjH+LI/8pU4Z7HWsbm1xWOPPca5xx/DW8P61hZ0LVkSzpQQrv6FrwhICV0TwRTbnxFwpaMkakfHFJlHQWKHsx5rHKvhpiEEutgiota5yXRKM59rzZ+UcN4NFvLowIrDl8GyIlgD3lliSoNVMJcQzNW0WtJ7DKzYgLMIkjuyCF1MeFNhi0L0VcAaDYWTlJCcASF4r4YWIBQLdBIgZw2ZjVnbJcL0yBbV+jpihAc+cx+xbWkXcybrNcdPHiUn2H7yNBeefor5rOHimfN0TcP119/A5sYmAC+75y6Obayxf3EHmyKL3W0u7O9x/0MP0c326JqWroTMEhxVqDl28jqEjGT1jHDGkUUfKIL3OgFFxyZ2HZ3zOGtomoaubZEs1FWtoX9lLuckJBLWO7XimpIT02owZc6ZZlEeXIpH06pIr9FzJlurSr0oSj+p1ePXWoVVGbOUMiFU+tTTr09jMaaMtxhwDmctIQTNgyqmKELKZyyoz4H+yhgNi4YS3uzoc5qa0p5+IyM5a1gi2lSzMp+GzU6fQ7D/jKxapp9N1BJ/+WecW/ZBFl1bakjplSpkKW02ZYiMej87X0H0VGECdcKFgKsqtc0LBOuJswVx3mjwphGMLWsoJ7BLmGmIpm7c+s1BCJXCp/wj6/o3omvDgHrBxQ6nOevw3uGsgkwGg6VuZnOKFK2tekNE3
9dJyVdcjj0YXEqZH0nD7YtVv/f0sMbp2hbBebccd1uKYpR+A80HbDJlDuSlZ4S1WNGNa98XvTeJZHlWJT/KUkamjUw7TEyrYtFhVh9Un41pz+QxZMuGk5xHph2QkWlXY5q1lt949F4++vSdQ4+9+ZZP88fu+mBJ71Fe7q4wTftH08KMTDscMjJtZNq1zDQYn9NgZNoq07LU/Ohvv4qPPHYDO4vJcPevuOFpvvELP8Cdx8+yEZqh/6S84PclPZTkDAm8DxoxgiF2EetkZNohl5FnI8+udZ6RNfpjeGlZ5Gte9VHeds/9HFvbx8gasVMjzsizw8Ozpqs5vX0Sb0vk9Of5Gc0YjcbudUDqOih1vLLAhdkaKS/THJ7amHFsuoBkrzmeXdXA5L2nix3OOaZrEx584AFtoAiPPfoozXyOE3jJDTdz6dIljAinTp1kd3ebejrh5MnrADj99Gl2dnbwoWJ/NufcuXNcxHHLn/kzVHXgvs98mje8+c1UVYW1lo2tTT7173+Qx598iq31NY4fO0bc36fd2cY4i5G0XLa55K0sk9RAyfPZb3bAFEWecqJLLc6Atx3OOCwyhAWGKtDO21IETQi1LuyYiqIvL411gpiyCHSwXAGMt6h1VEqsQ0o6oMYu9QPLBdUDQRBijqSYiTljk9UHfMA73WClmEi25A81Fu88MUadPMWKSspITARjyUZIZZDDZIKrPDFFPvGxj9LO5xiEelpx4oZTdG3Hw489zmJnm91Lu5x57Em6vT2aV76S62/UHP1333Un199wPU89fZZuNuPJB+/n9KMXuP+Rh9msKipjSEnvxwWD8YEjR4/TNDOaxT6kiLMOK9DljrquMVYn7e7uLl2jC7KqPc2ioesiznoqX9EXLwSIJCSLhm+6HnAZX8ZCcqJLBe7dMsT6gEiGnBBjlvmeAe/XEefV4t2nZNDpUEKGoSv5Xo01GKdeBjmrovVWvSsGqPdJcAUwZXGii7m3DlO+15/tyhxZ4imLDDlJ6f82zCEGbxV9wfR73cj24Z5LMNihD4QYtdikkb792kAZPAnKfLYW4zxGAsYFQlVjJGGDY1IHnNU+raqafblEs5iTs0U0wSrOGVxUJWmL5VzKs2ZKmRQ7MJYq1KpbyzqXVPSCCM6V3OA5k3KCqJs37zSXcY4yRCXpOGUtDJtLbtXi5UHZkA1eG8Xr4XLJ/YZiZdCkvMTTcF4AizqclFypK3MaFO7GaN8uoY1uSA1gVza35ev40PK5yci0kWmHiWk+aOi57RnwLEyTywxMqi805YEZmfYMMjLtszHNWcsvfOaNPHzx1PCZr7jzw3znF/4CTewQ0RoAw6BJ36MWETMy7ZDIyLSRadcy02B8Tvvc5IXPtCiBH3/Pa2ji8hXVifU9vvjOB/mbf/RXMFlfrvbe3GpgYkj1JGLICL6kjUIMKXalOPvItMMsI89Gnl3rPENyMRocnNt/9JWf5M9/0Xv53afuKO+L+jk/8uwPkmcLc4Ec98hduIJnbZxwbm8D75f3a02nddZwz8qz4CKGTLYNOVaYy57RUvac3jnCmsxpk+HC3hom1ezO62fsHWe19tm1yLOrGpiaxQJvHb5ytIuGaVUTykZiWtdU1uGAhx96gNg03HH7bXz3d383f+d7/mesMRzZ3AJg59I2n/7UfVy8cAFrLfVkQjtr+OVf/RXms33OnjvP//mvvg+AI0eP8E3f8i00KWmIYxaSGIx1eB+wSJlQZXJZtaRajBauc5p30JVOLnb60s06catKBz0lVZJkDflq25a2U+u1MYZLO9s0rebhjE2LDRbbWZpFQLxXq3HJcWhy1hc8/YAJqHND6i9PvwQ19DdCFmyxKtZ1gNYgJCYYYk7FAg5d0yA5kVMimA1dfFaQrsMBtmzQyujjjKEqaXOsMeRsuPTJT+D3tlk7cYwqdfQpB7puwe1vegPOec6cv8CZ0xfwJjDxU07f/2keuu9jtCUc9y//t/9PXvkFr+EVx05Cznzg197BB3/tHeyeOcPaZIqtKkJZI845YoYz589TOx2JZtEQ6inee6bTKR364l1BbZjNZ7SLhuPHjtEuFpgsHNs6ckUoXu/5Udc1Ozu7Q+im9zrGzjrqstGsVurh9GIMlGpxXK4UrbPgHXivOX1zRqwjVAr9ruvY29NwZl9VhKrGhlrzg1u1LFvrB/OwGRazOdB2692B6+o9Jno7BgbULi6ajzorNELlh9DbA4u8KEJjltd67tKfx1zx2cEibh3WClrILuO0ExEMqa8KKX1GC81XKlZDla0PmOj1gErrhADYzU1MbJHti+SckBzLmGgrnLOEiU6oFBMxJpwBsvb1pAp0XYvkROzTAFHAkyOxKzmQy+a0f0D1wel9pD7MXC+bUyppgTRaTotuRoV1AXZOSc97mX41pT9yhspXg3dH27YYZ4e6TdYabKnXlNFNQ59nuvcUAV0/VyhyWdkYDuNiGeW5y8i0kWmHiWl9Udl+PT8b0y5f77Z4rpnefXJk2vLD/UVHpj0j0ySVJ9jVMUtZC+NKHorI9kzrGRZjZBIYmXZIZGTayLRrmWkwPqc9N3nxMC2nKw2OP/gX/zVvvuNBchJlmljItkQndPpSvX9Oi1qPKaeMzYJGG4xMuxZk5NnIs2udZzklreF0GbceOn+cDz9xsxo1VmTk2R8sz64/coa12eM88Klbn5VnzjS84c6n+MjDR7kw23hWnt194gxH1xdIFt730M00nT/wjPaJx4/zR/7Rt/P2v/ZPeOrSUf7SD/5FAP7vX/Re/vgrPnVlN+V8zfLsqgamSV3pAkuJLkVy7Eh6NXLsANH8o02kChXBB86cOUM7X/DUY0/wq7/4ywCcfvwJjq1v4oEvf+tbeMub3kTqEh/7yEe4NFuQkxAmGnaWuszv/M57eeMXvpH1tSlI4r4Pf5j9c4nWOmyKWvytt+BbMxSsS0kLQ+YSMeCtKhBflLHG2elDtMmAScyblo31NZy1xK6jnizDsWezfargmWxt0rZtCSXU8EOL12Uo/YvnCKmEiBpbisrBomlUqRtWjk3EriW4gCuDbn3ABk+dhPm8oa4rzEpuzEXTsZjPCc5RVWUi5YwtZtY+JyuitWB6j4rKANby5Gc+zfknn8BNajbwZK8KKHpHwOFDzbGbbuTpS7vkZLBW2FqrqKLQJW1Hc+E08zNHWHMGrrueozfewA133smHT59mNyaMz2xWWoguo+Go58+eZRK0SJq3hpw1pBczDAcCBO/KpG/Z3dklRc1xWVXVgZo25RaHBdGWooTBe6yt8N6Rg6Oylh998Nt53/m3Dp974y0P8ade+SGqoDlUxRisNeDUIwDQWF8BjFl6g4gUYGsY43CsUS8pUTcCTM40bSTUy/DO3sO8n4A9vEGVu7FuKNGzGlK6ih/1TAFVJHk4bpkuSb24rLWXffJypf/ZAXBlWGuvUPR7Y1TxihhEclkH6jHDymdSzuphUcJ4rfU460nW08QFNQZn3dASHzz1dELe2SOnqOeMqlARMCWnNilhcsK6oA+jRcHlWLz6haEfVWdlBG2ncrGY
dHRMB78OE8x101E/O/Ak4DTRUsO8dZjd75YmGOC5hMZ3ivxGLYvqrxzrFczi2LwDlcqMu2ZLwoOXV4p/jKWaZLAQnRZIcx7LDmUCaVJwm0ywWxa63JY1PjckfKaTjIeOfMwS92iYtdc6oOSBHXO6oy5lvvOsQ/+ImXDPoyDQt+8Su+i7V6TpvMkWcRDOJgqREnDu88lViWj43TauAiK6eYk0KGujY7TKpU3pO6lqauWZutc+6M6TEKgmNS10TvWSy7YUwiQvCBmOLgn1xwiHfUkwlVVdFnNdWhspqpnZa6ynawSymCE3K22qqhqui0LfVVE05XdFznS41crDZrSlbP2YdAjGkYEwznUbqus9K2aocq5wI522fLKayAgNo4VAeq9SgXlxHTRkzbT5j2zANP58c/4qCL+BHTRkzbx5g2lExwbsS0fSQjpo2YNmLaiGmPB9N+9DU38/YPXdZbGjBiGoyYtp9kxLMRz56seDZpPEEyr5If4IXrv8p33fYGQHjXvdfz3vuv4b99449xuFo+6fHs7NaE7/j3LyH3LGXgu27+P3nGJQ9zaOMwa2sbIEKb8ohnn0E8u2iAqRcRofIBn6XUNcUohmWAzllDO++MUipO8Pjh4n1/LAqdTMvAQuwjfysqVi+JPCyW5kxyGcmZlCKJMmFge2ub7d1dqCqOHD7Mua0d5jtzqqqy3iduaPmHiClujlbHFQFSJtRGy0SNqmieXqi9J5ZIfVNN2G236XJLGypi7nAC01K7sm/S5xG6tjNnK5Dnc07ccTun7r+Xza1zAFSTKQcvv5K1LuIPHIIDB7li/SA77ZJzp45TzSZ0WhoAAm3syHXDrJlw1c2fw9rGAYjKfR+8jcPNFO/BuaK0IuSY6RYd3bIjakRdop5NbJ8UtnY2sbqygcOHj9J1He1yybJrYS5MJhMmfsJ8uSwRShkWUDVTVRWNGjUyhEDfc8aLI1SrmpvWUNHWX6AYiIHL2oFDhLYjZ4u5nnj4IfIHb+HhBx7g3vvu5fjpE6SccMmiuQ8/8CAPPmhAeezqa6nXZtx3+51sJI/g6Eg0szXLuKgCyxgBIaG0mnnZV/9F7r/nLt715jeSMbB3ZQ0qX5XaoZBFEcmg4OuKb7/5R/mCS97Cv37f9wPCO++7nu987av4vi//n2zIwoxXM04miDenGrsILuAqpamqYe28gJaDjtfzqYlktb8Xqi/JmvVp1xGaBlFz0MEJKSvkTI6WpZBLg72Jry3SrZbV0WcWKFbPtPhQa8yoVuNTU4vkSEqZeTs3x+7F6gOD0WJTB2R8jlYbWTOeDF0LyyXxxCl+7E0v4Q/vuGbQlS9/yhv5G8/6BdaqDh8qxDkyEOpqyJCR6M05qhJCVfTI6vWKZIuwF7DKag0Yu3ZpmT9OcHVFU1U4hMXunNR1Q0Rd8EMmQQihHDz7zKby+T5Lo/weXGUZTmV8qWQM4GSVTSPDFAfpGUt95D+lNFxDnLMMSsl2kEhpuMCF9FIRR85GmXWINdkr71YpP3p+2kDWFV1+lMcvI6aNmNYv4GcS087eZQdEtndoj58YMW3EtH2LaUMGHDpi2j6UEdNGTOsXcMS0EdP2Ytrp055/9uqXc++JdfY6+xHTRkzbrzLi2Yhn/QI+2fDswMFDPKs5xQ9P/xw/cNt/4J7504jZ812/8fV89XPezZ/5nPc/qfGM5SN96fpkylozofa1laYLfsSzzzCefcwAk2CO3jlXnPzqi1OxWaIoTuzfOWe8fegRC2ITt//mrMRoNQkFLRRXe4MoRBEotFHtQUB6H7zapC52bJ0+w/0f/jDbZ86wXCxQVYsoJ0VlFamToukOKbUvrT6rVA2+RJIlrJbEYY5NVanrmp1tLY3PMpQIYl0MuW3bUmPRWyMwFMg4hW57h7i1RdeZkua2Y9tVbN51N/WhLfx0xjTUSIniVpVFMIf9S5mzi5btzR0uvenpTA8fY7EzZ/PdH2DqrFlb3TfzKkvVtp1FLQWcD/iqJkwafF1z6twZc5jOMztyhPnCsidi15FSsgwCjH0hezIJUkqDYwrB472tmWIUPCcMjj6nRBdtPZwzOrMPDiQSQsVkOiH6QC66cPLMWbbPnmURrREhTvB1MD3KSmyXLFqjg97z4VupJhNcGwk+2KHDueJMjD6aly0IuBBYP3iAI1dfzdmdLbK37BJxDqdidV+x3xVQycWYMiBcsfYA292BQSe2llNuO3kZbatonU1LvN1TUoKSoSJiRFLTJ7MFV7RCVUldZxkFOMQpkvNQXxgwCiy2f+QKciLHYgDZHMjguPrP9M5bdY8/Msegmoe96+utohkKxZWcSanFhVA0v+x5jEZfFXCaIEerMyzAYkHc3OLUvfdx572e+05uDGM5MjnHM47cATRoLnVKHdaPqETCB/pnn2EnUjKNKD5F8KUWbFIDvpj67AJBs1LVFYKgyexapZ/yKgvBOTfYk/kPh3i7F5gDdc4NdZptfJQasSVro19ju6CNVzPS13Fm5ej3Ol6RMqZiJ/3+o7ng3V4nWQCj7K2WlKY+O0Jh2HP7qAw+Ynx2efwyYtqIafsF0+bb2wA8cPsd3N1+6DOGabeeuJzfv/1mnn3lfVx39OSTGtN2z7a85QPX8d47LymYpnzR5W/lZVf8wZMa01blLmR44BkxbX/IiGkjpo2YNj6nPRqmffTeDd532yFuve8QOmR8j5g2Ytr+lRHPRjx7MuOZryrW6bhh+se87JLf4r3nNvnAuRdy++lL+cN7rme9mfPim+54UuNZP4pD4QTPO/gmjkxa6lBbHzUFl3XEs88wnl00wOT6+p7DVq6uZvVPe8onOMmoc0P2vnOgfRSwDC730cEy2a7rCN5Te2/1FnuHXgkSc6GFOVLOOF+a+rlVhBAg5cTp++/nPb/+m5x48H4aEWahxtcVqTWiXw8iIqYc3lmtVM0W9WeqeOcJXhCCKZoqcdkSu4hqpmmaokCWSeEQqhCYTac2t5SIqnhXI0GImoippXaeuJwT2yWTYHNM85Zzm/ch6R1M1jbs5ylXk86cJHYLcqgQaQaD0DayubtgsdPy7K94FtWVV3Py5ClObf831nzA1Q2hAI6KUeOWyyVJMq72TKczfNOwdugQ60cOc+tHP0KMEalrDlx5JfOdHXJSVHfRHJkvFuwuFhw9epQQqsHYdnZ2yCmRyfjgjGJZIrxZDfyr2uiTbbskLVMBBG/RYQcutHgXqJuaWVWhWLM4PXmSxXwX7z1108D6GtoGpO1ICi5mQmuZJ+9/yx+QgSPNjIl4Kuepg12r6yJRs5VkFWG2scEVV1xB2FiH6QSCgZ4Xh1fBGSOS3gwtCmw/lHqasqfRWv/O+TzT1ZmqZGfELhrNNU0gZZyzqL9Utem1mjPNanThZWyRqsJJgAAuF4dYTCzGzkbkHJIz2iWiaAHVVfO+otg4Z/RUzebNxIE4cxZKJqVusLsY2+JezNE7IJffJQEqaKlpm9vWxi1KADRHcmv07Ly5xfL4SW77wG3sbO2c7zzEatzmbLRSJBGa6oK3iDl71ZIpYdko4mw83nuao
k8xRWIWckoktNR7FYKvMN+eCeWQpqrErnh1zBGn2BqLWgXxgcoZTRggkSz7qepByPYrl70W5/D9ATDGIcPJqYCXoR5yVqOu9nVY+zmKk0L5pmQlqK2JrphO/Xv7Wqeqel7d1FwOnIUxWw6wpdZtAbZRPraMmDZi2n7CNF8w7b1vezv+6O9/2jDNBUclLVErFEdSzw+97cv5pue/masOniZUkJ6EmNad2+H4fcK/fd0ryOoQMpW0/P1n/UduPPIwzk+ftJg2PLyMmLavZMS0EdNGTHskpo3PaYZpv/fup/Lf3vC0wSeMmDZi2n6WEc9GPBvxzIF3dDHxLdf9IG8+/VE+sv1c2hR44x03c8vDV/GC61+NbxMq7ZMCz9JyQZc8v/POK/nlN90IgJeO62e38I9v+NtcMruKOqw9qb933G945i7611FGGeURErPnb//W/8mvfuTzP9ND+YzLuXiEH7j1P3H7zrM/00MZZZRRRhnlcci10w/x089+HtdNbznv9f/+vs/nO177TWR9ch4Nf+ldL+Tv/s+vHWp7P2P9Xbz6uS/kiubuz/DIRhlllFFGeSy5ENPG57RHlxHTRhlllFE+e+Qrb3wnt/yNv83R6RYAp3bX+Maf/eu8657rPrMD+zRKFx1//Qe/hF/7g+uG1/7ujf+A73r63/zMDWqUi8pFGUx90ynFKKiUHwHUe6PiiTWSspQDoyReSMGyQG2hixbelkip05gzUXWoq4qIRe2kj8I5ox6WGoOI0DQNVW01SLucCOLYPruJo6eXKruLOWLcOGteBThxqM80TUXwHidYxBElayJnRwh+oOd676lqo4321+hpsiE0CMJysRxer6rKIuaVQ9ShAsF7Zs2MOlSWtaCFFldX6HzB1rzl1PGTnDp1nPvuf4Az29u4w8e47sYbWVtbt/VznrvvvItzd93LwydOcOzQEUiJLlpzuVAF9iwfIlDVFeoCrvbUdU12DpoGOXyIydoaqeuoq5rTDz3I9uYmMUam0yk5tsSUSDmztbWJ9xWu1AmNsbOIJkrAW03OQvE1ml5isbT1SIXm10wnJUifraliziCZLkajLSKoCNPJlEOHD7N22WXIoUMc/8iH2T11isXZc7gQaGZrMJ0BsL27y7KLNg7xOLGMBpHyk5MFkRVSt2Tz7Bl2H3yQ5anTpLZDsJq4Xk0PdU/k10LUhcboHQgcnZzg25/2H/mVe/4yJxaXAcJmO+Otdz+NqIGv//z34j0oQuo6pOdEZoXYFUqhokntmmpU1SAOfER9Z/V5S5aMKWseMgmCrJq1iQjBufMyCbQnpZaIttVAVXK0z5ATqYvDHL1gjSZzxpEt8yEnKnFUbq8Ng/MOLa/RtsOPzpe887bLeduHr2Q7HhzGAMqrbvzvvOSyd9N2VtfblN4cidUF3ZORRLmfWsR/YLkriGjvPayOL4VOii96bnVyUSXHSNd2xdcosctInwpV6LFBHOIDrvIgkHOpxVz0N6U01E1NyfxNTAl0lY3U10qNMZ7XsM97f172QC8xRqSqjHFNocnK+X2a9orRXDmPwRpzoq+3OjTxw9mSam97+RHXGuWRMmLaiGn7CdPUHUSAShyN8582TAshc/nhJX/hylfzrjMv582nXwnAItZsLqcg/kmJafPtzNZiOth57RNXrO9Q+RlZ85Ma03rvF2OkrnTEtH0iI6aNmDZi2qNj2victkSX7Xn+YsS0EdP2s4x4NuLZiGcg3jNdXyelxHJ+hnj2g/yVa3+YNz70Cv7o7Is5N5/xWx94Fid3D/GVL7r9CY1nH71nnd/9w8t4+OyMtvOsubO88pIf5qb67azLGab11PqcjXhWbGZ/4NnHKJHXN6ozChY5W11E+vURHI5Er2D2Wu9sViJ7PgPIalPp6V2DoQnS10V1zoyGAjRi9TmbpsF5o72lnMg5sXNuk0ll9ElUWSyW1PUE72Rw0l4c5EzTWIM4J2IKX4xogKEyeKNYVuSc7ItisY0CqAcAiDZUb8AQqoCEUluzrEFTByY0bMctUBAniK9YLJbszBec3tpmKcq53TlRHEcPHuKSm57G4aNHbUlmM86kRHvPPTz48IPMrrgSjUahrapAVQdcacwnYqDZ1DXZKa72VFXFkox6T55Omc1mpBhxIpx+6CF2t7fxCBtra6RO8DHSxsi5zTN4V+GkUH1LnUrbI2CPo++d2aI1R4+a+5lOp8Suo21bYoykUq+zjR2hspqSToRJ03BgY51jx45x6BlPJ586zumdHbLfIVQVofFUhTKrKUOaW71UxHRSjVgoZISE2HaS2yVbZ05x7u672D3+MNp1diAQ010KOVD36p9g4OYDaOZwc5pXPfVneN/p57KINVvxMADve+gp3Ld5lC955l0cObigConYteZMUEQz9JRTIEdFCHav2CG+MuqjLK3hnHMDqKqIOXIRG6VmwyDncQ4QN9iMzVzRnAYnLSgpduScEDKp68wrCFSVJyWFUrc4pQ7JmSBCQO0A169HoZajGboli53M2TMVnFvytg9fzf96/7MGK69cy9HmNH/5xv+XazYeJKVglPXe7jHAGuopA17MgTvE1gy5wHf0ztPAuaej2tDsEKg5E2M0W+wdYnH04u3qDsEFb5TuICRdNcSjH1uhhvaHypTM7gFq6j0+Idg99+iN2zO2CwNMwQfEW8M+c/TyiDJ6YD5uoLlK2dPiH4tJsRrx3rFf+K9RHktGTBsxbT9hmqsPgAiVs4eXTxem4YTpbJ0vu/x/MvNn+ODW53G6uwzFk7Pj5M5BjoQnH6btbq8s/EA4xZHmDNPpdDjop5SfxJgmgBJTRLOOmLZPZMS0EdNGTHtsTHsyPqdpu4Rly4kHdcQ0Rkz7bJIRz0Y8G/FshWfbm2fZ3NqhbVteecXPcHJ+iD86+2IA3vrRmzi3XOPPfe6HnrB4dvqM430fOcQvv9HKvM7cWa5sPsLXXvZDOMn4XI94NviP/YVnFw0wVVWFQ3GaIcbScG/VyMuJ4LxF86Q4bxCCC3gB7/rbW2RziLT1OuTENjbnIQKNCOocLlQ4b4uXSVaDM0dCcHhv9wXY2DhA1y7ZPHOGSWjwYoafh/EAxQn2zb5i25UoJbhVjz8Ai3D3jt6BVf5Uqrqmahpyiiy7jtAEQjAnCtB11igPMlZQMgEdm2cWTKczZtM1Ng4cA8xo57GjS3PcZMLB6YxYNeTdXdyBA/zpv/WdLLc3mXcWhZ4e3UCPrJEmwr23foArjh1mbTrj8FrDtAlUQcyxUQBWFFdZ/wDEIqa7y13a++5jee4sk6pGQ4WqstxdWtM5EeKyJZR6kADtfIF3Ce9MTSazhuADvqqoJzXiHTghxgziLHgeS13IcjBoplNzdG3LYrFAnAdnzc9i7IrSK9p1nD3+EDlHDh3Y4MRtH+Xc8RNMfKCZTEldJJZMrMp5ZpOJbW1rDkxEcDkPziz3h484Z36q5Y9+//W0XeSI97g2ImrjUABxpgEClffgFBG7ZuoiKUcS8E9u/jbe+PBX8IO3/dBgI6d3Z7zqv/xVvucv/BZfeOOdNue6rGFOxJ3t
Qb8yDokB8Z5ZVZE1od2SmCOhbnAaDTwBp7H0wkssU8SFyhr3aS6HHjc44d4vivd4kRJhzsyXc1LX4R1QsgwA6iag3hq6egc7W3NS21rDxKqCUr/WFjhaBlGMsFzwlj++ju//lS+0Oel5HpnnHH4/P/YF38HaxhqhXsMFa3jpfMB5b5F03DBeRzkCqtKEqmTzdPiSKZBzoi0HB3E2374ZXlEyUoyWqKAyHEJFLXLv+sOkCN55fAiE4IiULKYyDu8d4vrme+XAIErOka5boAqTSV3G7HB4Ax3nwdv4cwGcnFIBqzLG4M15x0ibIpWvzwOHvXWdu9LsMqU0ZFb1mUta5qSlXm5/fRW4YBtGuYiMmDZi2n7CtLRoQaEO1WcE07Kr+LxDr+dZn/P7/J+3vJNz8ZIR04r8o6f+Y1566ZuoJwepJ1N8XT+pMU33ZKuNmLZ/ZMS0EdNGTBsxbS+mSdcSdyJ/85dexdndFSN3xLQR0/a7jHg24tmIZ+fjWWqXLM5Zn/PFBYxUUYX0xMQzjZHv/skv5SP3Hx6u81WHfoBXHv0hNmaHAKjqmnoyHfFsH+LZRQNMq0ibOQInHtdH9vq/qC2oF4cT20TNGXUyKG2v5uL8KtKXizMuG5P7TQKbqK8KDSuTU7QmaFnREEh7Gl5t7yS0LIwgg4OZTCZ0bSLmuFJaLNLuRQhVwPlCcUQtYixulT0hRontaaqqymw2LRHLzqKNQC4edrFcGPWv8jgyvgpMZms8dGqXbtnRuiXTaVMUTJHKk5fCoUsv4ZJrr+fM5i5PWdugOXYM2TjEuYeOs9jeBoGr1xqk7ZDFgq2dTU7cegvd+gEuO3yQIBnRROiTLwTEmZKKN4VIKSFZ6RZLYkoE5/pAJz6YERll0CFJC5grLkPwQvCFPpkyKgn1Qs6ButBk23bXwLc0+jMlFsDRtUu6kkXgCnVQVWnblsnE00fuD6xNWSwWnLzvPk49+BA7J08ScqLyAcmlwWFxbN57XM4sly2pRI9FjErbEzibSTW8t5k2CI7KB5pmhk8OdWpZDWAZEr1mO9M5zRnJuTRPU9RV+LDkOYffxT+/+W8A8PsnXslbTv05sgq/8Acv4EMPXsa3vvydSKFUI0Js2yF67uuG2CVIDl/VJEBCwDsKZbQ8JGARdpyiMRKjUqF4JyiZqEYDNhroylYd1oxNMecVUFQTuY1UbkWflNjZg0hOZtspol0LKZLbJS4EqIpCdcXRO8d/+t8v4L13XXpef45D9Wn+yTO/ByFzZLpF04g1lyt/dz7YfhVnamOlWIEOVNWsiZT6ter/SskQAV/00zmHpjw4VXIh6ma7sGifubDKChLAVxW+qgqtO2MM1zKeAiDee7quG5yr0T8vyGwoTsoi/nnIPtizEZabYrxUNFojT0r2AH713r5J395MAlUtjj0P4/DDWlhWgUj5PSVynwHxMXMJRoER00ZM21+YJqUcgvdWpuDTjWn4gK+UNVH+4U3/mN8+/nUjphVM+5yNW5lNG0JdwYhpw2sxRlLKI6btExkxbcS0EdNGTCNncA7mu/zxnZfzP975HLYWDYqMmDZi2meNjHg24tmIZ+fjmQuKwxb7Sy/5Na6Y3MuP3vUDZDx3nTzCv3ztn+Pv/tm3c+nhxRMGz+49scaP/84Xct/JjfMS/2bTmsMH15iurQFCCOEJgWcndw9yZnfGpdVHeKLg2cUDTDmTpUT8xChzw2O4rmhl3jl8iYgNZQ7Pu/n54NC/nnOJ+AHivUViiyPo35+KsWnOtqmqJWJv79HYQTZ6XcoJV5zUsHBZqQqFzGh8Rmt04gnO0cVkY9VcoseFSicKogYSqoBS1zXeGyOhbVu8X0VcY6GOdl2kCq7M05HrhqXzJFW8pmERBJv4ZG2Nw5ddxtn5Axy5/EoOXHUNuJqzJ8+ydfIkIrCRIunUOWYJcowsz5xi0S5ZnzZ4ATTB+bpGyrYvmpXcYvVksYjnbDobxlHVtVEqs0LMpFTqnaZE8N4AvL9wyva9ubca1loFm4fmsnfaW/EwkOV8SdcaMIYQULH4cUqpGKa9f1JXdLtzFvM5m1tbNOKpq4o6eFJX9ruM2XuHzx5waLYXV1HWAlLFiD2OSajQLGRxVN6izpogqaJ6fu1PG/ZqHqqY6xGjF18yPcll1esBuG37ucNnPvzQZdQh4RRby5zRnCBbpBtnNGpJpa5ljqYFWtZXc7GZQjn23nRZc6lbGux38zIGHQXdtb9f2QMVAQl423G61DHUD4XBqdu+Cy4lG2vsbA2jR2O5dtuxuwzcfvZK/uAjV3H/qQPDnK+c3cczDt7Cyy9/AyIZ5wO4qVHMe39AH/k3R537OrXFD6iztU05DZk4WuoIIzIcHOxAsGevi32bypWATl75Fum9QNEx2bPH3k5Dwzm0PzS63veYuZfX7Gq9T1JK3VEtYNKPyzzx6vO98og7X5/2/uS9GVirg7VzMjSZt/muAHLve1POw/XPp92O8lgyYtqIafsJ03r9cd6Vdf/0Y5qo4Ih83pG3cDpexanuaj60+Ww+/NBldDnwoqfez9OvOMHEPRkxrbYv7UZMW9nB8NqIaftBRkwbMW3EtMfGtA9uvWj4zBP5OQ3NiPd0W5s8cPxK3n77dcO8J37BF13+RpykEdNGTNvXMuLZiGcjnj06ngFct3EXa9Uucpe9d2sx5Q8+ej3fvngX8lmOZ92y4kMPXYZ2mbuPb/C2D145rI+QecaB93DVxkmm62tUTVP2JTwh8CxnSMmjQZ8weHbRAFOKES0LW/kVTRWgbCHeeaS2yKej1Ka05dtzpdUgVgO1JnveCc47mkmDlIWOMVkkVUskLkZElVBobj2di/K7AAHHfD5nUtfUIbDYneMkEEJYOXoHTpXYLgjOAKadL6z5nqtwqkMkV4HF7i7Be6rGaJ1VXROyXevBB+9n0jTMZrM98xSWy5ZmdoAuKmd3t6kvu4JF7NiJ7eAgnCo+ZwgOCY4scOtdd/PMq6/n8KFjsMzc/95bOXHnnQDs5JbQLbg2rCHH1mk00+1uMwlCKF69a402qU5QB97VFlyN0O5mtPJoMRzX6KBgs+maRcy7lt3lDjvbO6gmBGV9OqPr4kA/RcTWyFmdTSeKc6XGrFkeudSGzapoSmy2q2aEs/UZGSGmxGJpjQdFzflMqoA2FRWZmRdmzQQnjtQlzp7bWik24H2gFk9OjkUynTF9iaTYojnSLmwcUjtc9jgJZMmk0pwxJR2aN/a1XgEkmXFbAzhvNE8yscu4qkbEGyMFcN7v0W2BrOi8hUllRkNH1WdtOIef1kjbkVMm50SoKrwXfLCxm7/uHVWgx87gBEcm5wjiELWamkGLDag1m8t5gViXOsgVtUb7XIp4dADs1C7wmou+J6rYUuWI5kScz02PSs3f2Lbc/uCl/L3/98seYctffe3/4K/e9Gp7YAEQR5eVOnhCXVGFipQsS4icCc5bFkA5OCVKk00ndG2L5g5BidEhldWFPa++6ODzHSKlOZ5YBlvqjCpq1zan3tdJ1ZxxQVBN5BQJk9oyo8o+trG1g4768sB
p++C06DaC9hTsorTeOSQEgnhahagtmjKpi3hZ0Wld5YZxB++LkzelT9GaT8ZUdBWzJzvA9AcNHUBtAJvi2FNOJasAchofXB6PjJg2Ytp+wrRcMqVCCNR18xnHtD//lNfyksvfzl9+y+tIGrj9+FH+3i98Fa/+1v/BtZee47Mb08xOU9dx+wN7MW1lzyOmPTam9fhjjNyityOmfcZlxLQR00ZMM11+Mj+naUp4VU49cB+bx49wvgh+UuNII6aNmLavZcSzEc9GPDNdfjQ88w5mfr1EbcyP9qqvKfNZiWe7c1Rga77OP/yZLyXm1dr0tly5Jf/Xs76JSzaWNM36E+4Z7ZLJaY7Wp9jdZQiIfrbj2UUDTE1lUWaHUgWPxDhExCxCaO9zxSkrFuXtG5n54uxVLdqsGaJac7uUFOdKpNA56qbBgtKZtt1BNdvGZqv96MUh3hk9S/IAHd5ZXcJKHC7ngXLmpCF2kRgjdaHdCQ7vHYvFgqapLaKoSnBCEMgpDXRKKGOJfWTX0WZzkN4L6xsbZih9jUm1zZ9Op0SFThwymfH0V7yC7a0tzh5/mJO331IimwkfOxJKjEtY7HLo8CHO3nsv3Zkt1kJDdfIkB0st1EvrGtFEzB00gZgzOSYz3ELZy6UpWKgrfAh0MeHpazw6ujaBs9qyk1BZGbMMu9vbkDPtcsG5s2fMSJ0jeGGytk5MmVxqyXZZrTasZDKZFDOL3SUxRnIykmFfN9WLUSktM8Gi2zmDeKEKFcFXNFWFiANVYmeH49o5fFVBSihGUUWMFtg7wahKl5UORUIgx8iia2l8icqKDE3SVJVzZzeZTtfw3lsDNhFSinSWbkGOK4qkF6iCt5qoCqhANv32eDsdlHF87bW/xOdd8i7+4bt+nIzn1uOX8s2/8I187yt/kyuPbEIQmwvYtdolLibbM4WqDkhOsFxACKXer611XAIiFkyrajR3aBupmkmhGiuSSxPJrKCZ2C1xTqzB5M4CR0ZyIuSOIKXGryrdYm5Rf4DFHFnOrdZxjLQ7O/z6h57Pr3/wRcP6LePK2X/h0dfx7Td8L9P1gxyZnkO91RYFW5pQBXoKpx3qUqlf6ow+Lb15STmUGK06eDH90gyaqSScVyc0pUQslOUqVH3KmulxVLq22xNNtwwRLQCs2RxwjpEsMAvgqmD1VcsW97TQuq6JnYFeVVVQHmL6w44Bsx8yBpJmcDLolStZFIOoAY4ZZyB2XTk8S6H0KsNBuBy++kZ8ffO9LsVBLyiHEqVkzfRz/BiZBKOYjJg2Ytp+wrSUix9J0KXPPKYhDD7mPOkStN1nLaYJMtSXzm2LLubnTa9yHT/+km/mKRv3jJj2GJi2N4ttxLT9IyOmjZg2YtqT7zlNELaWa3zXb34DXXL0Pjd1HTvd5BF+Imq2L2cZMW3EtP0rI56NeDbi2cWf0Y5MzvLqL/pafugD/4T3nPp8AP7pa76cL3vmR/iml77zswrPtMv8f1/zNZzZnZLVEfOe78+KfOHR1/E3bvjXXH4448MEdeMz2mcDnl00wNTT51CsoV5eUQvVKalQA9XJUBM1UqK6MuzF6nJqQ8xaorgIRJuo6+ttqtUCNNpaL2XRipb0ijP8VcGV6B5ZSTEZTVON+telrixO2QRn0ecUrZZijoksRsVNeyJyCqYM4uhSMthyIFkJVcCrmrMHyAy0+y526GTC7NAhJpdehjtyBNlY59yDd9t1Y4fmyHIxpxNwG2tc+bQbOXn/wzx450c5Vk+ZpCWhtjl6ieAyzlnUWNQUwofeeDK+z5bw3hoKdhFNimQlFxCzULWwXCyHrI1l26I5W83SFKm9URq9eKpQ4RzksDLkmDNd7kjdkhgzKXdW97TQhUNpgijlx0xTizIyjB1ciX7aPhsoJ6v1iBENjTaYgZ6yWPQAoyFmFbITkoAdH9SitlKi7gAps+wE56vSbM3j1KK3XYyQxTIU1CLwrq5WWQtZLSGmXFZTLj927QP1aZ5Wf4RX3vBa3v7QF/Dg7pXcffoIv/PBp/O5197Ls69+YBgzzg4qBvSKyxldLsH7IcPOqQ4HKZ+zmV4qNF1n75OuQ4tDkL5TZFZctqaYrozXaSKnjpwikixLQcpWkFqGTJ9uiXRdcfSJ37/tmbzr7uu5+/SFGXDKSw7/Gi899ns87dDdTDYOos6T8auzTnGYWoDHHG4e7DpTqJ9lSXI2m8piDl/FIvVJM1XRovMi6llRBxk7BPaHOnWWkZBF9oyWXotKtD6VQ6VYZoPu8WWU+xYaMQqiStVfs1C3bUhS5lbOAdi8xEmxvT30VYw+T1kXoy/bRsgeJ997nFJJ2ubX/0UY/LAKQ11iydicSj1fVlMf5WIyYtqIafsS0wzXPtOYJi4wq5f8+et/dcA0gDfceiOfu/jswbS33foUTm82SM60u3N7iCvjTiny0NbB8+xYyNxw6F4ONFsjpj0GplG+0EnlgXTEtH0iI6aNmDZi2pPmOY0u4mLio6eu4H0P3cCdp46e10PwQnnW0Q/weZe+k76O2IhpI6btaxnxbMSzEc8u+oxWB+X62T287PLXUckW7zj5p3jg7EHec/dVXLqxxZ961kcJPu1LPNtaNLzh3U9Fuw5yRpNy+4lL2F4259ntwXCcFx/8NZxzPOfIe7np0L1Mmv57Rzc+o30W4NlFA0w5xYFKmLJFWnu6asboU+Q0UPs8grpMEFDX16sEYUWNVAXNZoiqNuGYZYiUiQiVCwM1a/iciE3fB7xfRQFzZzRWUsaHypxWSkhdkwuoLKPRJVMWMo6mqi1C13agSuoiUTNeA7FEjW0zHL4JiPO0KZGCI+VMu1xyLFitylCUJUfbxNR1zLNSr804eOwosrHBbG2NcMklnLjlfRbVbJcsuiW7587ROoc/dpjrn3oFxx98HQ/efQdhssaGgzC19cvLHTItySe6iDmtsg5tVMSbY7blsWh50mhABuCElCwar5LRc1tFaU2trCFYKg0PrRasc1IyL/yQ+eu8x+eEtsK8bdE2IyRma1Nr8qdQ9camph+imJL2mlB8marSxm7Qta7r6GIka6apAoigagZqtu5WJ4e+SZpkkstkD9lD0ogjIZLI2hVTcbQJpFviNRMIaA4sY0fXtYOD72XW1NAbfS5nchWcQtclUupI5eDgg+fAbJd/9Px/yz95278tX8YJP/uOF3Fqe8KzL7m7UFZtDwTM4QMu2/64KuCqCjDatu/XWi2KHGMipUioazw1ZANMFcGFVZaCKPhSS1TEsnS65ZwcOxxKKnRJUCR1oCX+3y7R5YLcKvO559V/+ApO7a6f5weExMTNedU1/47POXI709lBQlOj4kgqpEKZtawVzDv0zilbPVaR4tjCKsqe4tIcb/AkhCymfRlzaop93oyg0OR7Rw8FuC3aH0JYZY/Qb6sMYJlSQrzDSTBAUWUvLZ2sdLmz/c4Zr3Zw7Ksu99R4FTFQFPukaiZrsgyqYHTwPvvB5ljo6RTfPxx0ZJhHT+5XWYGH1bW1D1otV/OFoWRFpSS4Llp2Deep8CgXkRHTRkzbX5i2KmWA8/
sK0/5/7/hXnFococ0TfvYdL+LhzTVuOvIQddMWX7z/MC2rsNhRfultz+GD91/+uHyCl8ismlPVFVVdjZj2GJhm812VihkxbX/IiGkjpo2YxhMW0/Y+p9FFtI10S8/b7ryJX3z/yx/VJ3iJNN7KV/2Za3+Hb3z6L7FsbW9HTBsxbT/LiGcjno14xkXxTILHVY6veMovcc30A7zvzBewSFPef9+V3PbwMV5w9Z3M6oRzmRntvsGzLir3Hz/AD/7a5/Po0YnMRHZB4JrJh/mOa/4OdV0zm83G7x0/C/HsogGm2gWsYVhCsznNgYhdGs157wjB48WGK9lqHlbOGaUOW3D7W7IBO4crtTopCzZQsRSyJqpQUYVAXU/IqTgNhOArqqoaQCQ5R44dedmhkoYo8HKnpWoqXDDFAYtYGmXO6OKIUFU1jg5NkS5nnNRDyFW8Z9lFxAmXX38DlzzzZs5tbfK+t/8Bx0+fYJKVadmkaTXBO898PudsN8fnRFzf4NI+qwI4dPAwqko33yWdO0PI4M5twl13w2XKZL7LhijTuES7lpgtGisusVguWSyXUE0JlSPgaLCsC7LaPgEinlDVrIfAfLGwBoTes7O7GCKa27s7FA3DB2eUwaxoisQq4CJIzLTzCL5Cyz6GydSMJEPlatq4JKZIu+iofcCLI3U9RXTYXAzqIUWjuKqW5oRRizKZkjvvqEJN1dRosoaGbRuJKSPiqeu+uZyACgena2ztbhFbV+iZZzGXkYY9dyJ476maGh8qxDt257t79C2xNlsbam6mFMnJdLZxdUmJsPdmzeAcoZrarOpA8kLMaajT2styPufMiRMG9sUKNcZyWHH4pkbjEkkeUrSfqoJCIXWarDFfisR2Se5qXFVb1o5Ydktv3VmdZVVgDl5ESV2LSy2iCSdKWiwZIuBdh1Y1qkJ35hxnHjrOH977dH7y/X+N3bZ+hB+4cf0W/t2zv46rLq2oq0vACfVkZv4gK8GXuL/YwWDSNHgXEJS68sOhbNm1dN2SnlaZNRJ8RaitBmndTJAS6ZdSbdmX9QiVgBO6nPAlc0k0450nu2ROMKzcWS59kPp9bqZTpCrAHDxWDNnWLy7jkF3gMgVULLPIi5Do813AiSN4zzJFYkokLU0kXUk8USVqHjJ1+gNrypngSvaHyJB1Yfol/cWNaiy12WOhqmoIpJTJqrSL5R77so/mrAMQjXJxGTFtxLR9hWmV+VvnPFXd7CtM+zuf8095+SXP55+/5ycB+N1bb+ad91zLq/+PVzOrS+31fYZp9505xt/66T/PvK0er0vgy67+Xf7FC7+XIwcE1WbEtMfAtMHn5EyKccS0fSIjpo2YNmIaT1hM2/uctru5zc7mLt/3wR/g5OKSx/QJX3b17/LPX/i9TNbWCbLESzVi2ohpnxUy4tmIZyOe8bjwDHF8zuH380svfQnf+c7XcO/uU1l0Fd/yc69CgCsPnuXH/vIvWpBlH+DZT/zuy/nt9z/zMW3/gD/JD13/PKYTR1M71mYHOXL0CKEK4/eOn4V4dtEAUx8Ng8EeL3jD8B+GeNiekFb/xboMb11FmZEhfvaIa6/+/chrPeLfcv77HjnYC0Ns8uhve4xPDDFw5whVZUqCbaiqDo2jh3XqX9dVc7i9Y95bPgssSkjOkLNlXNhFyk6WuGL59/D/4S2PPoN+rc+bR/959nyuv4Y+ymqU1/beQy+Iuq+G+ujjOH9nZfj8MJbVMMp/Bk05/55qL8uFuiVGat1rLNqv4d4792s+qF+/tuf//dHmIXveqXveX34p9TYfZe579mt4v1qEXPbMfbjwBa8Nl71wTOXfwt692LvSjzqY1eurVI7hvr915xfzR/ffwE67oqhO3SZff+xfUVfC0clp1sMW3h1GnOu3Y/Xf1XKUQ12v4zoc8nTvUHg0nZE9nz3/Nbv23v+Xg+MFOi57bPERrwvn2d5efzRkOfTOE32E27hQp8772x4/+Wiy1zf0Gn6eZZxnU2XdeLSdlGFyumeWeoGfGeWxZcS0EdP2E6Y9Qrf2EaZN/JwbNj7E33ra9/CLd30HZ9pLODuf8hNv/SJe8bQP89yr7ttXmPZ777+BP7zjqewsay7useHS5j6+7qofZ219xg2H7mG92saxThoxbc/nH1tGTNs/MmLaiGkjpj1+TJv43eFzMXt22xrd6x73EaahypmddX7uLS9ksbNLbDu6ZeTU4jBdXn1986eO/iI3rr2PEMKAaRvVNpMKkhqLYsS0EdM+G2TEsxHPRjx7fHgG4CSxFrb46zf/FG89/iX89n1fzm75Lu+Bcwf5kTd+cZmmlVNTTTYH5wZW1qo6lBb/l/g/vuhdHOjz9D4hPINfevMLOLtdvlfMmffefc0jkv+ExF86/C9ZC5tM/JwNf5rGVwRfI2LBn/F7x0fKZwOeXTTApDmf52B6J1Ouft4k2Pv6oKt6/kT2fF7K33X1jz3XYI+z66eurJZoRWHrbXyv8YkTNA271kMQghT/Kav7DpMohrWnv/XeSF+oawgBX9XUkwmd80AGcXveu1IizWqNtXKClCElutaokbFtrbZizsS2Y3d7Bz/bhi4SRBAtFL9+js4YECkp+Gx1NJ3ucZirtcpZSz1Ry5Rw4oxG1xuogKjQU1L37mHPuMgqZAHUWYHXvh5lzmVrLrj3edu/2mGh0A73rO9eYNhrXL2D0z1ueDU106m9TiJrtqB777+HP/Yv7DFoXY15OJbYQpQI8mrEq4lw3ni1OFalfAZWcxPlcHOco82DnFpeAcBuN+Gh7SNsHMqos73wwZeHFvuR8tODumpeNW+kgH/vHAb7s31TBAb7FkRdWU8ZbKv/zF74zOp48MxBfGjwCt1Z4a33fx4fPXnFMPWNcIarJx/lK479FJNGCFUNYl/a9UCVy9qc/8BlYCn9og97fz5I9ucF1Z66rnvWfnUYNH3UQT32grEqkJUs5qOkjGnYvgHc5REHpSEbau/BZ/BNK1c8jGnPIamfyXCYy7nYvayu9Sj/H/YJOe//58sKeDKP8nm1uqt7zebiJ/FRLpQR00ZM20+YNjzslHHuJ0xT4Ej9MK+8+ud4+4mX8dH8bM7FS/iNDz6Xw7Ndjq5tW6kBYFInLqvbR8W0roXjmxtcsnGOSj61mLa9aDi7M4Gu4y0fvp4333rjMN1GtjnsH0DElRrVrr80T137EH/x6v/CkaOH8aU0xohpF8e0YRzn2ciIaZ9pGTFtxLQR0x4/plVuwZXTOzm+uIqoNVmFB84dGhhMOOHI2haTOpkffhRM+5N8TtuLaaGqeejcEX7tAy+AvfO+QF5w8Pf54mO/Sj2pR0zbM5MR0z77ZMSzEc9GPHv8eNbP88uu/F84Ih849XQenD8FxbHTTnjt+z6Xj1e8y3zhzXdwdBkJoSYL9hwlMuheVutHpRiDyYmSY01OHaoWuHzdHz+D42fPb7kRpOXy6QOkGFFVHB2vOPjTHG1O4YO3FXEeET/4bLObEc8+2/DsogGm2C3JyQ5QqG1z312kwxbTC9TO4YLiENJex3PBLueUyIX05ZxjaObHH
rNUJcdIzNmaj4WAvdWCCilHpFNyob/VVSDUFdk5qznorN5hO+9QUZzzNPW0DEDJUclSaGoevFc0JSQnvEKzNrEvIkRQ7+lIuKbmyHXXUl15JWvLI9w03+W23R181xIG5y62TjHTiGe+s+DhO+9DN7cgZeL2Fvd++MOmNLGD7U1ke86J2+/i3L0PcvjYlUgWLq8b8tYucb5EU0KBSGRnsWB7vmAZWtY3lLUJuKDQdkgsYALkGGmXQt1UhKqiDh5fVyxjJKZo1Ldi2GDN8yjN0XLKbG/t0jhPcqYavp7gK4s4t11nlDwxqmnXtaQUqUKFlBqXXVfqVHpPVVXE2JGTEnOiS/0hQABnVNUidahJYo3WtIt4hZiUtsukLoF3uBL4XizmRFXWmkBw5nC71BU8EkQ9lfOgRi0UHG3b4rMSatM972y/Fm17vsnlFfjttitapQdy5UhZh9qfKTuyWr3Xb3v69/Dio5/LP333LwDwtvueyx89dDM//uf+b9aqBc47Dh4+xBClTnHVYFKALsFysQJskSFLJTiP14hkyIsWnDf7KTROxOG8Nyqy94h3+JzwKdmDUE4E8SCwExu+82e/gUXJIng0//DKK/4r33ztDwIHwZXrVZ7oAuIqQggsl13RG9trc3aJnJSwZoeKrtS6LW+z97nygAW4FEkxs9MtOXTw4HB/L0LsEikmtCo0afV43ztzRbJCl+gKNdoFT46R8uxma+odSEZJdLHFScZJwEuwQ0uZfIoRzRmHo3IB8Vb79sKDBUBOmZSS0ag7o6s676l8PQBRTwlXVdquQ0KwRqAIrtRxToATf8H62wOpc57YLokxEWMsvXntestlqR8sdi19rE0c5VFlxLQR0/YTppVzLMvlgsViZ19hWpcUTZkcE999w9fzuuN/hZ+89wcB+Pl3vZhfeNeLh7m+6No7+L6vfM2jYto9Dx/l21/9lfyHv/o/ecZVZ4FPHab9xvufwY+87gse1dafPfkd/v6lX8tkMmOyNqOZFZspjW9DNRsxjcePaVrG2XYdwecR0/aJjJg2YtqIaY8f065fey8/8cJX8B3vfh337t7EMgb+5i+/6ryvXf7NV/wSz7/m7s/Ic9peTFt9v/nYwSWAqpnSrG2Mz2kjpn3Wy4hnI56NePbxPqNlts49zPOan+fHX/xbfMOb38ZOPPBxWt5KUhb+8X/5qo+BOo8tF3N116zdzX992Vfx8H0PsNhZQEpMmgnTtYN7ntHG7x2fCHh20QCTQ4bMz1o8FXlQuNoHPIoX8M7hcAUIhCDWNKxvdu9FcAIJQbI5BO8dreZhYSofVpFC5ywKnTNd10HKuHI9X2qs+nLtSsp7z8sAUEJVkVOm1bbsPIiCU0fWhCPineDUE8g4Mjkriy4Ojj6LI3rBpczZs2c5GjN1qDm0cZDZdA3vPFVZ5eXODnlpGbwbGweg7dja2iHecRdzzZw7fYpz9z2AqhIE1hxMXTBwWLTkrU0EDwnScmmN50rkflczMdT4Q1MOHjhATpFlVtZUEc1YitSe3dZBF5D+y5S6QlKB6eVyiKRKzlxzww2A8sCdd9DNFzjxuFBROWse1tdZXSyXBu51IKbEXorcsrW5d6XhmfeemDJdjLQFuFO/t87hnCfHPIzVOWfN1gRytH1fLls2d3fYbluyONT1jcYy3gdmOaE5E1Nkvlyyu9jBi1A5x1o9KZjSN+aTAiIddVP3MXFEBFFBioN3zplOKqRcqKRQ+uo4u07JXnPZkaJQVVYX+MaDt/N9L/jr/D+3/CuOL65kmWr+zdu+iZdf9ps87+gf4RRrvFiVGsMx8cYP38iv3/r8Elpfbd55hFsR/sxzb+PLnv1RfBsRb/WE+8w4cQ5UzRbaDkkJNKNdy0cePMpPvuHFQ5ZLysKyC+dff7D3yHc99W/zomvu5aqrrgWg02wNJlMkaQdq2XohBERlT5ScYteBNi4R8YgIoaroYiQlNZ3ouoHCrSpYzouwUDEbF6FSMactiit1aL3zSHB49WV1zBep9/jS2HNrZwct2Qttzmbf4qgnE6DUKU0R2hZ1g1swO0vmi7TyqCopZWvC2dthmWfKia7rrBljea2LEVFHztkOms4Na+J9MLAUDyUjQ8s9c7HBPusplxQLVSVly6LIOVO5YCcNybCMw7VzXmU4OLfHAY7ymDJi2ohp+wnTzsYGVOlypE1xX2GalVOocVLRzOClV7yZqzb+Gt/34f/EIs/OO19+6OEr+Ue/+nVlg6QkZBmmzVvTqB/57S9irWn3bKRle+Vcss/2ZGT1eKX9fGD1xUD/I8Lxc2urz+yRv3n1P+NZszex7tfxk5rDx45y5BLrWTFi2ieGaX22rvcB5/KIaftERkwbMW3EtMePaaLgJfD3b/4nvOn4n+O1933L3u0A4Cff8aVs/PFihUsXYNpjPad9LEyrQ+affd1bqaslp881fN9rv3SVbfwomPaxvsM5WG/xw6/4Aa5bu48D4fIR00ZM+6yXEc9GPBvx7ON/Rqun4MhI2uZfP/tbSeo5ubyCf/OhH+RjJSg8Uh4f/ny88hcu+zFeeuS1nHrwYRoXqNfWyDHjJzWHjl0yPqM9wfDs4gGmnoaJmnNFcEXlvHN4wIuWaG3/GXMwTlZ1B11x9GqeB0QHp26HK4YFoDj0/ooxWdRbxEDHy+oHQLKa0WY1p+n6BXYWiUyZRFzNR4TcxTInaHxNkmzRTVHalGxBEZIIMQtuseTs8RNMHz6OE2H75Glq8XhfEXpHnzIxJWaTCbPJjCRLmt0lZ2+/gy4ntrc28UuLTFfBs7Y2RRzErjPlXy4Ab4z7FK3BV5njZhdZv/RSLrv6aprDhzh59920Z84ghYJKsgwLoNSqdKagqvicqKSwKjBj7AG4X+/1gwdBMz4EovNGT/Qe7wNZdUWbyxl1pQ2acyDWSC2r0nYtmpUUU1n/ZA3Cch4MRUvGAhnUQerMWAQh+g6cjSlqZtm2LFMieUc4cpB6bY1mNgPg9AMPE3cXOJQUO0SU2cYaR665lM3Tp9k5d47kenBXYrukqmtcdiXTRAjeWTJJadq4ynYxA+xp1JpL5FoNTI362XsItUwb73Di2ah3ed6Rt/OiS9/GLWc+lzu3nsoHTz6VQzwd7XY50h5htjaz5mzlEu+6+zrec/dTLmaGABxbn3NwMid3CfGu1E8tYxdZ1VJNccgqIXbc9tBR3nPX1Re99rMOvpeNahNH4nMPvZWrNyrq6WVlPYzuOnETfBOsAV1K5C6VvdQB8EUczvd00owTZ9H/bOTQXv8sYwjaaHouVaC+9FIWm5vk+QJRGVhAPVW1pw1TMnbUOtsx0FGxQ4J5Ay26pHYw8Z5cHLPmsqdDRkvJAkIsE8Rlck7kLtIuW3Iy7d8bNIopFYqqvR5jJOVEKq9pf2URy34sNtU/bCqFrotDZXVYysWHRZE9SVZ9hWQp27yyXS3rIuLGB5fHKSOmjZi2nzDNyZXwRqOmZ/K+wzR1ApqpmgmXhzMcqd/G8w/8LrftPI8T3Qq3NhdT3nPfdRe1vY88dNlF//7JyrWTD3FJ8wDe
e1507A94SnMHKa6RghCaCfXUzg8jpn1imEa5svMeN2LavpER00ZMGzHt48e0Zx75APN8mPt2nsYfnX0xSVdfh3z05OUftx0+Hql84l0fuYJpWHD6XMN7br/8/CDVo8jU7/Ccg+8s+mB+NuWEryqOri958ZXvIeYFUZsR00ZM+6yXEc9GPBvx7BN7RpMUIbU8c+0tAJysLuP5G79TEh4swKpqQRfnbb8o9+3F/JXj/VsvJeqqL/vHL8ozN/6Aidsd7vG5B36fm9feRWqVZjLFiSPFPD6jPUHx7KIBpuCcDUjNmfuyjILgxeEFgpPitHOJMmYMEjC6F8W3i30Gkh2QUiKlNCh+KlQwoWQeOLtGSiUyjSc4R+WESjAaK0DsSGoUxxg7fFXhqmBZEAVMcqGz4T2ucizahWUOiJAqgfK7BE+M0RZWjI6JE1zb8sCHPkJoE11M3HvPPVx5cJ3GVYRCI9lJICocOHCIyXQNFxpSEj7y9ndQVRV15bmkMWNt6opLjx6mbZfM53O2t7dJ7RLzJYKqIwU/OPqHthY89+an8aKv/ouwvs67X/Majr/3fQRfEVSJKbMsjkKqgEdYzOe0XQdOmK6vFWW1yGqfhaGqQ3loRYhJjUroPOoDBI/TFT258g68o2kqApA0kzUbbW/ZkWMqtLqy3z6aI3MeJ4L3DKBBUnK7osy22uubsruYsxsjTCqaI4e46jlP56qbnsqV118PwO/9/P/gvls+TBWEdrFDM5ty2VOu5iVf/Ur+4A2/xzvf+Ca6Mupu2bJ56jTHDh2mChVOhHYxZzadMqlrmtCACqkEZZbL1iiv5aCSUkfOmQDELuJFqCszKklAziSSAYb3xNTxD5/7ffz6PV/Dv3/vdwPwtpN/mred/NMXM7WPKb/zgZv4nQ/c9Eldw8Si8CsR/t7N/45nH/ojUlK2lnOiHGa32N1Ou0S9cOzYMa6+6en4nFmePMHxBx4kdi05RgSL7oOBsPkEULVovlSenOzA4EMw96pwcnOTHALNbMKlL34Rd7zjnZw9c5bD4ggpEbwbMhX6qDoKqcvkFKGNNHVNiolF6szRq4KTQZf6ccWciubZIdECdOUgCiSNdDnStS1d19EtlyznC6qqxoeKqtC1U85DNoTzASkg3SWjrWYbbFlawYcALiAS7E59ZBHBOUFlRd2VFMlqGU1GzRfAlXihzaM/mPQzy4mSmXNRVz5KkRHTRkzbT5jmNm7G/VePOiCwbzFtMl1DYkdgl++69pv4qfu+j9889W17LGtvIZMLRQsyOPrD9adOrNY3wJ+/7Cf4s5f+IhtrG9R1A3JoD6bJiGmfJKb1tcZ9FfAhj5i2T2TEtBHTRkz7xDDtJVe8g+cd+EO+/m1vYieuekX8yWAVdMnzPf/9pR/jXStMA7hich8/8OxvpWkaEDdg2sbhwxw4eoQ2j5g2YtoTR0Y8G/FsxLNP/BktL2Dz3Baqyoy7+GfXfx2xa3HeMZ02qCqTyYTZbMLm9tYQjIPCSVPoUsW3v/+POdN94kmBgvJ3bvi7XNHcUdbegmROAgePHCrPaO5jPqNdVfCsHfHssw7PLvrXnLN9WarmAJLGYeElJtR4quY4cmkQp7k4SMX54kbUWZQ1JYvSqSl019rmOB9wJVugP1b1Tc1SSoTgjHaJ0rUtKekKRMAi6iFQ1TVS6iUvlku8ghdHU9e2SWTauKRaC0iMSMrMl3OC9/gQqJzVKNSykqEOBl4pIbs7nLrjTkCYdR11BtcrH1D5gFfo2g5xS0A4MJmwUdWm7AKts2h0VQW8gyoEUqioQ8UiLiwiasFotrolbYnQXnLTDRy47ho4fBBSZne+YGtnhysOHMJ5T93UhOKNvTen2jtQRc2wvTOqLuAqA1Gj7iXuvvVWUkrM53Mqb440ibLbLql9IBRgsCwFA8DsbKxZYNl1KIqUe4PRnJ2UmHuJBnsRoxxaSBnCiu63Ohg4mqrm3O4uR6+5khd9zV/EX3e5NS0t43DeE4DGO1LX0lQbXH3F5fg6kMVIjFrXdmjwnnU5xumtTZwITVUxCYG8sHFLhqaZ4ooBpZjoUiIWOmoomQdRM13boiJUmD5ZZgy0OVE1TYkY17QxG2juQ5m6bf7t0/80Rzcc08mUEGoO+YdRFVxwVLlCc6KdW9ZBTB1Z4PTJk1xy+CizyYRJ03Dm7BmCczRVRR87F4Gqqui6juA9TVUbBbQ4LsRhxGoBFG1qZocOcejKy0GUk9ubnDh3hgOHj9L4gPMryqcg0FNjM2hSUoxUIZivgJUzV8hl/e2AEYbrIEJwvj992ktekKQkBM2Z1HW0yyXL5RLvKyrnqIsPiSmRtUO6ZJfADjWSFVH7PXbdSq+D2bq4QlFVc9gUem2fDQBAJQPYp1wyH5ChGWKfNTCshzhSKNkJ+1Pd9p2MmDZi2n7CtJOnJ+Xauu8xLeAJzTrVdMmrrv9Rvvqa/8JisUAUbtl5KT963488qs1d3XyEf3H91/H9d/0cdyye+7GN9OOQLzn8i/ylS7+PEAJXHuw4tHaYqmrIyTLPRkz71GFaX6IjpQTViGn7RUZMGzFtxLRPHNPWZvAjz/0z5GxfWC8WC/7jPT/Brbuf/wnZ4ycrezFtfW2djVnFdLY2YtqIaU8KGfFsxLMRzz75ZzQruZdZLhY4b4GlnZ2W2XRCTrBcJII0LLslKeZhrUWg0sT3PfVPE9OK1eN9wHuPr/vKSfazs7MzJFV7ESaTujBblLV8N+2iteCw92ysrzOdTD+uZ7SHl8/m2IbjqYdGPPtsw7OLBpi0OPnBaPdcbEUdo9DFyk4XresXYCUy0CS1GL5F0gRKZG94v65q/CGF7uXtHjlGvHhciQJW3mL5RnvLHD54lI0DB/jIRz6CiMcBsTT9UlG0dNHyweisMUXUORIWIWya2RBpp9BgvSi1E3zqcOII4tAuom4gLaJRiW1ksbMg5QJe3lOFMnaUuIhYKBSW3RLJdh/vAz4EupTpYmbRdqTgUW/KeOMLnsel1z+FHBxbd9zF/MwZ4nJpGR4U+lpRRM25NA4Tmqax+rGVR4Iv4Uel6ApOhdo5K6uWM3VV0TS1AZ/3pFZLObbiUJwZWg8h4jzBCc2ksT0RNzh6CpUwxWh0v2xUWO/dqq5t0++zKWrOWPM655hMGoJCe+IkW1tn6FUMgK1t1qoKyZlZ00BKnH7oYdIfvpsz996PpMRsfQoI07U1rrruOh66+y5OHj/OiYePk5Ly/2fvX2Nt27b8LuzXWu9jjLnW2nufc+6z6paryuVrVxU2GBxIsA2JCA4RyClIJB4hJCQoRCFKFCXwIcrXKCjKBwRSIhIpQkIiiaKAEOAIZAIymIexTVyusm+57LpVLlfVfZ/n3nutNcfovbeWD62PMec+99xzLwHhu3d6vzp3r73mnGP00Xtr/3+fu7V/a4+lkER5enMXjVz3CL5Z1ME0D1uVKwftRFx6bVjp2QYiijVHxEET1Ro/cfvL/NyP/wv8G7/1D1B9/p4+9rNvf4Xf99n/D+v53J334l/79c2dX/rob+SXX/wXvud1vtf4wvJ
b/Jc////q0mKYZeV3fPY73M0wTzPztITc1uP1u9MNOU+k7qOLxCFtMuHFu++xThPizt1y06WTQi1bXw/FWzSWy71GKRb4gQprM1Yz9thbFVg+9xne+enfibz1lNNn3uLmvWeYROaANtt7H2KinSB6XVK/rJPL1V7sGKWXg8ZOkKrafTLT3A8Z7N4NUDXmLwERPeMhMqQ63HSZcwo7TfFcITuO9ySV3k70Mj+cA6jdPDJhOu5dPQo7c7jvNXhD4rs/s/urktn9iOT9QDzG9x+D0wan/TBx2nsf3eL+t3KTpx96TouyFcaGsKQPOaUPybc5MsqWP8/fn//5Xqok7H/ntM9M3+RHn36bv+NH/yXeLX/q4LTuFpfsKXcuxhfra9Y4LQvLsiAIpUbpBLqN/Ozdn+DHn73LPC8s0ww6R6kN7zg+OO0/O067/jYxOO2HZgxOG5w2OO3/d07LGG9NX4vvXEC+zfyh8q/yu9df7Nnn381p3+t7WneL78lptSn/9nv/CMVP3OUX/NyP/0vUT+W0e+ZpRnQZnDY47f8vxuCzwWeDz/7Tf0dzQCRxevIWtWy4Ozkp8zyRcwRStvMKmYsSRaRDnvLF/P5RZtDcwaNcpO5RRTfcG0t+5LTMLPMcZSuvHLa1jBPqoXleSNPM3jLiB+Uzv/9NahM+KC8Gn71mfPZ9FUzaQVxEOnzv0+qWQbzm+0JrQuiGuEegOw9o0ogC4lFDOCWsf3aP5AJYbYc8MprFJaSvtHeJWO7vPy0LW628PD9iKjx96y2+9BM/zp//C7/EPEcktdTtWEc06hdPU0Rjt3ODPNOAsp5DZpoS+KXOpgjMWVkEkgBJsa2gOZFyOuZczhs0o7RGXmaWZWFaFiQJLs7WYh4NQ8/Ckua+eYk8zWxtpbTG/eMj9tZTZMqIKj/7N/0+bn7sxzCM7/yFX+bh3Xdp6wbWpcEaTUsByrZRS0X62kzzROrN4MyMVivNW19LZ+79gFJSVIXl5hSNUAU2PGSmu3F15aAk+p5pSINTYsozSVOsDxGRb2thfTyztQ2zhmpka+QU10xdiugEyG+lYi1k0Xd3d6Ra+fZXvsLX338fUY4995f3PJ1mpDae3NxQa+Nrf/nX+c1f/yu8/+IjUjPu7m5BhC/82Jf4W/8bfyd/+Stf4c/9/M/zm+9+i/lm4eVHz2lbYbm5w1tDd9GuXQ4ZolEmwem1ZHv90tJlnNFUTUmiIbk2kCzUBr/z6S/w4zd/gf/o238bjy1KL1x/MREJ0vz9X/gP+Ud/+v/ERx99dPl99z3pa7XVxr/4m/9jfuvxpz7RT6U72f4F6Xr8rqd/jn/0y/878pQ7eMAynaJJoUMWudTRdOf25qZLOGMNsiYkKUkTL95977jH22+/jVmj1o26duoXw1pjXqaoNaudAFPUNm5bYWuN2td388b0mbd5+3d9GZ4+4ckXP8f24QfY+y8wbyHVLPHe1rMIRPb6yYHE1v8UiRq9x/pqALK7U7fS7S2Tc5CrlYL3ur24I+YkUVwuXxiTBg54a0fDSRWYcooaphri/ZxT369D/frK3hyIeRB1vOLeQfrqUOsCrdmxp7tNXDIh4kDs7li7+PH44vKDjcFpg9N+mDjt+fpZ3P5H3E3zDz2n4cZmlXO/Xk6Jp2+9zcPDPT95+zX+sS/900eDUxE+xml3/NyP/d9e4TTv+1Ws0UpkmB7f9a3hrVDLyttvvc2zp09RVR7PD7QWDZEvnPZ0cNp/jpy2I+Xub4PT/uqOwWmD0wan/WfLaX/n9C8hOHc3t5/Cad/9Pe37cdpaMj//4u/iob3Fj9x+nX/sZ/4p1vV+cNrgtDH6GHw2+Gzw2X/239E+ic/qWmFOR2+xaz7b/eG7+GwPshjRPwiYT7c8/YTvaKfb//T/7riUX8U3593ng89eNz77vgGmqAsqmBhi17WBA/hFQl7VSsHNok6kfMLTskvw5PiH9jwlzKF6SLoutUsLU6+hmZIeG2qEzNPcKf3Bnp4W6iasrfJ7/4bfx2e+8HlkTrxYH/kdv/138PaTp3zj678JwLqduT+/4MmTG0Scra68/+I5P/XTX8I18Zd+5Vd5bM7eKPAzT54gVsg0Mo3Tcss0zczzxMttw5OgOh1rZdYQMupGW888Xx8pd7cRnc+J5Wbed5x1XZlul0BNMaoVosqsIgh1K0cEtH3wPv70KfV04i/9uV/g8f0PWJrjW70ypr7pvabnNE3knOK+pxOllU7aiTnlw0DOcu5y24hsiyopJ9Ic9R+lNaRH2TUFKNw+ecJWSjTkm6aeXQwHYhOE3Fqj1C0ytJqR2B3BMauIE8/sIeV0cSQJs86oO+f7B375z/wC53VlnjPzEuu3TDNTzqgoT6aJTYS1bLx8cc9Ujadpop5XBGjbBqr89j/wB3icJz54/hF//z/2P+Hf/hf/Zf7sH/8PeLGunFLrdXrhlK+eBSit4t7YO0xIr/cLQA2bPe1NCWulrCuiglmj1Uf+93/9Hzoi/zc3NyxTJufMMs/UWsnSKKJMd0+otRzOS4qGh5InHs4f8nd88f/M3/6Ff46U0pF1s2dtJBUmFR5fvGDu+2ZWcSArzKc75pvTARa1VjQlEpF1l3eJsYNq7PORPSIRqadVLgFwIWftWx3P4x6ZFzlnHu4j6+72VkkoZSsUM0SUp3dPQKP+53e+8XXO3/gm57/wFzn9db+b9cULzi9e8EyFCKlzALebYRWa7eXnOgHQM1Zape37AohJnOzcqbXy5MkTUsqA8PjyERc/rt1qSPCTCLVLTveGkq21Llnds2kUzRPLvADRzcrMWNsah6iOYzGJyJBKKaE505DYV5GDEZyrg4XEczfClvZMrr2p4CuHYYsaxEg0uzwyr8b41DE4bXDaDxOnPXAGd7Q1cm0//JzWNkwDV4sIL7aV+XSKjMl5praohy7wfTmt9QNvTgk5TfGl44rTssD55QvQifPWMN9wFzQtzPM8OO0/Z07b1zq+NDE47YdkDE4bnDY47fXgtKcC//Tyh3r1iMzD+XFw2uC0Ma7G4LPBZ4PPXg8+G9/RBp992vjUAFMzj8hwB2BRORxA0eP3exRrB+o9snc0huo1CvOUaa3hOJpigiLCpAlvPcLmEZVPeYpagdIppS9Ma42k2qO8kJaZKSunp3d88cd/jNOTJzxuKy/WFZaJ5ekTnjx7K+b8kNi2R9paaAIpZ7780z/N7/yDv58P7x/4D37hF3i5bf0f7xVZEjcinFRo5pRaECdqsU4TCFQLsDUxXJ2UL828Jo36o9aMimPFDgMVhHUrtOpsa2HbGt6ERGZJM+tWsTWa5f3mL/1FvqiJt37iJ/jyl77Eb773IeetMLkzJQ2gLB0gLDKXpikHYE+ZaZ6oa9ShleTRz4gA+sWmY/80TSFpTYokZUrR62ivO3t+eMCFIJA5g4SUs2zl4HY5jBZSVlRBJEja3GitYiY9K6Ebu0PzFurWPq+swikl2mnm6e0pshZ6ndt5nshdCiwSWRA5KbNmHs5nHsvK9tFzAN77K7/Jn/wj/wa/9/f/F9HnL5hEmTQzzQ
t5WUgmgMaBhnCmpEG2WzmHLasiBHmJXEmqu8Sx1IIDmpWUc9SxFEeycKMXMk5iTGlhShmlMCUDg2rRtNAB73bt+7rUSgNUjaQFvIS8VSM6D3uASZGTo6mS1chTOuabUuYaB0S0H9KijnHSHPJhAJWoW9z9TTQksyYQnd1CmomGrLjRIF0OfZJ67VEkMiA0wO7wBeeQqr9zuuPxO+/xy3/yT/Nj9/eU3/oG6fEMab7UGT38JWTjWRPNDceDhPpqvSJTdbBWsQ6c+zO6Bei7OzlrZDYBtVknuSAGISSph6S6Z+HsfovHYdAsCEgc5jzhBqU06KLaHaTdG+Yh+TZrMS8XNPUmpeyHI8G8hbS+H9hUdl/p9+oY+0q2S2vH/Mb49DE4bXDaDxOnaT2BCDfLzJPb0w8/pyUlyeXAWa0ySdh01KK2bvPfn9NcwuarR81x6XMEjkzUPJ8icxMh52Vw2l9FTjvG4LQfqjE4bXDa4LTXh9Nul3L5npYGpw1OG+N6DD4bfDb47PXhs/EdrT/g4LPvGp/eg8m5RPI6KO+AK4eZdUkr4dxmvd6oXKSqzQ3Motag+3Et73I8UY0+MPtN3REN6aTJZQ6RteCQOJrq6ZRIScinhadvv0VaFh5bZW0VTykkozc3saF1I0nC6nY0+fv8F77IF37HT9Hee5/7slF6JFBF+Ex7Fs22CClia0b1ihtM84x38AIwDHoUViWMRnOm9Q0y5IjyQ8h2WzVKbWylUqsReQSJSTOsG3td5g+++S2e/tiP8c6XGp9/+20+uDlhSVGiqRruR/3nw1A1pKTaMzIiBLwbh/T1d3KOZmwQUWpyOpw9a2LqtV8BUk6YOykpaco4QYCbG8HvF5tA+h7toIjj3rMtZAdWP2zLvK8RYfwqmSkpp7lnKgQeAzDlKWSIotRaESI6O6cpMhis8nh+BOC+fsCv/7k/z89++aeQ+0fU5XDqlDPSANPDyET7molga9S7PIDe7BW7dolGcq0ZYg1MmCahtU4bAqRddB3AiPCK3UA0VzN6xP6ygkF8zYIQVXrU2Ai5YjreGZk5YW+xqgG2lxqgUQd3x8EL0EfGwP686UBKuezbse4BrKIB5gGyQeCSDgeFbvu7fzvaMxPCh6J0Q4DV7Tzz4cuXvP/uezy7u6N9+BG6FZjTRfJ+2FMAvcgO+YSd4q+uQ1gamEVd035oUxFqc9ouT02XAJ04l0j8x4A+aXz2+nX6M9BrlApRvrElO+Z88OqOdx7ZOrYzuO4gf2Fg332ktbBR6RkJIliL5nvKRaoaS74flq6YfIzvOQanDU77YeK0dY2msFNOnOZpcNqx1IPTjnXgh4PTjjE47YdqDE4bnDY4bXDa4LTBaW/CGHw2+Gzw2eCzwWevP5/pp746xhhjjDHGGGOMMcYYY4wxxhhjjDHGGGOMMcYYY4wxxhgfG5+qYNqjWA6ct5W7ZSH1KKqYoymRc8Tj4l2O9qaW7kYpJd7bI4C1N60yq7RWOaK3EDVUAXcltZmbeSbljGlEsKPhWUMWZZpyj4Dv9VphXhb+5J/+0zyWjQb8T//xf5z1/Q/56Fvf4S//xl+JB7KGivATP/U7uX984Fw3Pjqf+Xf/6P+b01tv8z//X/+vmG6fgCjbuvJ//2f/Wc6stPnEkyczqxnNnWyOYqSrmOE0TYi1iKKqRHZDzuQ5YxKRY9lrkJqzlsq2FVo1anVyWtjWlVYqkyp3eab2aPO3f+VXmcyY3nuPb/+Fr9JevGRBeHjxHKYpmoT1QGLr63FI6wRIsJUVdyNnxVvdY/jgxpS0/72GrFEVS9F+rrlFBB1YtwI4XhuuIb2bNOHzEvtgdpHMOZhHU0MI+TEpJI+qkK+ixVH/doXqvf6jYHUjqfDs7jayV1QOebJbNA0092iC5g0157TcMCXhZp6ZHibwmP/Di3v+zX/5X+Oj+5e8/9FH/Py/8Uf5zV/6i9x/+Jy3b55yOs3ko/an0FrDMKZlobZCM8NUMDfUiSZwYazg8OT2JrIL+menZSKaNMZ/F4eCnCf2+sGYHzV0Q4KdDv+qrSEqzPMc9Vh7loW5UWvIQ6eeISNuOMZ8WpinqTfoq6g5SZXT6cTWtiPavJfXUxHyNJHzFPJqBJHelHY37Z41oBJ2K90Pm1fynJmWmdr93MzYziu11kvmTBZokRm0nG4414Z5yLufLidojWTOw7fe5VaVZ3fPeLJMTBI2s517o0yPJpU5pbDVPRtC+8qoHlN2Mx63jSQhPTYXHh/PuPmRsbS5H/i0R/Ot1uM5aq0sy0Jrjdbt7djE85lbd8yjMd5WC45SS6Ou25GpE/4Ye+zNMK8EcjitOfNVbdMwp/Afl2jgp0SW0Ho+972TPYciZpI03j+y4n7gMThtcNoPE6ediPro/4c/8w/zL/zSf6vvv3NdI7m1mMO+dntd5H/qK++wlcLDwwP/zM9/kecf/E3cv3gRtuuXfUw9s0uIRsOHH4RxXta0P+OedrbXdBbtGWxOT7u6dihIvWHshTQuGYJXt3hlNLs8ix/3us6jk+5z4YPS+WpPbpVX33z9qX6dntMll0yzy0NeT9V74lyUVtkboh6ZWvsc7ZK5JVeZdvu79kdsbrRmNDdO/9ENrde0Tj0z7Zpa9/lerdhlbsea+fHHnoG7X2DPENyzElWu5tV9Zs/oi2eKzMfLNeSyLD3bLhLdvB9tYh1aM9yN98+fA+J+g9N+eMbgtMFpg9MGpw1OuyzC4LTXdww+G3w2+Gzw2eCzyyK8rnz2AwSY4iH2ZTwuqfLxNyMQNUQJCdnuKCGDA9zDsHp9RfqDeGukaY6/Wtxt/wdwdyNr9JNp4mjuANolgGH4MOXM++++y/OHe5gmfuLLX+br21/kw29+i+2KcFISfvTLX2ZtlY/u7/n13/oNvnX/wI/85E/yt//hP0x6+x1ElfPjI8s777B9+IKX55X1dIcmwVQgadQrDN84jCUcTtAeDJEkyJSjFqw5yEXi2JrTitGq0WqsQauOtTCOLF1iiPP40XNefu0bvG/G+f334XwmWQOJdTFAruSkSK9T2Rq1NmqttBogbZKQfIj00KSHoQag+CWYI+B2AR/NGWtxPURJUQ4V7STcWjvqqu7/SB+SYEEkobn/mRJpTqQ8911xlEYSg2Z49V53U0ma6E94AUOzDia9fmcHrCQOqnjO3E5BCqVVStm4f+8D1vWMrJWv/6Vf5fG9D0jVmTtZpaQB2HR73w1LpK+lYS0k2S6XA0qiM9cO9Mf+c9j6q34S1482lrvck8O/rHuYdQmku6OiRN1ZpXX5qYgch66OOOFvouQ8UT0aGnoHrQPwocsvldTfm7pUdbfli38SGkfpN5G91rFjrZFSrJsQDSdrayDbQd6tNZrGwW+v8BrNCnc5dWZJiZoSlMJ0OrHkHA3kLDsYAAEAAElEQVQFIe5zWbbOq34QsapG00aiZvNByLs/9gCgImy1BrYcinjf3bHXbPZjj1uvLZpSip9bi2c7ZiJs60oE/KJ5obkedVZbtQtOSsc4t/AlDbI2t4OMLgeJ8Ls85QPkrR+ejuCgc
EXCu7X6gdFjfPoYnDY47YeJ0yY981/7iX+HZulYa/dXD+3uYT/7gd89ajrbi98im7OY0b7xlzk1Y1FnnpcDI3eb3znN+rUcv8K0i+nvNrpL9A986hzgn/Cd4fI9wC9H0Y69zuXLzgWj5PhyhEiUepBL7enLRZ1aCtM0MaX+Zb92yX7/Qn15RuUoS3B8WdtLI1z8+fhj5zTb62rTy6NoL+2x+4B3DuhnDndSL/2xf7Hz/QsfcY1mcV4QFfTmMp8dC+yqZMm1v8WXUsVa3U328uXGnVZL1H/vddCb2dUXw14+4fhiE+eZZrFe1lr4YJ6iHrdZlH051vpSFmHfq6DQ4KDajM9Ov8UpnVEu54rBaX/1x+C0wWmD0wanDU4bnPYmjMFng88Gnw0+G3z2+vPZ9w8wdWNPqvFAh9dHRKzWypTzZfF3p3Fox80d9W5cmhEVsmVac6w5zSppWRDAxNlaC+NvhrXKtCy9CRvM08Tc/2E7HjXqA84ps94/8vjyJUwZm0IpkoB57sEGq2wq/OTv+32cnj3jvQ8/4l//3/6TvHv/SLt5i/TkHfjs5yAn9Hzmx/6an+Xrv/AVPvjWu3z+rgBKk4Qk0G0LcO/ZAa1UrNdRTSkFgCZFU8ahN9mKrlmG0NxRF1oTbDMe2yNJEirKtq0BhP0J9XHl4Zvf5jsv77khIeczasZyt0S9U42GahD/WC4p8bie2bYS+yVOK1s38MR0uxzGlZEgguZUC7AXOGqiSgPpRn66vWU7b5zPG4tH1DWlhptRtpVSK2sJxUn8Y37U/oy6sMo0T6SUj7/nZenO7xQaWo1WjOrrYdxmQpUW4N+HmiAoOWVENNRFGN4qmJNwlpwBJwMtZeYlcaOZx7TxrV/7Ddq68jRNPFmWyyGEOKi4RCPIam3HWR7XFfcIZNi+dhpAsvuAqjJNiQgfOSbeCeDiU0bUubQ942Y/+JB6Nka8Lw5JEo3gkGiIp4nWLAJDml8BPVR5+XAOX5xnUkrUXk93XZ2c87HnipBTJqeJOU8kTQH0rSFRbfNS8/jAsx4SE8HFqXU7au0edYnxqJebM1ZqrEtypHdr9NaOtob7+5ek2DShSZmih98RTW9c1voC4iAeB7aclLrVuKYejBSfr637Y6+13EnPPQ4D6oL0tZ5SxswpW43DUXn1wNL2w80+E1EeHx8RTThCc2MrhdaiVu56Xo86tPNp6UTuRH/STqq7f1zVCxYVkiSWZaGWSqs1AmP0DCF35jkd9rTXSnasH4bG+H5jcNrgtB8mTjvJyj/5t/xvDk5LKUegux/yVKXXYW88lgpEPeeXjw/UnpH4uG1sLpzXFTPnM5/9bCQfXH35DjtvbNaC27xx3zOUksB0xWmTJu6mmSlnUkoXTusNhVVf5bS9VLb1dXaLtdoVurt7BaYGTrtBShOqicdtDU5LmWWZu985iPHyxXPeevKEJ7d3uDXWh0fcrWeTXjgtpUTOEzlNnKaPc1rUht9rdotecVpb+7M5Ok/My4lpXo4v5a01zg+PlIcN65h8s9zFfqcUKuerZ9R5Yt02Hs8rOsXz5DxFxq0IrTbW89q94MLHQjQNnueJ9fEBJRoO6xWnPb54wXJzIuUJ0cy5n0kOTrval2WJ7M51XeMZzo9sZeXZs2ecz2dKKWwf4zTN6WOcFl/CtlJ4eDwfnDbJgvs0OO2HZAxOG5w2OG1w2uC0wWlvwhh8Nvhs8Nngs8Fnrz+ffWqAaZ7n4x/eT0mJ+G3sUsoZ7e3RVDUUBDit1R41lCO67W4hTZ0uhJBzptZzl3F5WLyEIuXmdGLK0WxPULImDIl/UG+G7AoHoG0RrRaBzzx9grTGy4cH/s1/7p+jvLinnFfmXv6subPi8PQpnE40fc63zo88/eIXufvCF/n2Ry/57NtvkQ0mSfy9/8j/kD/yf/nn+cr7/yFlOXFOUSpNcJaUqe60rRuihCrk+YsHntzdMiHkbHitNGud1C7R6JvTDWtpFK1UnOdl4+52YZ4z6/Mz+bwiffOe5YmbNHHjgm1nqBs5Zz7z+XcO1cUe6U7zTM6Z8xbS1FoLdl8oW8G90TZhfvvtHj11rFZO00zRxuO6sa1nBEcSkCf8vGElrr2cTohmHu/vaTi2FcSdm5sT4rCdV775rW8CId1dbk7c3p6Yl4nltHD39AmtGaUWHs9nnjy5RVK455N0hzfBq1HOK+eHe1qtNK8RDZaLgmlaJqaUmZcTW21UM0prrAfoJ0onnP2wcZonlmniZlnCKU83IQmcJsq2HZLntEzR9E7Aq2ECpTXef/mSd955h61W3v3wQwBul4W7ZcHNeHt+FkSmznldQSKrI+2l9wiw2mO/AkeWRLN62NAeZY8otlPLRaaaUoAAFhkH2xbPaN4wK3EYc6eUjVIKKpFhsyzLJUIP5Jx7OT4JIOe4bSe7C2juw+EoA4iCdJlls4bskRqceZ7JJOpWIsDFJQqfRJB8WY+thXT85jSjKYVMtGyYz5gK1Yxzf2/ZVpoZn3/rM6jv6i5Y5hNWS8i8uy+6B4iWdaPVhvZDH+zKqpCXag+irT0QFweekJvP80zt2Sii2lVOPYMi5VCt0bMCmmAtPrvLR3fJdmvtsN+Q3B95FBG0sn0DOOwbiGyGfo2pz3+f274f5qHwGjlxP/gYnDY4bXDa4LT95cFpg9Ne9zE4bXDa4LTBafvLg9MGp73OY/DZ4LPBZ4PP9pcHn72+fPapASY/4nexMIdkAo7IbUphXPSFDIMMcJ57dBtv7NKM6z4w++IZhlnURhVAk1Jq6WW1oG4re4+VVgtNBelTl9zlXRKFuu7mGW3Gi299G6mGqvLk9i7uvZ55LGts0jRx97nP8vf8d/5BTp/7Ak/e+Qwvysa/8//4f3I+n5mnib/3536OH//tX+bxvefUl8/xdqa64a1xO0UUXvssG4K50FplrRVToQnQQgnTrCEtiNEcSm0UE9Zaua8b25z48d/1U7z99tv80r/7x5hyOvoC3c6ZOSUyUJNwM9+wnBbSkqEGmTitz6OBCXnKYSAquDdq3XBzkkLZtp4lAm3bSNOEI0zzQqExv/UWN1/8At/8jb/C+f4l9TEA5TZP1LXw4sOPuFnmWPOUeOedtwPccOY5yqXF/TPLzcI8z0zLxLzMlFoxMWSLOR42oIptDXch3cw0K0gRzJQpT6iGtBIi4iqq6JyZ54myrjxfH3heK96zWabbU8ThWyNpELM1Q7aKKrTASkQJ+eAu9e37A84yTTy2QmmNFw+P/C1/98/x8PIlf+qP/bGwy5yoObEJVKKmp/SsACH8RS4yo0NO6RCW44I1x2qAYYCxdv/qH0uKG11RFfZcW5R703QB48hKuNStnVIiq5KTMvcD1h6Cjii6d7+2w89VIKui6eKbTtT2rOYB7mK4QEo5lEsSkfQdbSKq7ZFBkvpCE4e9bVsjAt/nEVLUSm2NLHN/dqHWxmqVx1J52cFbU0JV+fq73+Gdp8845RkvlSUncMGbhyTddxLtNXbNsFKPOsQxR/pr+9y6tNsNNLKdlBTCIwREOfhaLrJbM+//dYmrO6XW
V0i11EKSkDMLHNLbnPQorciBsnsdVkc0oQ5Rvc8C/7oNHDsuUXfW/XKoHuPTx+C0wWmD0wanDU4bnPamjMFpg9MGpw1OG5w2OO1NGIPPBp8NPht8Nvjs9eezTw0wmRPlqPYF8n2ZItKFRkTSaiWwfgf6RE4XoIrQ7A70e3ROLsbkl7qp8flMrRGlTqrUskXmQkrUUkgpak/C3k8mJIbJ4XaamFEeXr5gSpk8Lzy5uYlF94bWFdzxpNy+/TZ/93/7H4B3PsPDVviNr36VP/Iv/yt851vf4u7ujj/8X/nb+fyPfIn612z8xlf/ItuL92llRbyxmQNK7ptUXWgeT1lqPRrVUYxmUVtxCmylNePxcaUys7qxqTB//jN84Wd/F1/43Of4+T/2b/FsWphSyNJOUyarIO7knLh5csvp5oROimuPQFvMo2G4VeZlZunNEVsr1LLiJiQVtnU7NrRtBa2VNM1MpxuoG8vbb/PsJ3+SX/3qX+L5y5es948AvPP0Leq68t4HH/Ds7pY5JU69HNtei/F0WsKwcmY5Ldzc3kTt2pxJSTEX1BTtzQh3GyjuIBXUQ1rZpmjOV5Wb+URO6Xhv3YV5WVGZsFa4t8r9KUfpOeALT58AgpaKK8yikXXSAbV1x3TvzQb3A4hw6RE0Zc5WqWY8bit/zR/8g7z86EO+8mf/bDyjRq5HtZh/sUYS0P08VO3qcORHBswF7MPJrLaOh3JEt8OBI3Jt7jR3vBk5TR1c7IjmB0grzevx95wyc87krExzprWLcPy4B4ZZyGe1E1HqBzBNF8I5Ogp1YHIMTfmol3vUKw0ExbyFPajCnj3jxrY+MrFcouEOrVZKKUhWcpoQgVadx3Xj5bbx0Ra5BE/efpubu1u+9vWvMk0L6SbjW2PRBL0OaS1Rd1QFUj9weM+WUbmA6h6Fl+vIPWEPUd81gLq0GgcA4QDufURmS/h2rfG8e93d62j/eTszaUJakKG11okjRblBroH+MjfVKFWIBP55Z+JmF3vaGx7unx3j+4/BaYPTBqcNThucNjjtTRmD0wanDU4bnDY4bXDamzAGnw0+G3w2+Gzw2evPZ58aYGqtIR41/SSnaBrXL2zEpFu/vnfLzXsktBmlRZO7KSlZlW1d+5I6SDQjiyZthSa98ZQL5KhzKAiYRX3PPp9qDal1xzUkSyxpj8DNKXPKE9QN8YjYdZvldln4bErIyweaOW3K5M+8g77VuH1yx8/8nt/Dj3zus8i6sswLv/in/wxbbaQnT/jD/9B/j1/8U3+C977+Wzx855u8/7Byclj686q1zmXCWgrJjSYTKTmlFGqtlL6utTXuzxvP25l3fuRH+Wt/5mf5mb/750jPn/P81389nn+Z4zmAdn5ERJimxHJ7YlkWpnkCDTmkzolpiffW2jBznt0+i6iwArKQs+JuKM76+BB7ZMa2rkhJpAZzXijVsDxht3d8+PKR9z78iIeX9wD8jT/391BevOQ7/8q/xt1bb+OlsG4bv/FbX6OUjdr2J4R5mXj77Wfc3N1SexO+ly9fxu67hxSzg4C78/DiJZiFvHK64XR3y7ZtPLy8ZzoF0F8AquIYDXj+8IKahM//5I/xX/0H/z6+/qu/xrd/9df4kdMzBOHxxQu+9su/gpYC2kgS0WtBIsKbE4nWo8o9M6Zb6dkdNSeLcDqdUBV++md+hv/Z/+J/CcCf/BN/gp//03+KSQLkaws/ySn1pnONzS8u2OSqZnAHyVoDbAWi8WIH1dYuvqZT1Dw1b7Qa6yXCkWmzR8zneWbKE0mjwaNokOnpdKKUclzvGO6A49QgYNFgqauMIdEOiEZfG++44NEY0R1pdhCXiCB7XVczUEc8wGtd1+MgF9dOcZo0iz81aoSCIJLQaSFPYdc/9gf+AD/ye34P7/0z/0eKw1oqN9MCPROjtUuNVXOh1R03ApC3dUVSRnvd5tYu2U10oo/eT8ey9DlG08vrlXP3Xmu4hpRYhWreMyjkqBftHZNaa0ipaGnRU0uUlLQfAF+tL70bS6+YSvSAin5WUV/1konRuuE4fPfejvGJY3Da4LTBaYPTBqcNTntTxuC0wWmD0wanDU4bnPYmjMFng88Gnw0+G3z2+vPZpwaYUo9mpZ4dgPuxOFPOqDitGadlJomg4lGn0IE+SQBPwi6Ti0eIKGYYBqDgh9LKad7CsUVwESTtETYjnTI6JXb0bimuoSLc3N3QSqVuhTRn6rZRrRy7NqnyJGX+xL/+R9kEfM586Wd/mt/+N/+XePr5LwDwLGe2JZrY/flf/Hm+9OWf5ks/+ju4+20/we+eMl//tV/hz/37f5z14X3EGtozJCbo6zSFcyTtkb+oU1pKofS1q+6cvfFeWXHbeDuD3t7w8//eH+dX/8Sf5Ga5ZU4zU0rgoNMCLSKudm+c5ol0mshJwzC5OE/KEbFMcw4S9YhPpmnCrAbI5QkIB63NmFJIVbe18rA+cvviHj56wcuHR6oI0psVyu0NlIqnhExTr5npvSFZ1L3M8970rzfVS4lZFU8ZM4tmazhTUqxUrEZjxb3+qqriGNO0RMTcd7VjRIh3hxAEo/KwPnDz1tt86Xf8dvLphi986bfxbL7lVBUcnr/7Ll//C1894rUJWMsWUsCUSCK4eO8X56R+4DCEx8eNjDBLwkuFh40pLTz98Z8E4K2v/hrP7p7Rnj+nbZViRvGM1YIiUTPzakTjw8hUiEwcoXap5jxP9Mfs7wXvNW7vliWM3B1xI2vPR+j2ZB71PZ/c3qKaMHPEoNVGFSibknNE6XGO2qt4xNX3zAl60z4XufijOOJCFiXnHJlA7kzea5eaR8YEjoqSp4xkowo02SP8kUkz35y63LVvowinaWbOmdIMs9bXR0g6cXszMz+7BeDmrbeRecZywjUBIeHd348Lacrsi9g8sk30irT2w6D1/+1ZTdqVthaYjeOIBmblaYrsieNA2+Wv6xkhCDtoLuxmWRbO57WTsTBNUzRKzKkfcDKIsnNJs6jBCqB5QtNu20EQtVZqbVHz2Jx5zpG9YZE5kVLGXdiTOcb49DE4bXDa4LTBaYPTBqe9KWNw2uC0wWmD0wanDU57E8bgs8Fng88Gnw0+e/357FMDTHTDhIg6hwRP+iYp7u3Y4KjzSF88+dh1us3IHqaDgIgd7PcIcUTiotaldDlrxiVkey5ETcScLk6kimsAXsrpMJZpnvCeBdF6xFoRTpr41q/9Oo+tYlNILJ9+7nOU+3uaw8mNJzlAYF5mnn72s7z9pR8lv/MOn7tdqG6889Wv8v57z2mt108EkjsqUVXyaA6HgFsHrAvxFTMeW+HZl77I3Rc+i5wm3v3mN/grv/Zr/Mav/zo/887nQop7tW672tf37A7CUZo3Okz25eh1GJV+XwM3RBxUUBJ5jn0wNfI0M80LpERz4/x45vm775F/87d49uwZbs6aQy64nR8o25n5dCJNIT0VFaiVUjesXZzK3Wm10WoL+aPu8uKQdwpCOySOPTKu0WDR+zNLlxdWM8Qv8uQwmmiottXC4hZyzMczNCOnjNS4ZoAAIb/UoGW
3aPKmHc1SUnSP2u/3cFCcSZU5JSZJvP+1r3FaTuSbqK27Pa6I9X3BsWY0aR3EE6k7LFfT3u3cMbxFs7pmDWeKdxyuE+/CLwTnXCSl7t1f4ADqd95+B7eGlYLZ1i8lYFwOTjiUdnz2WE6BXc0pypGmsMvPxfufKFkSWfQgFG+7/NwPefl+3WoNr/EscWDbI+dCsgDDJEppLWrkBnIHmauSeiaBuuOl9PVyTMK2Xbr9IOSUO847rW79XjFvhZ4l4AfAH4fP/mdIb/WyIP13onKsVkTzwzZTirdvpRxZDKlLqt1jhZMQPtI/m1OG3cb7XA+5qUdt1T1LZM9CiOwgO2wgzCLwRNV3SxnjBxmD0wanDU4bnDY4bXDamzIGpw1OG5w2OG1w2uC0N2EMPht8Nvhs8Nngs9eezz41wFRr7dF8eHl+5GaayX0TS6sIRnIDsw7GhpeK5IQkZcp6rKNbB6z+IEkz1eMaKnJE3gSY5xkVJaXMzenE4+MjDcdFEYGcJ3KX70Vd1DD2YhHTS1Nmzgu3t7dYM7bHAKo5T9wuJ96ZT9xirBjf+JVfZT0/km5O3J9X9OGeZ1lZTif+u/+D/z7zb/tJePY2pAxl4bO//af4g3/bH+Lf/LXfxLd61IEsVnA3NKeoV+ngtZEkDDa6dcWGl7ry4YuX/EP/xD/BdHPDe9/6Fn/s//ov8NE3vsXdsjAn7U3IwhCsVKasLKcFxOOaDljUXS3NjkjiNE3M88zWtohYu7FtG3hjnmbubm9RAmjNjNM0kfNEM+fF45nHl/d88xd+kfpLX+G/+Q//w3z1q1/lW9/8JgDf+aWvoM340S99kduUmYAswpPTiW984+u8ePGcUsphtDiUsnE6nTidTszzTM4BaO7Oy8eHw8inaTqkqOEUDfeQG9ZWyJOS89zXesMsoqxG4+XL5/zGr/0av+32Cd/61nd479vv8cXlDhHhfH+PWOPJ3S0VaA+P0BqaNJzEPBoXducs20qzkGDe3tyQXXBVPv/sbf74v/Kv8vTZW7zzmc8C8PWvfY3vfPObfOb2hKYJFaW1Hax648OrIZr7oSZIoTaj1sgymaYJlcTeNK3ViL7nlIAgNVyY54lWaoBVrzUcjfCE3/vX/nW8+PBD3nv3O7z37rdZloXTMrNMmWWO9XX3OORYPeAhJT2aX2rOaJKjFuohq+144Bb2lzVdgMh2oDJaqb2uaRBrXRspK4hHsz0iUg6QJGoo236AFMHVcG+U1nh8sfHy8QUATz/3WZ7kzPrinvUE62y0vLARB5mUMqmDtJnhtKjlTNQMnTTT3HvGgh2AfD28/6dJECFsuZ+3dqDdgXdZFkQz7nB+/8OwGYBmTKclsM6dcv8YrzWjbYUbSWgWTBydZrQTx+4ze7PElDO7LLWUwl5jum7xs5kdMtY45H7scD3GJ47BaYPTBqcNThucNjjtTRmD0wanDU4bnDY4bXDamzAGnw0+G3w2+Gzw2evPZ/qpr44xxhhjjDHGGGOMMcYYY4wxxhhjjDHGGGOMMcYYY4wxxsfGpyqYRASViKrRI6DOLvOKiODezAqN9zZ2eRuvSLAc7zLKDHSJGopryEXNoibmrsMLuadiLaSwjkdNTAc3v5LHab9HZBNcNFuXyNouVXVNPYruJBXmlLlFka1E9HHdSBbx1bxf1LsWkQY0RBxVIatgEtcC+nV788CqSJdxxnOkkPFJjxKmiHxizqSJm3lhffEcbYU5R3NA2Z9AhJwS85RZlonS1qiDWBtaG7005UXKdiV9Q2N/IkLfG+8REWnvcdMIYsb7syo380T1Rk1Kff6cZ3d3yI99CYDy8gVeGzfzRBYhEfVfJQnLaabWE7WWY/VrKdDmoyEa2WGX1rnvE+9B5HRE4N0B85CKZsXqhlvq0mhi/V2ZcuZmXqgCjy9e8ME3v8H64h7dVlqXofq6knBaLVgzRJWbm5toUpgT3vezB7Kjhq07zYxGwwmJ7M08YeeVM8950Qtj2nllUSWL9MaGilkjaZcWy0UN2YXcPSovtCaHNHFvMNcdrfueRm3dnoGifS+nHDbsfd3C9lJk6CShWaPWDZXIEIgsgfBRM+m20aXf3n2y/8cuS71M+vg5pXTU7LRuY+5dgr1LZiHk597v1+WU3qWkQjzrYZ+H5FZinZMfXpsFsju6hj2dv/MuzxFOoseaN7ejdu0ufd/nLGhI6JP2jAGNuyiIGSJ6PJt1WanQn8VjtuYWmQ/CpdFjl75GbVmltsvzQcjDtdeqxTuGIr2xqB0ZGqUU5jwda94nHrgajh92nvMhP7/OtJFIzojsrWvYG+NTx+C0wWmD0wanhc0OTgv/Gpz2Oo/BaYPTBqcNTgubHZwW/jU47XUdg88Gnw0+G3wWNjv4LPzr9eSz7xtgEg0pKCmc4qh0uoORh3FNeSKp4tpiYlwBvTmu3hv0AR4NpnJKqAsqylof457uWHU0h2yvlIpq7o29LHCq9hqfgGtvjOWA7wYcjhLDaN1APWWSC8kbkmemZWKZEtvDmVIrVlYWd7IoiwhWKm4F8RKAZBXxRlJYktIUsnagt5h7qzWafOXMNE+opmgihx5yseTCNJ2gVibg6enE/Xe+xUkST5eFKQnJL/Kyec7c3p64vT3x0ctKM2PbKqwFT4LbFZldkWvKCqrUErUwk2qs477B7p2Q4zdzTrx1szApFHfWb32Lz/3Ej/Old34KgF/5j/9jqp25u72BWkmyA71y9+SOnJXHh9jHsm6sDw/4coNpw6hYmpBe8rO1Bju5e7ihHFLDhntDBU7zxPOHl7RJcN8b+SVUMrjw7MkdLx8e+Oi9dymlkiSRJVEeNwBaKcwY5byGdDhl3nprOZqpGY16BVWaJ7w5zQvVHJdMFuGd21ve/eADXjx/yfrhi+4f8CQnFlGWnMlJadVwBVcJEtkLjHpIuVOagsjYgd5ozQk4SIfTq0bt0nmejwNXTso8ZTylfgCJWVvTOPioUOrGtj6SkjAlZUpKynoA8w70+2dTSuScj8PZXsf3CuvZZZIiUcd2J+pm1uu47kAZvq1GB/kWklgPIk2i1NYOG23eYh1EcYkaqglFxJg100old3t6+Wt/hfW3vslTSdymiUmVZpWWlKSBIW6KH/aUo5li0kOKLW4kiQaYQXAhVfVmAfESElC8g34/oCbdj3EA0WD07u6WVh3W7QB6B5p5r7GrxEEqRW1ZgdYC15o567Yx3dwEAUqXzHapcsoSBx3VLu/OB9m4XwgnobSr9Rzj+4/BaYPTBqcNThucNjjtTRmD0wanDU4bnDY4bXDamzAGnw0+G3w2+Gzw2evPZ58aYGotnI0dNGV/1IiKJZVe+zNAZErK8uSG1msm7hMoVikNhGlPSEDMeXg4x2a5MeUMQofD6wXvD7O7okvUH7WIMG7WSDkxT0E04uAK0zSDORlFb6M5mgLn82M4HH5E6lyEUoQXrfDWF7+AaEJU+YV/64/yU3/wb+ELP/MzcHcH543t/Q/58Le+hq1n1I3co3uaA0RLcaYUUcjH85lmDhrR6F4ykpwmnp7u+Maf+UW+rsLz5x
/Cy5d89gtf5PNvv4OWwt00kzvwPZtm5pQiCioeNSlVeDKfOG8rdHICmKeJU68fG9HnYL5EAo/P1l6vFAerldYarRmlRBR8SUpqjfL8I5YXb0EOQ7xRZVXFy8qUM94atTVqWTmdZpZl7pFvOOsjthWsVB62woMIN2VDp4zmxDRncr6YnzrhLC54K32tDKWhOK0U1od7AE53t+iUwYybZWGZZ9556y1qFbJ2QjsHuZemFGB7PB/A+eTJLaLRoLG0ylq2IxOD2sLSFB4eN8q24RZkzN0dtR1HHVLWsL15Ik8Jkch2KdZQCwCT/dAjgDnbVnH3XjNWyWlGloxqPupbQpwrNOsBxFNSsmo4vUXzwQtgQm3Gb/2lX+bx/Ihb4/b2hHmlVCfl5SCMSN6ox31UhWVZjiwI1YjI76/vNT3pQKhZweGjlx9F00fh8M04ZCXKtrHXUNUOrG6QJDFNeecYzCPy3hyQAGvYs2IqJxU+tzc23Dbq45nP3j0Jw20VmSaqNVrth6L98GfGeSvc5oSkBCRMKrUaW6nxjJ20APIU2U3N285W7I063Y1a/aibutds/vznv8C7777Hhx9+FMRoEdmHOIz2tIogxBIYZwKlrCCJeZ5pLRpx7sPN4+BRo3GnW6O1yrb1zBBVUq8BvdeuLdeMPMb3HYPTBqcNThucNjhtcNqbMganDU4bnDY4bXDa4LQ3YQw+G3w2+Gzw2eCz15/PPjXAFDKzMKiIpvkB9iGfikV11YiIulO9oUTkc3f6vYme9QheyF+lL15E/q7lcbXWiFwmISXBrEvBBFLK1FYp3RBrFU4szDe31LZFxE1SSAYloSnhHVBqKZwfH1luTjiOlRLAo0KeMnJ7i0mXcVrj+de+ztf+zJ/h5Te/zZPPfZ52Xnl47z2+86u/Stobtkkns7bRrIb8MSluTqmGNCPvkrkeBRSEp7c3vHzvPWpr3N+/5PNP3+ZGM5TGMi+cTgu5y9q8GU1CkrbVQp6maCCWMswhe3vcoqGglUbbCmIe/0nsVTRO1FeiogE4lVJCqisSMk4RoT088vDwgH372zx2gM2ATDNeI7NCYnOPLAZV5bQs8bMBW+Ph/h6L1A6YErOcyAK2tgN8ICLaiIaP9Mh57LkwTROwgyNMtYIKbd1IaSKnHFJK7ZJIB9v7qCUjq/Kwnlmmmfn2luX2FFkE7ngVqhuyA585OSnuE0kLpmG7SZybZQkw7gCrSdGs5Kk37pMAf6+ROWBm6KR9x2EXdUYSQBd6S0iIdxnoPnaA3/9LquSkF7+5IoV5nrm7uWFdz+DGPE/McxyqNCnaswD2Rn/TDmwtGhoe2QAHuDl7vokg/UDgSMqBY63RagFCZLqDlWqAu/gFyfcDiLlFllC7HABrs2hKiZByHEToUnH1/uduH5rw/pbIAojDoLgjPYNIdqB3xwzaDr4eEf5mkQWACt4/u9stCLj0pot2uU+HpUtmUqxTKVtcC47nMwM0yEvEjkyR/Xn3Jnna9zZs6bLnsQf07JLSMzFax8zwteO9PSskbODSEHGMTx+D0wanDU4bnDY4bXDamzIGpw1OG5w2OG1w2uC0N2EMPht8Nvhs8Nngs9efzz41wHRckF4z0S+yKFUFt4gSumPNqAJulSllRHcg55Cu1nU9rnsYrDgX7WSPvLUWD5C8y7M4DHXKidYKrV0co03h6KVcgM3N0cwR0YRYwPO2cvPkJqR0teG1kOZ8ZEScazkW8cX77/PtrfL8N7/OZ77wI9Rt4+HFC97/5jdY3NjrUwJUa1QzTqcZl6gJW91RbwgJ9YvRakrcpRPri3tKLbR15XNP3wrZZ2vM0y3zMh9A3x4e41pAqfUA+iSK9oyJxxb7YqVRU+l1YQP4kiZKqbiAiiJdBOsYtRmlxFpOU+LmJtZmXQvbttE++BB5GUB/++QJaco0a7Ta2EPCe03PlIRlj3Qa+Knx4Qcf0tzRpBERnSdQ8OZM03QAfUSwBfGwtTgsgKgyTRO1lQMMWzOkNlqtTGkma2JKE5l+8GiGp27aalE3sxY8Z6YpMy1zEIoZGSe1BgdZxSHDCXmv5wAHxTnNU9hNBxRUEBU0SYSy97qquwLXHbiABar0BBl67DtsSCTucQUsHwf5qGna63L2Op47wU7zzNO7Wx4e71EVpimRe/ZHkNElCh7XTgFAOO4tgL4jovaaon61tzGMSbpc3bu00+wgtHivMk29nqk7uJG6DFVEEZNIkdiBvmdtiPTDX7dZenbODtK7fWTksLMDNPtB0bnUUfVOEjVQEzGnNT8i92Ixt2OtU2QxefdR74Vsc04HFlwOR/Hfw8MjrdYDl9y8HwC0E0V/v8UBYD8U7ddLOVFLjetxDeAd6GvrGR8tfCT49tiP/TqtvVrPeIzvPwanDU4bnDY4bXDa4LQ3ZQxOG5w2OG1w2uC0wWlvwhh8Nvhs8Nngs8FnrzeffWqAaZ5ncMfNwzn9svIiIRlt20YVQTRqEiYNmaabQI/m1hryvNwzDiLimkieOthcQHsngJyjluH12GsDttZCrkrUUzSrrOtj3zztNTRj7q1Wth5lb60gCo+Pj9zc3DBPEy8/es47n3mGThmzxqK7HQoVZ33/fT781nf42i9+heV0Yp4yp2XmZg5i2VrU3NSkZJ1wSTSHhuNJKK3hbHhrzGk6DGPOUZf1ZjqR5huSxXaLwpQim8Ho0ULt4OIW0UQJq6zbRl7maMa3A1tzysPGtGSkgWZlmhO2FVprrK1dwFiVaZ6hz+nm5obT7Q2aM5oy+bTwsG1snaCXaWKaJ+Zp5rHes64rpRRyStjUMNF9y4+9EhGWeSbPE8UMf1zRoqQpMW+N1GvJTrmxS6KzhAxPk5Bz1Pk8r48RKYcABAsQWVKOjBFgzjO1FJrDcoqMhtWF5wZTykwdOK1bsXe7nTpxho00VI2UEu3UyCmazG1bY5l7Ror1urNXe3QZRrOCmCAkrPkRnY/rW4+mx7tTz4KITI7LVZZlCVKaJrCGHtkVMOUUstdOOCowzZm0KSlFU7Zmpa9hNNtr5eJjKQuaICEEd4ZdqYLTMI8I9r6P4Y9zj3zH69M8UUqJOsN93i6xfnOvXdzMQBJZc0xShSkfOM9mD4CQNJMkH4fKrInWo/n7kiQNqXZtvUGhO63V2DvZM5Vq4JUqOk00d7w2zAulteAYUWq9yHD7TMKlwvp6VkBjb24Xkf3Wf457f/vb30YkoRo2VbaGeBxMdszdAV086uK6Xurf0lpkxsilJmttDeuH2pQT1J7ps63s+tdpumTfXGfxjPGDjcFpg9MGpw1OG5w2OO1NGYPTBqcNThucNjhtcNqbMAafDT4bfDb4bPDZ689nnxpgmqYUTbz8Ysz70pj1SCLfHcPSPvFdWrjLU91CCoc727aFlLVH+vcosfQVL1uh1UaRkCqm/RoekbOUAxQSEkBjLWop7uDuTuqR+L3mppsdP5sZVisJoZxXsEaeZ6o1uliVLI1pyTDPPF2i6VVkAziTgGiKSD5QXLDWcJOQN4oiGg7dLCK20qP9SZXsEW2fp4llmZhzgJ8kpWmjW
qW0PebsURexS0rXdY21NAHV7owXWau58XJ9JE85ItBJXqlNW1M7xJNpykjSIMg89Uht5u7uLupISqwvgLVC2wzp9UaTKq1LmFttqF/kybVU7u/vubm5Ybm9ZT6dqGrcPzzQ+tzP58dLJsa2dYJRoj+dkJIyT+kg9j1iva0r2SZOy4I0w71iVYKQetS3lHjW9fzIej5zmhfmacLcWM8brnIQ3HxaLs6j4H6E+9nWlVqVOQcYmjvJOgBELB5SQnNEe4t3EmiOWiJPM32pI7NDHFfHNTJwAkSnuHUHc4Bpnsg5M08ZIWoFTzkjbiFRNqPW8C9RWNczKUFKAcyi+6HMAYtsh77/Zi0oSh3NiqQA+ZSUZg2UI/tGBEwazbu01SKDpnijtkIp2+W9Hg8abh1ZFiFH7cBc4Mndk6g/DMylUGqLg6CFnalqZPiIh4y1azmbO80g5YlaawffLmnv96/tkgER4Bny1Npq/FYE0dSfI2S6+3qo9qaEHdSbRZZAZGTokbnkxxpaZE/VaBwqEhklTmQ4XIg8IQiOYSKBjbvMVjWAXnZCiWyEnHPPJLB+MLzIy21PZTjmcskyGOP7j8Fpg9MGpw1OG5w2OO1NGYPTBqcNThucNjhtcNqbMAafDT4bfDb4bPDZ689n37dE3hEldA/p4H5Di1qYIVntBh+rGRFxkYMUdrmo9Ou4O24RrdUOVNeL6GYdNGNx9oi0IIehRT3HiFwKBNDky1yLNYSZvT4jhER0jw66RYaEeER3tRmTxL33Z0k92qooc1JSzsczoI6b4C2u1/r6e4QC2etcXm+B9aifuEN2aA1JiQxMvV4lAqREq+GMQEQhd1CBAP0q0QiOMKZ5DsIxIuug9gZymgSRILs9i+ESQZVjD0MGGZJJdz9+F/Pa97E3M+wGGA7rtFqopeDV0ClAa9s21nVlXhZOpxPzzQ2WYWuNUrZoducc63M+r6RU0ax9zuFkKtPlkCEX2/NmzDnTKlADTM73515nV9kssh+2daXVxjRN5Dz1w0CPCEvY0jLNvQZmRKP3aO80Z9xaHCzUqGY96B7ziN0VJKdotijgFvZKAzEl73LcDvQp9Uh4B7hY52g0twMdhJw0pUROStJoJjnlHAcSjYPNLkcFKGUjZz2uF7VCW4BHa0cCyu5fSEheQ0a670PYTvhMJ3evuBu1+yNOPwg5tRXWbT1qw+aUuLu9o1oj9UaDZlF7VERpHrWC8zzhThC79ch6x4Xwr4iaXx8wvYNnyEA7hnSf24mkXNWTVY36w3g0mIyspI4h3oF6P6j2fUlJj2ub7XVM05W/XI9Y32hU2eLwI9HAz67erhrPYWjPFFIQwS0OfH4F4vtcQobrx8/7/T8+DzvWJjKXxvgBx+C0wWmD0wanDU772Bic9tqOwWmD0wanDU4bnPaxMTjttRyDzwafDT4bfDb47GPj9eKzTw0wlVJIGkBhzZimRNqB0v1oALb/3XDEJCKUKbFMr15+ypnz+UxtlSQcUbKcM2uXQ+5OtpNDLFYsRsjTSoBWn8e6nrFmbNuKeIrGa6psW+kgFdFpAGkBpHu80V0wA3rtQ4DWZbXQwdFALCSwOYWMNqVEtYK1ehhLK5VaGq5h4CLE1qqQVBEXWolIt4qg3VCwhrqRBM7rI9UbTz7zFs2114KMOrNoAGvKQkJi/dzCcFtjPXcZp0ogkDneGs0EIyKl1/U1d/lkKZfarxAA7X3NE0S91b7Wy+kUdVK3FVWYl4n5NPHRBx/y8PgQpDmHRPT8cObh8YHPfv7zzKcTy2lBlplKZJFYa0yH4zkv7j9gjz6nOXX5Zmave6lJyd7tyQwFTvPCuazUDpr3L54TVVSF83nrb433TsvMfFpYlgXPmXPdqK1Sq/P06ROWZe6OFcBQSiFnxeYE6pgJ9dzgELruBxg5Mh/id4k03wCKupLSNdDLkYHTSn3FcZOGtPcC3lFXNSXl5jQz5ylqs7baTxRydWhpbFtlWe5C7StB1K3FgaoWjnXc9zlpNKxDoFmXcXo03RNN5Cl8ZisRiW+lkjW63VltIMZaVp7fP+fFywfcnbu7O569/TblXDBNZFWaG6f5FBkA5YE0ZdIc5C05oe5MV/WQzVp3x36o6pJZIQ5dVkuAauoReQKbrFXWbTvAUVVjr91Zty0Oixr1dr1nGOxy92gWmA+M8b5XrUVDyI8fRlvrWUtEhlAtjeVmQTRRmxHY3kGYng3QD8OqgqOIRMZOEBWHPR2HGb8QkXai5Mpe9qu35tTWvqtZ4xifPAanDU4bnDY4bXDa4LQ3ZQxOG5w2OG1w2uC0wWlvwhh8Nvhs8Nngs8Fnrz+ffV8FU0T3YgIpXYBe3XpmAZeIn+yy0i5X7DffF2qXriZNuNWQILrRLGoa7hs65YllWVAVrFXozb1Cqjb3iGbMI2vCXLBaQ4bawcussm5nWsrM88TFWjK1FmZV0jRdFqAFEczajRDI00Qp9WgWJ5NGNkJOyFoR0SsySghG2cIIvK8TGmCWVI7aleaOSzT52mtwbnXrUe2FaZkwbZgHQLgpYgbW2B4mvDTcegNEUUrduH94iLXt9UqfPH2KJInoNn6J+rsxL5GZsc9jz0iIaHNkWFiL2p/reaUe+xbXq3ULOa5KfE6F0+1N1Cx9DMKurZGmibu7O3SeqO7U9YykzHyKWp63eUYlwO/+xQvW8xn3xl2+O+romkUTOu0HAgBv0YCsbiXWVkIieJpnSommfCnF+k/zzLPTM+bTwrzMzMsJU4VH8LNxPp8Ppzxsnp5lkISUFUmZnHIHz0Zrlwi0ux9SZISeMaNAwkVRyYftuRtWG9YaVnu0vfvV7e2p53V0EsF7/z7v8smK9INB645dy3a8N4nHwUYFkciQmTSi4HNOQUb9+dKUae3SyG0ngL1Ose/R/d3/d99HA4gtZJaaEi7Chy+e9wsrMmXaeYusCUCmjM4TmjJVYLq94XRzA+48acb9y3se7u/x0jr5aGTC2IZ3+4cLvVYE0bh/yoltu+xf25sgdrDeeuPM0ozkjrR9v9oFXbkcLrdayejxzG5EjVMxWvVujy0wobVovEhkNtRWUfc4PDY74vrVCnuywE4STshwkSDY1udyPEdrPYuhr33OxxzXtVywTLVLgQPXxvjBxuC0wWmD0wanDU4bnPamjMFpg9MGpw1OG5w2OO1NGIPPBp8NPht8Nvjs9eazH5zt5CJBxXd528eGX97LJ71+XOtjH/MeRbv6nfZofETWHNgrlH58WnI1FzmcL67r7PUOP3ma17LNeOF4xuPPuP/ufteTvMzv1Qdzj0jvld++8mzH1a7Wyd27k8px3d3xRYQIT3anfOUacY8j6mgXqalob/LF7swBNohEtkPP2LgsoF9m+Moa9t93CaHHC7j3PdnX0fTyOS4R3WPOHvsoEuSQUgB4LMMONh729fFN6++Ja1+t89XK75F1uoOwH1Dy3nSuyxfl45Ldj4+rTev3lS55ti6Vhj0Cff2ZbjdI3/6LHV3mGs+3//6QvX7PuewfexWY4npX17xaByEAUq6eM+ya474ff9TjR7/Y
1vU4rvEJU9ttz7oE9vp6st+42/puE6/Y6PG/ILbjed2JtJjrqXYJ5yfZ5yvL5R/DAjrYvvJUV9d+FWyvr/Pq36+vzStbcLz2CZ/16x+u3O27Z77jR5+hxDz3+70yn30OXGPgGD/wGJzG4LTBaYPTvnsMTmNw2us4BqcxOG1w2uC07x6D0xic9rqNwWcMPht8Nvjsu8fgM37o+Uw//eXLpHbMun6QVx7g6r2f9AT7S5/oXPLdr18DzSc75PUqX373Cm7tv/3Ypn/cyeX4/+9lyt/repef5eoh5BOu8V0z/ZjjHs/zcYemg+Y+u2vn3dfq4zf7hDU4nvAT1veV91wR53cZeZ/LZY7fvf5HjUn/+Cv79fe/X6/f5bVrcvjEC1z/6mr9P/6g0qP02slzB5XvulQHlE9aj+s1viblV260X+PV3/St/CQ3fuVd/dqf8I5P+J1f/f8Vslzm9wmfl4/9/RPH/vzurxDqq29xvmuZRY7as9IPD580aeFVf7vY9T7H7z257ya1y7s/9Sz5vRfwu3zXr37/qZ9/9QaXSVy/95WfLz+8ermr5/8YNn38s1db/LEHluMwPMZ/wjE47bvvPDjtez3V4LSr+Q5OY3DaPufBaT88Y3Dad995cNr3eqrBaVfzHZzG4LR9zoPTfjjG4LPvvvPgs+/1VIPPruY7+IzBZ/uc/yrz2acqmE6nE9ai2VaSgPr9ppoUq4VWC9O0kDShGjIrN4OUDvnp3lTPcWhhHGjUGkSi5uj+XjenPJ5Z1zUitm4fA7bvfmpVQedEa42syjxlNjfEnVoq514nFAQX5emTt5hSjpqKFs+mvZFc1suSeAO3w/6ptUQTu9aopWLVDslbThO6TMyLUN26jM0xWt9MwhE85rHWlSc3M67OYz2TVRGD5Il8nml1OzbfvJJSZppmbm9v0RuQXsP1fD5TazkksyFJdPKU9vKS8ZqGNBU3at3Yo9txDzuc8XSaKaWyrhuP9w8I9L0nZJYSDtG6nNXc4vOitFb54IMPwnbmE0/unqApUd2oCNMyUzFai1qgD48PByCKCPO8kFR4+9lbaI7sg+bW7eSSvSIScsCPPnqO1Zh7Vj2kn6LK5z7/hfBBVfI8cXt3h+NstfLy5QtaLYgbeOvNAmP9aotGgDc3N9ycZh4fHyklZMltjsaO21YOCzSTLhtOqGSSJLZaaRbNEqcUTRCVIB/TBCk+O+VLI8N5ztFQrkfl0575oHtTt4oD85RQcVRi32J/w9diHXudUGtICt87neZXDxYYbk4tjVK3ww8lKbe3t6RrwHbBWxB4cyNJ4uZ0x/l85vbmCSnP3K8VcG5ubrukMyTUKonqjeV0w3Jzw0cvHjiXldbnvdUVw8nzhGqibBu1GSlNTDlhZpQStVBVEkmj+aNrr43csWW349wlnTtW1Fp788idXCwa05V6rF1HBbwa5hsiS2/MmI/3uPsrtVBVleV0w/m80rbCk2dP2UrITlFBvWdKiDLNGeTSAPM4KHtI+c38qhZqPKOZkTT8t7WGiEdmSlKW25tuH4nldOL5y3tKDensGN9/DE4bnDY4bXDa4LTBaW/KGJw2OG1w2uC0wWmD096EMfhs8Nngs8Fng89efz771ACTqGItHFpEUJe9BCnWJ6XTdAXEvXZlujSugnggETma7xlOWXtTOwmZ3l4n9dVIOlF3UbposzUcepOwPcK7G2+/T85BOvtnzCjdMXPOLMvMkjOqCUSRNKHSZY6uge59VIuao3g04ROTYx10jz7ueQi5b7AqvhVoIRlNIngzSg3n2g1LUjQiE8IxXcDdwAzfCWSX2VpsNEnI0wQWr4vRAd2Q2p8xJeZlYpoy1RpmYeCxJtoblUVhTHdBmsV6ehCZaNR7FDxkrnbZE9n/c4uGgTmBTvF+V1ptIaklAHorBZeo/drcSDitE6CLU9pOgvE7eiaBdOI1PEA06dHkEKCsK9UK27qSNEVtXYHzuuIO0zT1BovRpE9zpuE0M0qvJZpzrEMpK61Vtq0vdWsh6+u2L+ixdkoAdpKdVB0Qtq1CdVAnzQquAUOitFbYDyVZNBok9si4SG+8qBLv308VhAxWZa9nGq+5Od68V1qVaMZINHW82Hz8PoApJJ2t1FeizoJAA28OLuQ0hRS3N+Qzd7y2Po+QJ2MghMzXPUhLFaY884XPfh6IBn7lXKJBpYYMeUpByrUU5nmOOp/9sNasIgp5ypiXWFwHTUI0JnTMop6yeLTJk37QQARUUel1Un3PItoPpB7+RJ8/FvOHvv+XOsZ7+809C6Z24ltOSxzo8PC7fjUrha3j1ZQzJpBNqOLd567qyLr2es9hd0kS3rExrr17FUcmhuPdj8KOatmONUtpAhE0JW6f3PGwrtRel3WM7z8Gpw1OG5w2OG1w2uC0N2UMThucNjhtcNrgtMFpb8IYfDb4bPDZ4LPBZ68/n31qgMn6gpkZTQSTy2vu2utZ5ohq74DrlzqWl0i4hVFOOSLYQOuLIXyspqRf6va526XpVPwigOHqfuFzu3MY7oZ730TzvqB7szglJyV3onAi6rwvtbkje+MriKZZOxC5YUm7NC8avIko1r3NupPRr7dTgIpSrUaU+PKIADScJCApNtRaEJO3dlUTkm7J8fiaMkZkd4hCmhLJ2uHEKSlTzuScsBoGvA/VMJBdskl3nGhqaB2wd+f2q5l2oBcJUuyZIJISkhNmijePBmjTblIRyfWrK9luS31P973FHSPMWiSa7+lV9UaRANvcG4pZarQaxj31hoeiylYKKWdSzrBnnoggKUijYbEXOEljHeY5iMrL1bOK96i+owiIxgHDY4+PHZbDRQOjAe9RdOnEtTv9bveREeGE/NiPNe2Mellzd8QFlQDrIxLusf7Kxc8UiSyKHfz9surWnOrONOWrT3jsd+tZGCkHyPfGkGZ7hghBLtalnX2aJkEWsS/w9O5pf69RS2NCURfEo0mft0bdCiml8Pt+7Wp7BoNACluOA9dlLy6S0Q7cvjeh63JmiYPT8fr+7gPo/WJT+yHVGpBeyQ4IM+wy69ZAhAXpNuvIlPo+NrwfilRTNBzEI4PHL3gRR4Cw+WqN1gkg8qnifq21aMjYm5M6scjebStMT+JwtNc6Tt02VJmXE9Jtc28AOcanj8Fpg9OuZzw4bXDa4LTBaa/zGJw2OO16xoPTBqcNThuc9rqOwWeDz65nPPhs8Nngs9eTzz41wHSRekUUtmlEEvex10JE6GDIIXMzN+oO4P3PUkp38HZsoLvRamPJ03FNSXHvfWPXdWWeJubcicKvImctoqYppQM4amshr+z3qDWikVPOAWS1RBRYUn89ItohD+SYW0qJttVDyudERLxa5eZ0wjDqnnnQycBNqa3gQM5x/VknnMR6yE+dddu4OS0BFiodRMLQ1vOZZcqkbpnmEeGuIkzTRKmF1iqzJtI0kdygq3ElCTlnUkpki4i/pMR5XRETJpx2/YxZsdIdRvfYZewLbWfUeO80T0hSkiUqTpMA6N2RRIWnz57EnKvRtoigSkokCROvrVJrw1vDWg3ididdRbq
fP3/O07eeMs0Ty7KwrmdKfx/Efp9Op1jbPAfBemQmaM5oShc7vSIlaBjW19URhLu7kF1aCxuZ5xkcbKuklBBNNPOQO1tEhtllji0i+0knvGdpCErOM6UWtrJycxPSwjioQNKQSycVptyly2aUbe3R/vCfKSlJwlb3z+UUctw4ETRS3xeFyApAcQerTmkF8zgATFPGvZONO7VulFIws5CRo0hPEfJqfcv73xuRTWRQrKFiqHpIO3vCy6H6dGGSzORCckUNZJpo1amtRAbMfsjyiKR7B+KUE0+e3EFzvvONb9GqkTQf67cfEMCpFpkommIFvPvfkY3UbXLPYMpZ2bYtCFK0A3rDbMeg/hkStVlkfDTj8fGxS0XlkIK2FhkBW6skd1LKIEo1o7RK/diBznom09ZKEGQ/pUaGz8U+dyMx9y73b0dmQ8iVoZlHJooE+DtCbZGxs5aNMb7/GJw2OG1w2uC0wWmD096UMThtcNrgtMFpg9MGp70JY/DZ4LPBZ4PPBp+9/nz26QomM1JSRCes1qM243eNDnIikPPcI8KXl0X1iIipSMglc+rRyACY3WghwCaAXsCF88MjvjT0dGJZlthg2W99AYBYt4g4B3iCauKdd94Bek1Pa9RtRVJDdSJPc4CWVdZOCPtIOZPzxJwXaiuRadAN88MPPoi577JFSQgJ1A7LiYhnl0tqgnQhNysljI8uSUQpJcjtbl5IEg6IOLRwCFc7ZGkOrLUgAq3VkI12w6nWKNt6RO6tVh4f7lmWhdubG7ayHRFLa+0gwjlncg4gsxbAuUdZIYx83//WKsWMhrGcTjTCIW5vbwE4P66cH+95OJ/Jp4U0zSA980QVt/hzv7guM8kdb437+xe0Vkmm5Nzr6Lof80waGR3LsmDVDoc4r2fmZWaaJ1p3N3VDrPVIf9TL1NlprVKtoii1lLi2CE9ubzsYOIqylpVaDDYjuQSwdTMJGXfUQXWJ2P5aQopZa1wz58Qu417ShEh3/nUDb4c9LdPM1dmEpN2HjqyA/qMQssfaDru3njHTSg0AcKe1gntDRbDW8MkOcq+1sa5RA/V0c2KaBJUgx2YhN76O4Is4KfXmbsThbvN2SGf3+q2CkDQzk5hyZsoTaTpRvNHcmOcJ7wcJcLayYdIzI0S4ubsjobz88HnInh126XitlVIqKU/shw+rlYYhqkxTZp77awIppzio1karFWsr1hrNLr65g/ce+Xf3Q4IuIqybRUaFQ+2RenOPQ0PttU9xNJT5qCqTShxk+lM2N0y8Z11F9sOeKaApRebRIVEVhC4z1gytUVuLNevzmucTIpBS5nxeeVzP1GZI+lQoH6OPwWmD0wanDU4bnDY47U0Zg9MGpw1OG5w2OG1w2pswBp8NPht8Nvhs8Nnrz2ffV8E09cl9fJh7AE4HjCOi1xfs+u+isZF75EyAvfHcbtwqF2niLll0C+PZm5AdoH7RCF7VM7zUXBWNOUuET49GflFPMpq9heQ09UVTHKW0iHzvD2TipNxJw3ptV6E/s4ELIhfDof9uz6yorZFUuhQt6lUiPToL1K1QgKyKao4IYjcsN8fVwbsjSdTmdHrmggYJehSpJB1AH4C8ruuxb2aG9bqjSYS6lXgGd9Ztxc3JSaGDUhKhiRwgv+9jSHl73cidRK7keULUtQTQLYCzlA1yhmSoRbaAIpBAydA/p8nJgNfKw4McEsnd8Pf939fa+++bR8S19Tq5XO0/3QZDEuh9eYJ43QW6fe1AAyFDPbIpWkTMvTZa6XVenWMe/YaoJiwgO5oQ4ngn+It9C3m6NFNzM7z6hehULuDOJbjs3f0jA8cxpEf2L/JFEetSxXLsRdSztZiHRuQ6rt3lwrVRW+2/72AjkRkg/b/LM+4Hmqgv60Avcgsd7NnXvvu5WSdBj/lrn7cczxjrvfvoHq33HinPOdGaRZbPjikSkmqXAOLwiRqSaVXSdPGBIKjux2ZXB8M9kv+qtNOhZwJB6ti22zhcpPXxmHu2TfimeMhF/XjtSo7qfvjPfiOXK8x0jntcA/ph590H+1KzLEu3uaixatbrC/srdxnje4zBaYPTBqcNTot7DE7bx+C013cMThucNjhtcFrcY3DaPganvZ5j8Nngs8Fng8/iHoPP9vE68tmnBpjO6xmZZ6Zew7ArxRCgipFIGKnXi5QDZKRvejQ84wDfPeKIBPhbs8guMODqvXsk18xQDzndLrMrpWB+kTUuy3I0Ydu27eIoqpjVI8octw13xAyxBMlAnGnOqAnVN7a1HCDuBs0biV4HURW6FG25uQmH6GtV6xURSW/sVhtLr3Mq5N5ELDZUSWznDVrU29RZoslXJ6IkDZfe8Gw3dtWQukmAS0o5ZLd93bp1ULfCvVs0netEmCRkjuKwngPcmzVePH9BnjKn08LNzSmAVYSs+zP3vaUbZAPbtYkeYGy1NyUUiTqkQJ5KSARLgbTFnPHYZwnIyD2bxN2h1ZCraifv8CTEYUoJk4tzejfsOAz4AfQ5pwBxiaaDiIRketugSZc0dzlyzrglzJx5mo/nxcJJcSjrRt0KbauUdUVyjki0XUgjqSIp0eje61GvNKVMmtMx3x34c0q4a0gR95qrHvH1lFIQbl9r96ghLCqRMdFa+FCvY2pHJBwgEUrNgJzwIwjXuBzUdoIP8g+tqfZ6shbTj/n0ZzTrEvSUeibNjqOC9/8Ot5aoN7u1RpVKzS3q8k4JEeXh/oH5ZumR87CdgEEDzbx4/pxWKlstLNMM5pQjeySyeu7vH2MeqiySgwhai/q6V5BarURjv2a0asfeg3csM1onqD0Lx3uDv+agV+sUh7aYh0rI4p0gWzBIQd7W7ab2AxBw1JP2vvjNHO+t8xyordEOkO7kJ12Of0UuzQ1zuLm9O97qAKqYO1t5NQtqjE8eg9MGpw1OG5w2OG1w2psyBqcNThucNjhtcNrgtDdhDD4bfDb4bPDZ4LPXn88+NcDkhMzKDgwJZz6WUwVSxAk1gsPskXC7BPjD6FSp1rCrzdhr/oGjcziOIGiW/hnBqx+AtUsV3f0AsPvygKbEMi8h4yLkhKVL8XA/aoqKJsL/BdUMqqzbxnKzoDkivbp592On1Y2yKlQj58wyz7EqtVLrdoBizKlSuwO2bsCq8gpJybGQ8TmXjKDQYNtqj7cKL1/eM+d0ZFeYgJihrTGdlp5Z4EzL3JvdNdq+0T262bbKtjVUleV0ioaIzWmlHVHiqAtbSfNM3qWxFhkWOXeyvIpQPp7PIVdOSj4t6KQka3jrNTVVo5YoYNWY5jPbdj6a3E22kOYuVe0ZHtINu1VnXVesy0bNpsgk2EFGFJdroDestshS6c0Vnz17C1WlrBtvv/U20g8e3ip4yKETyrqtJFUSirVG1kvruu289fqfUYO3lkqthbJWkgGiTF0WmFUwUUoHCCPmquJMU2KZM2WNIrXCnrHCERXPOYdPuQfCXvue92yB2p+5xTrnHI0NrUHQJRHJdkdzRnq5YtWopZqTsiwTqr1uqHvPMlFSihqpkWkRoCSdHC/RbRAiEyYAq9tDSlG/1z
kOAi5CA7ZOQOLGfD4z+4wmpVQjNd8V2+SUA/TcWR/PRwbDfDqhKInEcoo3397cMk8zDw9f73YRvmVrwwSSOb5d1tBwti3qg0Zt4PCtySdKCd+wLj9V0uWgJHsdZidNqSdMXCikeRBHzhmRwIrDHlvUay1lO/xGU+CWeSPNc7zPhYZTWwB/51R8r0er0okmZhd1XyPr5uHhHogmgLVVSms0h1etZ4zvNQanDU4bnDY4bXDa4LQ3ZQxOG5w2OG1w2uC0wWlvwhh8Nvhs8Nngs8Fnrz+ffd+CsMdDuNFsD1+BpYjCusMuy3LhyCh4Rd569fNu5Hv9P0TYI9LeDc09oqoCR7Or8Ae7REG7Ie71Gd1DMne5b0hDd6Pvd8cwpjwhXT5aakFrRZQjEyDkbAGKm61YMuDEnCe0S0+3bSOnaHYXG5rJOC6O1ZAM7nU8VfVKDgmQgswiB4NWQ3KWkpKkRzNrkCyAZIU9ylkaiKECrVZ6UPbq2rHprYYMUTWkwaVFXcxpmthzG5BY5ynn+H2PKAscckCVAOVjrVXjfRAZERI1biVQhG1d943m5nSKmqb9umm/toeUsnKxk7JtUCtWSoCNNUr/+eY2MhyuMxq8e0dOGdduL5ICmLfC/YsXh7211qjW0KRMOQMhAzYs6od2xwbw1vejGaUUaqm0djlgyLVt932l0cEa6Ot97IbQ0VLQnLDaopaptS78vLhHNFq7IjOzA3SUfjjwPVtFyDlINeVE7rVmPw700Qwzas7uJAMcxOzubLWiZuG3qTfs25Ftl5A7HbC6fNgjs8PcsCufc6Dt4NhagGEtSBMMp9Q9ch61bffMkdSb4AW+7M3kvPtpzKe2Bgg5T1EPWeNhVegy8IsPCBxEKj0LRZAOUv2txyGNY20cP7JJ0pQR2bOWOnn1Q2uecmQimEdmTD8URebFlXS075d5r8V82KTRfDeN/SAaBw4PjX7fh2jwKRJo8dHzbtcESbS6z+vqZD3Gp47BaYPTBqcNThucNjjtTRmD0wanDU4bnDY4bXDamzAGnw0+G3w2+Gzw2evNZ58aYNqjis1iwtYfCsBELsBLl7A5V83FBFS/63rXP8ffBZGQeokEkQRIZkzAyv6+i2zsWsJ1Hanfr3ENriISTa2AZrU3R8vH76qdSR0w92vszxVNuoymLQBvXhDNIBoy03lmnpaYRygwMe3zk4gUKwGW2kFzXwN1RSQdze6iRKOgaZf/OXudVfVdugm1VFISXKFs5WiClvtaR81ToslYJ0lrdkSJ87QGvWhYUu4gn3M+1k12cIKIQO+y1V4Xd9+nV/c15lx79FYQTqcT5/OZlDI5ZXLqNUPNMKtUvxD7VgqUAr2pY9TFDCc53Syv2M4BdrwKWKC02ijbxouPnl/WWpXH9UxKyjxPnE6ncESHVuoBprHGcY1a4/e11chIuW4yuWemBOrTjRY86n72SR6AHMgYdthKpZlROwEmkYBHkWNO/QKYBdm4O1PK5F1SGTc/avzmnJlPp253fgD9kR/hHD66r53qpX5nKSUIXJWsvc5nf47I9AhyLbV2wIRJcics6weBTsICTaImbHOL5nNdYpsQtFXUYr9KLeScSJOSU6ZaxXtMfK89Kl2627q0VETDVnMOUpXIOMopHQdN51JDd5cxH3Zjl5rMuje/5IIfwQUBzKcDny7GLhIZDGmaQCwyPABRR3wX3l4yLnYJqwPCBchrC8G3iB6SfnayqvtqRpNCM+v1feHFyx3owUL8Dcgh1x/j08fgtMFpg9MGpw1OG5z2pozBaYPTBqcNThucNjjtTRiDzwafDT4bfDb47PXns5FOMcYYY4wxxhhjjDHGGGOMMcYYY4wxxhhjjDHGGGOMMcZ/ovH9S+RZRLySRERxD+iqhNxUVMiSSAJHnHOPoPZh7lSziLSKHFE+VPAW99hrYgpXTdW6Xi6i20JKu+zrEqmjy9Vav2fKvQmd9UZ+RKM5iEieNSilYd4zDlI+6rOGbDNFtNFBJ8c9oZIiCtic5lFD8Wa6JalCL0G6lUprRjXwZqBgreIamQ5JQqoZE+lVT90wb1hrNCHqegosacLckB4tF4/aj127RtJMIv5et4aq7V3VqKVQt0JZS5eWClhIOmnG4/090xyyRrOLNNDsUjtSVY4Mkatee6SUj6wObxG1VlVyTphzyCzjEeNDpRS01weNmrQ9wio9ortH4JWIZudM0kStG9YaW7vUrNyby9X+Z2RApMh2QLDm+Dyj7jyez8eenk4nZK97ac4kqUf6jSQSUd0oLkqrcskiqAaiaBJK3eJnfcW0j/VKqctBZY9Oh8z6aPQo9Nqq29E8UVWPzBmzqFG7r2AS6XVptc8tpM0hvQx/yEdjw4mUE3VtuNuRSRCSXusZJZEf4E7IZHsGBnBkB+xZJ42Qp+/7GLLIFg3nPJrz5XkK/9UGmvszCy5wtsa5VtZt4/n7lSllppS4nRa81CMpI2nUYq1bjeaIrUvWFfI0kzSRU2RL1FopW2Wa53huga3sjSaVpInSpa7uUK2SckaSYq2imsAhRNpKSnLs41Zbl7+3I3NBNfAtskEU67F9FY1sIn/VCFQVNT+ycfZ9bz376bCZPcPEI99BNPY4FjsyA/bsh12S7+5gvUHfjgmqnOaFah61XaPT4hg/wBicNjhtcNrgtMFpg9PelDE4bXDa4LTBaYPTBqe9CWPw2eCzwWeDzwafvd589gP0YIr/Symj4hyt9q6tvcvFFKAb5avX8A7m8THpsj1RRdxwu0jopAPQDhT7IuwSyl3C2PoDO7Hm7sZeq1NULi8gx7ViXkKrBhieetM9oimfqpA1XTbV6dI2xS3qF4oD5mTNCEFUALWElK/ty9MlmSLpeG7tTCkE+bj4UcPU3bqEU7Er2Rt0Au1grw5KEEfzaMDWDHofOmpplC0ktklTl8lqPJc7tVTyNB0Aq7LXJzVKuUh993qNon7sZ+yBgrcgbqE/SxCjcJEFa28cuBvqsffd2QLkAnhgt4kgwyllrFUasSZYAE+rl4aCosqUEvkgn6iHmXOQvN8/AAG8CUEc1GJPEyEL3Zv5WWsXEim9iVltUb8yTYiA1Y2silxJd/eTTxIPm9ltRcIyryXV3te+tTjwJA2glb2ecPeza0d2QiocZ5ionLoDvcplrRGJ+rdu0IGe/oxm1olxn0vU0Ay3Ct9wC5861sUvtTxF4u8hN73Ugk0phQ/GIl2eU2ATOHvjvhbe+uw7bOczW9mY09Ql4d2uVWNv8S7/DRh2AvxSykxTb95ojmiLxnk5494JrAMroofP7Dacctj8NSY4Adaxzv3Q0YHV3fo0eq3gfbJXB1cXYT9zue9ruOOa9ntfmvdVO0TCffVjftIPeyFf34WkcvjdXmd2x0Sk730/HMQhNaEYn3j6GON7jsFpg9MGpw1OG5w2OO1NGYPTBqcNThucNjhtcNqbMAafDT4bfDb4bPDZ681n3yfA1GtgIkwpk6RdINw96ngK5CxkzbgKYnsNxqMqJHujuViEiNLtNThNFVJie3g87roBNynFgyynw/hTiuZZpbWjv
mFrNUD0Kioa702kfGnYdzxwnlnXlWaQsnOzTAH8HsafVA/wlZRpNZqvretKRpF55nQ6cX+OpnL7JtVSA+gV5nnGieiwZiH1TRWIrACN2qVVGqlHBB+2hyBRNx7OZ+aUYv8Q3PUwrGmZyb0RoTcDF7wZm0UksdWCt9brbAZozssMCE0MRY/6sWZGzgGUrTXWdT0a3Lk7dSskzcjuxzvmO+wZBTi9vmdERpeb21gXB68N72QLSjPifZ3sMT9IIGlCJyU5JEmRJZAUusFbbdQtnnGeZ6Y8cZoX0jTRzFi3LSL6OZPdOS1Ro1ZVSQhLJwT1IEWrRq2NYo11LdROIq1FM7iGUERY5oyoYKuS54Wcp8jKIGpeWocmLxVpjhPObm5Uq2H3gLhQtxoZNylsP/X7VbPIfqn1iOBjhoowzTOpNZRoVtfcyJqQlLDuYM0a54dGbVs8r0Zmhzk0px9c5ADYapEJYoQfhrdqXBMCYGwHttijVo2kUS83pRS1N0sJEpFL/dSK8tic56Vy3xp/59/39/KLf/zf4ze/8hXeOt32w1TYVykrOWXmaYoIvkVGSOqNPGs1kl6a3E3TRFu82+vld4jQzC6HDFXuTrfQ67BCZLQgkKaEori1qO0MeCt4MySl/USGqwR57IeZfIHKUmuv1Rog78JBVJKUaZkPzGoCtvlRExVJaIJZotazHy0X6ZgR9W9T1p61ELVfm0Xd1bycDlcs3igGqLKcFsb4QcbgtMFpg9MGpw1OG5z2pozBaYPTBqcNThucNjjtTRiDzwafDT4bfDb47HXns08NMBn0JmkBTFPq2QI4VqLxVq2OpcxWKwlhlkQ1I5txiFfdcAXN6ZBfmVmPJhu1QZqX/t6IuG61kVNE3Ks5VhvNQJOgKZNlv3ZsYq2NeZ4QD8eapqlLCBPzPF89lTPPORxsrTxsK0/ubpjyhOAd1HZiSdRk1GyobhjGY1l5LGsAgl2kiN68R+obkuYAFRFyCkA0q8wdMJPAzelEwbAMrSnN+osWTh2pASETLF2StxuWWezJnBe0r9lOZpvD1oyb2xtabxTXzDjdnEjiVG9h0EBKypPTzLIsqAg3NzfUUnqU3Wg9E2EnbOmAu5VCygkkVqphbFvFEeZOfCll8rxwuntCmjLeJX7iEdV2M0q1wyGsbIgbCfA0MYlgKKUVHl68DDfoIDinBKKc7x9gyhR3tlqxBlNSUs7cnk77ipElFMVmRi3Oy/sz948PPJzPnLeNec4RoQXWx0cMwXJiu71hE4cWpPBUM3Oembp8UlRB4UV5RGoN+9kzFHCSB6AEQTqlFWrgCOVcOJ3mo9likOsumo65uigkMJcjsm90qW5raAtyUtGIXmsO0BWhVIvPmdAqPZrdV0QXzMFbB2iPiH6xjWnKPdIdNrKVLWS7pZKTRHM8UT766DlbrdTmu1IVE6Fo4u7zX6Q+ecr2eA8/8kWWL36Om68/o20r85yZc0TDN4nDlWnYT90agvJ0ueXuyR14yHt3XCilANGkr5mRl5lpmSMrSYTnL19ERoRAnhc+96M/wrqd+ZVf/mUie0X6CcSxVi+SYQ/CtlrAEzJlkgjntcRmSRz6YvUik6fU0hvcKWspuHUJvcG6lWO/Sm24KK7CeasgLfZCFM3Tsf+BOoJ5xzjVnrHgbBYN/ciJp7d3gFBb5f7xgW3dggQY4wcZg9MGpw1OG5w2OG1w2psyBqcNThucNjhtcNrgtDdhDD4bfDb4bPDZ4LPXn88+NcAkkpAupdrr9x32coSVIyq8S+nwLqOSi0RUYqcwa3g3Oe+1BEW0SyP3y4bczDwWLmpxCu4RLZSUohboru6CWGQLZ9w3QlURFJdLbdWYKyTJCNDcsVqj5qrFfZKk49mSKJ4UJyRy7k4z6zVPI5thB9goNRlRV/cwqqRdVtcaWMgIY8KCWw3prypJhazKLk5ViWyLff126WAYe0RhzQKEY43kiG4XiZyFaZpDgmeNUjZOd6eQ+Uk+RIsI5JzZa81mwM1Qd1ClamXPGAEofol0IktkA4gc4OJ+yeQQMZo4SBjvHrUVEfAgDxE9HA8R3Pq+q6Mpk0QRCemoh575sBHMaV5jf4Ue9fXY/av1iOSMAFszw5pTbOVxKzyWwv125kaX7rTE9Qhi8Jsb8ukGWuPFex9GFoRH1D32qUeP+57Z/jhGf1+PFHeHdQv/MXEco7U4cLTaen3P/Rlgr31pFiS+24b0qD0ehwsAVwdx8k7M/RlaM1rrkuPDb3t2Srfy3e4dsObsxX3397fSJapI+Fkzagm7d4u9ax0wG3D2yhfe/gz52TP0/IC4s5XCWgo5JXISJlXAqe5IUjQpXh0jMm28T+663usuS6VjiIcbgXSJc8qklLrvKbXLj63FoUJT7AMSaxsJA/GQ05RRi9rJrdszyFW2QEjx94XZAXnf92aRMeMOoumytt4Fwtptxnv9UxHQ1DHzCqS1S6q7jN/74UY0ngvVkJkLUAXdMsgWGSZXGDfG9x6D0wanDU4bnDY4bXDamzIGpw1OG5w2OG1w2uC0N2EMPht8Nvhs8Nngs9efzz41wLTX9NsBzqAjWreE/qSawykV8K2RcyJ3AIth7JHAPUp2eTaBlCh7RJ7YYPOo2agYaZriFXdwOeSV0J1XglxqrV3GF8Z+1CPskTrRkOPlnLt8NiJ9rTSqh4ROuWIQFFUn92vVVvHWZYi1vgr09JqIKohHjdFpmpiniepBDtbrVDZz1vVMIjId9myNow6mxBrt69RqZG201tggmvOl1OWghZwztzc3MeVOCDlnthLN08p55Yk8I09BPOdtY6+xKFykvMoelY4siE3XThYxr81aJ7oGPWIfzQ0TyT2i0/0ZamvUBqVVkns/LHDsi7sw5Q5UnazauuKtRV3UbkOLzNw/vsRaEHqsNZ2ADHJCU+aUM1a8NyaEXV8b8r/eCLE1ajM2Mc6tsuJs4rgVlFiDu9OEGRQR5nfe5slnPoNvla999a+wVWPJzqSHa4IYaiErjdqe4RPu9Dq6nazFabWEGeMYRik19rI2cp4uYEKXl3qA9FbLYRc78DmXtcZD8Ki9vqpKbxjYr11qJfcMj1h/Dmmlea9J6o5b41zWy3XphyYJqXrr62hWyLkf0HBKCbAqZjyUjXfeeYd3bk683Sp6/8jL9z/ggw8+4Ms/8qNMOYUEGZBmaJrI80xtlVo6EZfKw8OZaZpYlsjaaBaHq1J7dJ+dyBopJzTpkTHkROPI5x+8H4cSB03Kfi4xb6QkaIe/lHqzSCJqb94Plm5IP5TszfWk/58FhR+H0gCuIK1o7Bd+GOwfBzfVFLWSRQLITQLc0yX7hk7CqODWqOYsc4Y9y+Qad7vE11uj2kWOP8b3HoPTBqcNThucNjiNwWlvyBicNjhtcNrgtMFpDE57A8bgs8Fng88Gnw0+47Xns08NMD0+PlKTMEk3skWPRRZ3GmEc67rSRMmiLGg3sMrWb56Soole5zHke80b2uv8vSIl9agnGZFKC9lrB/ao0xhNxbQbLj2LIXVZbMqJeZp75N8i0t4Nt9WKlYouAWhJ
M5YCRLdSqBGsO4aIkKaLjNFMemTX2bbyikNI34SkmSlPUW+0G9F8e4uYsW3n4/3btrFMU9RlFOf29pbW2gHoe4SfY+sjqrjXQTUz7u9fQm869p3vfCfm6G03FZpVEFiW+ci20N0A+zXqWsJZVLm9uaF0aeDu4DlPaI/Kr2VDWjtISJIiOWrWnnpNzX1FrHnIJXv03h1umpFSuIddrV1so2DdAabTQjXDvEVDvXnG7ZJJENfeMIPZZ9QFaSCloSjiwv1jl/+ZUbbKukE1KDgPstEm4fTOU37vX/s384tf+XN89NHzmMcy8+Sdd/jCj36Jn/y5vwedZt7/za/zZ//9P8VDNVLZMNuzRxrQcKlkiaZ+BaE5nViihu6xKB3H3Yy6FewONEXV4Md1I6V07HkcsoKv0nSpGYsoxWLP9gNMTpnF5p49w5HtUUuJSLo7ct2UMKXYE/asFz0yVyJ747IvSeX4+x4FV1W2dY1Gd5Jwr4edZlH+6L/2R/jyX/838Pv/rv868vQZqolUGuLOlKP2KcCTSTmXwsPDPYmFKc8hB3+8Z10fO9B3CftuJ6ps50e2UmhGJ75XJen7oXJ9eKS1xu1yYrlZYq3d+PD8EVb9kr1E6rimWKtUd8wcKfRMKsE7LuSUohmkC7VaP/gGLhpB5s7Ftr1fy3uWRsozSPhmSFf1wLJaA6+a+1FbeSuFvJxoFoenl99591iPhvP4eKa2y2FsjE8fg9MGpw1OG5w2OG1w2psyBqcNThucNjhtcNrgtDdhDD4bfDb4bPDZ4LPXn88+NcDUWouao6kbQUpHdoB06aXIJZpvKuiUYI9MH0YbWQUHOHfQupZi7hHSvp2xQD3ya25HhN/cSVyyBMK4E540gKx7U7yu/V7Xkd/ILEiqaDeQPTIdkr50GHbIByOTIjYsIrOttrimc5mHxPOpyCW6LlDKhqQUkfIj6yAyDwzH3DCTQ2a314m9HnsDNUl9Tl22KZo76Di+R2c91nctK/M0kafMtEwhK/WIxu8SvYiC0p8vflFLwT2eZ16WQzYHYJaOLI99H61L/DzSKI71oEe9p5xp1Q6g3uOxbgZcE6XEM4qAC1tv+icipDwhEZqPeUgL+S9OrQV1yAm8Gk2CbM5dkLvVwocvPmLKE+FqQjrd8M6XvsizL32B29/9M9hf/irbRx8AwqbO8rnP8Pnf+WWm/f6qaM64xGGl7vvjFfGGqEX0GI2DQJfc7nZzjP7jZY8FPCLNpdRur1d2LXQb65JG91euK1eIbHb1er/+nnmhqpHhcnXt1qJhnOw3Qei3ecVn9maJEo4ezeRSxrx1241Dwm57pzmj9Tny+AgfvYDbhdngdprJcrHxmEjMJaeMulLbTk5x4JHuS2FOsU5b7XVGRUjd51utlJ4dc52NE1kPfvi49v9vrUtQrzI3jl1SRbxnBqnuR6xDZd4whAYqAa5mkSUjnWBqhYMsHdvs8BdNidYPu6K7PN8x3V1Gug+2Q6rf+iGUbte7JDXlzJPbW2p1Hh7PrFeHyDG+9xicNjhtcNrgtMFpg9PelDE4bXDa4LTBaYPTBqe9CWPw2eCzwWeDzwafvf589qkBJncHISSeElKrnPUAOfaHcd+VXF2C1bp0r0f3VdAkYdduR4T/IlukZwv0FfeQ2lnbjckRjYjn9UbGvvS6q5rCwLuRIByG/UrEX+Laromkkf2AB/G01nqkO6KnKSWMmLP1OpB7pD+uxQGaMY8wpaQhx1Oglg31idyBfB+pS1MPoO+N0vb5vmq0SkohjUuqR03NnDK1xsd2Ar5kI1TktBzy1OO6ZhzmLRx7IP31Wip7dPn25g5E2PMDUkqgGjUj/SItLq32KLMeNUVVc2SJTDObb51Iut3g8bMc08D79YV4vq1WVGBKiTnPMYduZNHYzA7HSgYYWAUXpyK8tAoOj3Xl6y+f8/azZz06r9ydbvjMj36Jz335p+C3/TZ8WWjdlos4+ckdT7/weR7eew/yzPr8I/KUEQUX6xkx9BtWEkF0DlgT7AroRWDfdpFLXd4gzyBHiOwIU+HAng6wUSH0Uhfz2i4umSZXNmM9cn11zyCNvf7s7nMBuLkD6Q51+2v733cwigwg69kDwpQzWylR07fDiKiwpMxNzsj5zP3Xv0GyQjpv3E4zSfZ6qr15nETmwpQUmrziv6KxVjuwadeFlro3GNwbBUat2LKVA6y921frWRRpl3V34otGn5cDjCOHXQYhduxQOT6zr/ne2C7nFGTXrDeTjOs3a2jPbuCKmEWCOPYaqojuK34cLOPUHDWa3T0OggKlFjSlA+xjPRJPnjxlLY2tNtr9PWN8/zE4bXDa4LTBaYPTBqe9KWNw2uC0wWmD0wanDU57E8bgs8Fng88Gnw0+e/357FMDTBEJngLc26u19twM1T3Sp0h3oq0UEpEZsNyc+nsbbrWDUAuDbaBT+v+y92+/ti1beif0ay2i9zHmnOuyzy3zZDrTaRuD0thlVLYFsouLSiDxgAoJhHhBQkLiCfFWL/wDhXjg76B4cAkhFxKFihJUURakVS4XTvJ2nOVM57mfs/detznH6D0iWuOhRe9jzLXXXnsf22XnWorYOXPNOS69R49o7fviqLWvNcQVax4ZCPT1wVHNpERfrEuGwLbom8OYGSknphQ1UFstlHUFFQ7TjIpean2mFLVA7x86uAd5WevRyPPSjaA7kKTYtE5G7uyAn3vUeyOcrJlofldRIGtinice7t+wlpVWL7UaU6/bWEoJ2aJX3AXDcOngcNWES5DdUCAi8t7fDUnrtGdibNHYZV0CgFdHs/YMgVg37+sMRA3bNIE763kNRzInaQqnq5d7SU5oLyhpFuspOVOqsXXw29YupQD929tbsiZqCZnwFgUWoknaZVNB1cGcspzxtUUGy+zYWoIQPfbR9v+Iz5czrZ1QmTm1xutW+ONzGP5pWfj++QXfzMbzJ8/41tNPuMk3OAeoCV6fOeiBm/lJXLsZy2ef8eL3fo//+3/8/yFrJrvwZBbybKiyS5+RmG/De9RaqKtg3rNEcKatji/QmrM1NtSbTErTHvWfprkTQXy21si0MKtEFkFk7mw2vwHYZh942KuoIyiSlYawyWrdrNtxfC+lhGhksJRSqM3RBjeHGZCdKLw3q9v817vfTccp5MXNWJaH7l+Z413mO0+e8fonP+X/8Xf+Di7O3WHmk+MRMe8ZIXTSSSCOi9E8Di4+wbqeSUl7OdvLM5s7qnHwKcV49fIl3/72txGJA47Gm3v/RutNAFOOzIle7pbD4fDo0FXNaEB1x8W7L4K3tpPFxsDb927SDc2D0G27N0JOUwf6Cwklzahm3GHKh/CfTuEpK7njQimV4gZJsRbNBfOU+7NffDaeKfHs2TNcInvl9es3jPHVY3Da4LTBaYPTBqcNTvtYxuC0wWmD0wanDU4bnPYxjMFng88Gnw0+G3z24fPZewNM11K4rzWuIl3X3/d3f5q3r36JYv6C8/HHsj3/0ju+67tf/6Pv+srbc/qqNYv3rz7zro9/nWV39kj1NeF81fffs7r9ou+/rex
x/y95/xe0GbkOtX/Ftb8w3vFR5+pyVx9551WvPvxoHj2S7h5yT+fxc10/4tvXdf/i8292fW3fX3+d3vEEG89s13jHw/3Cvnt1jYh0P/Zd3046X+e6mylt3/laX7j662v50Htv/dV3fHSNL/uGf62rvZ2ptP27Z1y9dRt5a5Lvf96vWu/L/cb46jE47au/MjjtrfcHp33hNoPTvs4Xrv4anHY906+4++C0X2QMTvvqrwxOe+v9wWlfuM3gtK/zhau/Bqddz/Qr7j447euOwWdf/ZXBZ2+9P/jsC7cZfPZ1vnD11+Cz65l+xd2/Hp99ZYBJpNdQ9JA5thYzdPcesYvFMHMMQ1LaN/ddoB0PeXnS/R4i+yNtMlZVxfVSX9P36xp2HbmWS71SEdmb80m//l6TVaJJ3FZnUjeA7Aa5XWPLUmgtah6Kao/AP36ea4DdJLPmcvW6Rx1NCYnpNHVJX5d0RmTWu/9csgXeft7+R/8JIrsA/GMg0P68W5QZovar5kuN2jRdtv2R/NejmZg12/fb8b2WYyaDXBtVZBm4O85lvfvqgEdGwr42ZliTkAASdnANJHoFDKoh9bXaaKlLiXsmgUt3wWuSc8MI6Wp14zu/+l1AWFohf+s5T599g2wO50Kzxv2rV0w/+xk5N2wtvYasoCnjRM1NTTmyODykmdEMT5AenUclnsG29YparztfbGuzb+NmG7o/87Xs9Nr2VKVHoiN7gf2623pHRku/Kk5E27d5xLXt4jOw+8c1ymxN9i6gdtnTt+e4/W1m1FJCTn6VebPVO55UaTlhPlGtIN2WoPvUTnaRJ+AWmxk23Q9qm4S3f2+bR8jJUzTw3PzSPTJceBfoetgiF5vd1vpqWy5rcuVO295drLt/2LfMkS659z7PftD16zl3/xE30NTX9nLP7Zmgy5sfM0C3g76/7tS+56VUTqcTpZRe23WMrzMGpw1OG5w2OG1w2uC0j2UMThucNjhtcNrgtMFpH8MYfDb4bPDZ4LPBZx8+n703wBSGnRDCSUspmPUImRmOIv0e7o6nxO3xAKU8WvRtwQLcokEf3cBSB6XUHdbMqB0gkgpT0r325GaotTYgaiIej8fuXLFgOWcO80ztRUKvQS8MJNOmfNl668bbDan0Jm8ADefg0XCudKO+dsxN8gcwpS7dc+3yUcGtcZhnVMKh7u7uAii7w6sKtYbDiEhf7wC4TSIodLGeO4LTahiv9PqygkTTsA74Ufs05lVrpbXK/f09LiFrTSnxtEuINxLcSNHNuTnesC4LZVlppXJNbzJ3Yu1gt4GHbbagF6kqCN6MwzxDM7wa67LSNCSqeZpw6wUuAbNeA7M3AzzkDGaU05km8Xy+kVoOWTISdWcxcGsUq7y6v+c0Cf+9/8F/P6S9OcHTO/jWL/En//B3+L1//z/gzYsT93/w++R/9D2+8Z1PWF+8IvVmjoe7WwrOZ69fc7i9w0pFzHny/Am3OZGtoWF6WDUc6QegkB0rCUfCP7xSWwdQEVIKSbGK4gi1VMIV4nCyruvu9HF4MmpdmebwwfDDFgDrhumF3KeUL/WDu+N7b8QoIjT3HarM0/56ykoy6WcAj3W2S71fzOCqhigCXo3TuVJbNIW7vX3SfSIzifJknjnmiXYwXt+/Rpph64q3xunc2M4CsmwS94xK7rV+wylVA5pKx5KtBvG6LDx59pScMzc3N6zriqbE8XgTNY53Egkf284OgoLEM0/TRLVLTVuhP6OFn5qFvzXzWDeRveYxxHnpfFr2vx3rtXLDN9ay7ngSZNAwYMopartqkL17HMLqJkGOKYTP9UNMkoxI4E8thdM57ruWSvv+90nTgXVdrw5YY7xvDE4bnDY4bXDa4LTBaR/LGJw2OG1w2uC0wWmD0z6GMfhs8Nngs8Fng88+fD57b4Bpq3GaspJkRuUSAdSsqILLNuktkp8waZRm3D+cAUgJsgpZlSQZF4v6h63tAPf2CCBWVFMsljVqq+Sc2eqpbhsKgtWGtUaelIRwXlaojZYSeZpiQ66iohGUdWqrNKtRM1JBp0skP2fdo4WthYNF9LShmlHVvQZp6tkJVmHOGdxYz2eOx2PUlJ3yVdQ8altqVrL01+0SDRVX1louUWUiC0Ikmn/JFs0XmI8zbsa6hgEcjkeOhzmIeV1ZSwFxHh4e0J7NcHNzvGQu5GknxGmaKDmznBfWtXI+r0yHqde7jQZjkjOalIZT3aitdlLVnSjjIQUsGhmu54X1fOZ0OjFPM3megvRr2wO83ipeG5gzJ4XelK2UwnGeydNE6vvYJOpIuju2VLwa4sLNYSafw9n41rdgA/rbG0BopXA+nbjThnjDvfDq0085zInDdAfAoolnv/Zn+Y2/+q/wayT+5A//c17+6MekH/2UI8LUGhB2baI0Edb1HPYqkCZHWgCGemYpy27PWZRJE+6wrIXWGlPKTJ3UksjeAE4lSNCbYVW4+LGjoiRRcp4uviJxJNj8JCXtdhIHkt1fAFTJ0g8IbLVV7dLoL+LW/b0gj9ZagLHGAWWa+h6asTycwj6mieN0IHXCUHGe3dzQWkE16iP3neu4MGHmtFaYjwesXYAaN2qNaDlAytG88fb2ltTrf8Y1EhANNJdl3fEjTxO1BU4oCY1zAe5OtUbzrRoptN78M3zzMWDuuEbqf8cqLsv5ckgSoYmBBkgvS+0+Di7aszV6c8NpAo1aqJGlJNFIj1671aN+7pal4f0ZPZJWWMu2L8aLFy9AM6VU1nVljK8zBqcNThucNjhtcNrgtI9lDE4bnDY4bXDa4LTBaR/DGHw2+Gzw2eCzwWcfOp+9N8DU9si5kHLGrW5PinS5oRNyzAi/CbW1MFi42jTZo84ISJcu4vRoaY3mX2xRv4jCO2Goqop52xc9APFCDtvW5BwZAtHUzwKUtntdbZxqRAYVwWQD0ojQp5R2oN9lo/0xNtAP+dk10cQ6uHk3VoMuExSJxnM5px7tlz2bIL5Hj9V7l84q0huhSTc6VQ2SVKWuJQxHtwePrIKpN+zaZbmqTIcZzQEY7eEB786zLMujSGsAQcjxWo9Eb5FbrbrnEpRS+6pvUuAQSaoK5nTCWfueKOqJVivLcmY5L9RSmfLUgalft8uW9/lgrMsZBcQ7qKW0R+GB3qCuUdZCMnCEpMLxcOD2eKQqkc2iSjmfeP2zn/Lm5T0/+8M/pJweuLl7gjRF3EhpomXF+z5aCnJIh5mnv/wr/JIm5sORn/2TH0LKqMfcY+8TSM+mscgqkMgtYPOOWOO+776BquOtoUTzuJSULUtmI/uUFLCQf+4HAAdRNAURbM0b4yBkAXrd5lQVcfZskU1ifnGabof7PZWUBDwkrtshQ1RIssmN+74LAd4iIIqpdUM21vVMzjNYUEWYY9xDk7JJUC8j5tVaRPY3mWyt5SqrIg5Ssf8XCauZ7etba90Pb9trrWc6zDlH5ol7NGzseLXDk19LisOqVQShIe6Ys8uk4/NxuPSre1nPrnrUIlNAJbI2fF/XBigmHUPlSq59xTF7dhMepORh5zs2SazlJpm9luiO8eVjcN
rgtMFpg9MGpw1O+1jG4LTBaYPTBqcNThuc9jGMwWeDzwafDT4bfPbh89l7A0ylNtoUDyopgdXd6REJwDNHcmy4A+u6cneYAxD6Rm1SyNYsAFaEpIlWClYbxZwpXdXnBEqfvCgXB3cHd7TXpNyu7YQMb5omrDVqj2ZHBH/awW8DaE3CpJmsiplimvashi06uD0jotAdxvr3zR3hInWD7sBm1FKx2noUNww85USeLtFcOgzY1SZteywinQh1l2bmlJhyJifFWyP2WIAA42nKHI438Yzm3ek7EHg8f621S40b5/P5EdDnToimjXVdqT2iW2uN7ArvwHZWkjXUDaYgxIQEKDWw5qxt7XuYyOIs5zPn05nz+bzXiFRRkiaMy3pI2iK1xv35FJHylJinmZQnUs6kTb7YVlptnE9njvlAkljf2+MNT2rBabTPPwcRTvf3/OAf/yE//Md/wsPL17TTA8++/R3wqEObDkdOhCTRAZmVYo2H+zc8ff6cX5pvuEmZH/zf/n18vgG5yBZlsw+jy0Mlzh9+yVjZCBEJgmq1O731mp4akebNT/sSkTr45qRx4LgCcAjg3YC+9YyLnDNJI/tFVXuNVvq69pq3HeC5AqQg0q3uZnsEHDFHIYniHr4QINplthoZJnEPOJ9PPHt2iEvXyBpQDRsR7bJxCUIppezgWkrpEf2w6Voiy2Tq2SPTNDHlTGveif4ip/V+WAzHkNgj7SCI94h9IuTQkHJksWzueA2R0g9h2n/w8Knm9eK7nexF+l7uZB54eO27mrRnPnn82wp4umRgwZ7FsE3A3GlmtE4cVivBI34BeoWUJ9pa+tfeJtAx3jUGpw1OG5w2OG1w2uC0j2UMThucNjhtcNrgtMFpH8MYfDb4bPDZ4LPBZx8+n703wLTUhpwXWmukQ+aYpys5W41oORL1/VpEvlZzZlVcBe1LaHRANu91O3vU1vrDA4d5ZovaCzDnKSJ/Fo29os7kRhCV0hueebOIyHnI6VIHyTnPZE1hoFfPFLUkC2RD8hREMm3NuwiHv4owlrJQSuW8nHuWgWNWQ1IoExtCh9EL6Rh1RqecuD0+Jc+py/oy1cplP3GyKDkrh8NEWQtWK7VUMHoTrXjGVhb09o6b4x2Y49ge7d1kr5u5zsfpAgBmIIS8L6c9w+J8PveoqvPy5UvcnZxS1EFdVxTl7u4OMyOn3LMRIPV/W6sgBimRNHF3M7GujdoM63NWFHVFzPfmfc+fP2c+zkG+SZjy4bIxHo5tGwCmxDTPPHnylHU5c14KdKNe15VSC7UYcpyZ5pnDNJMUDsD9mwf+3r/978Rna+Xl/RtsWbmbj3zy7Bm/fHxKwVgw3mThL/6Nv8bdNz6JXfnWc/7Bb/0Wf/v/8G/zP59uyWsj/exnPBeQ9QGXkG8DrKeF88MDD68eyPMhmhiKIElJDuoewI4HGfiKCbTaWE4nnj17irjgzUE8AHU/wIR/5RT1cjUlckrc3t7ycH+ilYblS3R7miZKXS9RcAmQ2bIXbu5ud5CotXZ7cWpraE4BFGZYC5LY5OM5JaTP2dxwA3PjcDiA92h96dkjKnhSTus51lIcvMtvVVlKZZaesbCBaOuNOjX8d7P5OU+RTdOfb2va6B6ReNma3ZkFEUJkCQBTnvj1X/91vv/DH/D6/g33Dw+9AScgIZmd5ons2wESarWQR7tgrbGWRm0gDur0+svhuSIactNOFJKgVcM7Ebf2uE7w5qeCM08ziLC2hnv4aN0upEEYay3U0oKkRMhZqc0opXF//7DPwxDMomarXYPcGF86BqcNThucNjhtcNrgtI9lDE4bnDY4bXDa4LTBaR/DGHw2+Gzw2eCzwWcfPp+9N8BkIkjKpJxBtEe8/LLwKaESmyWwR6fNDTPZP2uqqMv+vkrI35gmWgt5ZKQrOBAN+FqPzOO+13Lco9vYJdpPRBIjq8F6TdIgk02ClvZMhADECjRrlNqjnBKGghCGKbFJDj36qd3I+vV6ZD/ljKaYR5KEOuDGPE+kFLLLMJxK1ajBCEEIOUeDwS3yryq4CmKyR4y3yLs3p9VKWVcQjwinJlQ86pOq0LZrtzC0raYmCCYe4COQyXuU2MxoNbIHijkqS993B49shDznPZNja7RnKkHefcvMbCfV1qWEcQnZsylUYw2nfGk4F2seUeWOO4hATrrv31pWXr568wh8RISkmWnqhwYVZGtk5o6asb55Qw/58ixl9KAcpplbUWxZqBirG2/EYD6QnjyN5ZqPYLC+es0//q3/hFwKfv/AUSEjiPu+L3EiiLhzzjPTfESnib3cp0eUeMuAqdVCHqtwPB6YpgAfawXUSb2+bth0yF2nfgix7dBTa892iVj3vtg4tTaEkD9qP/Agb0Wq+5KH/4SPisT8HEdz7OM2D5FuJ2YgiiQhE1kt1iJCPvdovzlUcySlHehaa3viwgZMgfAgKCGfNqbjATPH6UA/z/u9tz3frtesUDoheb+2qvLkyR0iQp5mDt/4hPzZp+R1wXrEH2GXnz+uebplV0RGiJvHgdYdQVHZpOyxIFsGQGQmOWJd1t9NuW1+i+MGzWJtJ41sCutZFM3AhP3gKz3Lw9275DXuVVvDETQrKU9x714rODIILtL5Md4/BqcNThucNjhtcNrgtI9lDE4bnDY4bXDa4LTBaR/DGHw2+Gzw2eCzwWcfPp+9N8DkdNlkyiBxsc1m5hQSu6RCaYVIEOhG6hA5Br4brYfNsUkxk0bdRr9yhMuyyy5Hw71HAS8bHc2r+hz7Bvp+LRCV3THM7FHV1Phur59oDTWB1O+5XY/LtTW0eOScKTXkaqpRlzHkd/HZpEpCcWvkrAhBSpoChKVxtRnKVqPV8f0+ruFIIpBVo34jUFsYfG01QFA30mR3+kfg3dpV/ddep1E2CaxEwy933JySSzT2arbLHd0NFydNAa6bXBAh9lFj3n3B8BpOh1zW41ILM/Z6qzG7AX7svbHVQsUa3hregqybh9Ot68rpdI5Gboee0dClyllSABlBTm4R+k0qSI9up5S4uTmiwJQycxLW5czqzopxdosorHQraY7WRloKP/veP2Iqlak1bhO7PW+kaoBoQjWTu5yWlFCJ5xKPQ8tmS61JHAZUmVMi5wA667JP9ALeTpemSgBNrY1mjVavJdWXxfYNzAKuY7tUOg35bi/QgV43CW2vzdrtJOeEamQtAD0zJIA+1j3ktUkD1eJwFTAi7jSPQ4YTWSHWMVSIg6M5bGHvJAm8hbxTE07BPeY6zVEzd92yFPrca63RLK9d5LieAldub2/D3qaJdDigSdGkZJmiCaSyk4OZP/L1bY82gpUAFVQDB/cMqn6ItH7wjEOmdJICp+04FL53OTSiXaLecaxa3/N+WNzyJrZzT+xzotSKaIqflPtaRnPRwJDIgBrjq8fgtMFpg9MGpw1OG5z2sYzBaYPTBqcNThucNjjtYxiDzwafDT4bfDb47MPns/cGmKpDQ2giVCJbYDOtagGwYVgOKeRu1hyTrYFXAFtzw4oxT705WUpozrRaIyqcEtGxTEAimhgLFo3fW
gsw2CLJvt2TjVACgVp1khitb9q6Boi1DrApKdOUe43QWE2vDWuxRY/qK/bNmeeQVjac8tBwF6YUsrktsgswTYnAHaXWmHstK5IETYKT9k0RkS5fiwhlRNaFKeeIzpvjrVJL33jbmoqB5miM18wwiyaFOSna12aLAJvVvdKouZJyprXGWgq7xg7n5uZIzollWTmdTvtaiQjzcUZzQnuty5QS3oHVW7cIh3UpXSYoe9YBRLbIcT5gR9ufuba6k5thF5KuhbYueK1MOdFKo9ElgC4cb255/sknAJRl3WtxmsDSKuVkZJx0TDydnnBf7vF+SLi5mXvN1kTOmdf3LygirJvs9OVrON7GtF8INy8f+EaB9OKe23lmSoq1M+dW+8NvXqD4NDHd3EBKNBzzaPrmFgRo9WJPiuAawJ1TQohocKsFEZhzpnM7U86YRXZAWaMWpiDUUkhpQvTxgaRZjYwN6MkZDU05iFouGQoAa+0HLhHUhdYKtYYtz/ORrZHm5mtmAUxpmknzxHGesXUJECWzrrXPgwB5CeAp1jDnEonfZb5x7ySK1IYTdVDXWqitcpxnDodDyKo70Guf7+l0AoHmUEqvS6zpKnMGvDXKixcsDydqqeTD1A9HsTalLNRS9iwF35C1E5CKIioULA6AHee6MwawtnqRqu5UFq51nWgS7hT3DlsuVO/y1J7pcb2Pm2fmNKFdsl2bx2fprHk5Y1GtUVql1JEZ93XG4LTBaYPTBqcNTouV6M44OO0DHoPTBqcNThucNjgtVqI74+C0D3QMPht8Nvhs8Nngs1iJ7owfJJ99RYDJWVsj10ZSY5q0R6idahVcIAkqqf8IskUmzUPyCBHhtZDotdTIHkCpuT+oR+OqbbTWetRSaRo1AQOkJxChWcV6NPc6ywCH2hrmDTEQHJpQWyzCNPdalRqGDuB62SS6MW1gLCo0N2qzS6YCTnNDWkN7JBXgkCdEMnlOlHUhah9e6k6aG3nu0Vkz1rWROjDmLu3rsV3cWkQoe3TwyhaY57kTUuV0Wjifz0w2cbyJZnvRoCuM9bpp2va8qsrLV6/669Esb54njscDqsrxeAxCWFfuTw+spezR8Nu72y7PTT1LINaurSuvXp+otXHotWCj0eGReT7stVlLK1GHlYgyV2+XDIiyIu4kibqb54czh/nAt779CUji6fPnPP/WtwB48bOfY9Z4+uSO+/VMaYW1rBxubpinqKH5bY19UenyWAnZnyDoYeZcK6k11qXy89/9fV7/0Z+EzdcVfXPPb/6ZP8uzdCCL4944l0KxqLubmPb9MIP55rgfEpr32p7VsNKo61Z3FFIKMtcUkemb4zGc2w5EwkpGe53V1kLvmrNEndx+MMh52rMxatsarUVUPs2C7FZ0yXiZpg5020zW3tyuVST1qDjRxNI7IG+h81bj2KU59b1XUA2g6hHz/XBEEAMiNIyKIR7XM4RSG1OaL1kKCEhCUuLhfGIta9Q8vhHWWlARbrpdmxu1tQDg2rocVHecMDc+e/F53xXhZ59+xstXLzGcYz9sxbIEoe1SeAAXWrV+MIx1NheW80qe5sgW6WsaANuzEDbm0JCLusfB8wK5nTBFMZxWDWzt7+su6d2zAFqLrBgXaqvQGqqJ5tBK1Bpe1jWAXoQ0TaylgmbyfI0SY3zZGJw2OG1w2uC0wWmD0z6WMThtcNrgtMFpg9MGp30MY/DZ4LPBZ4PPBp99+Hz2FSXyIrhmZnvNvmhsd4l6ufeH3fBSLguyXycuwibh2iViW8bB9idEeIwLMF3/GxkHEFK8y+f2uqh9A8z6VDyWegPj1qxHZLe7+T7ffcIiF2CVd8tX9802w7cI5tW+m0c4UXq2gZlg1nDXCykRRnEBc7mY0tUavS2nu2QKaH+mRmoXMe4m+/Wrn8vjxB1quwCsIxyYEVVydqZpQiTq057XBcxp/Z7TYb48JBDyzi55XVfWUvdsDxxSusgqNSl0ya17ENXeMNEdK4Uk0gEpZL5Tnsg9+j/liWmO5nxb1DjnhFbZrysCmsL5N8IRJK7Ro9e4MIlTcdQNBZbXr7HzgrtT1oVZlKc3txxRxC2klFV3EN3rlXr4eEppJ1X3i0Q6fi4N8bY6vdoPG7FWjvf6u1vN3dhfY5eXSgDoxW26vz3a25COC3rxp+4bm9/QbU93YDIgXa7biXs76FzblGhE1/cDkod/XY8tu2dTH8d1t1d7HdQNI678DULOHX7ql+fvkXQgGhL2zxu+13e9rh+6lrLPt1Xr8lTZQTkyLC4+8mjuHvhgZl1eSvdZQ7rfbp+7YBS7L0g/AHvoni/rISF83dbBzQnI1Cvs2XzxLXzxkB17X4/IHrra8y3jQ2Q/IIzx/jE4bXDa4LTBaYPTBqd9LGNw2uC0wWmD0wanDU77GMbgs8Fng88Gnw0++/D57P0BpqsJ7AD0rg/K1e9vgeI7P3P10v4Jv5gEb4HT43m8f3hf1suH5fE8uGzEu2f0+JP7719y87eB+ItXfOua7pdnkrfncQH57XtfNtNfZGz3fHtv5Av3/9oXxPszvH2NL12Pt23nfbf+kvfeee3rl77kefy9e75f/EK0XzXe85F/6jX9ihH0t934azzP1dht7Sts9Qvf+XqfvMzRr/zv8iqP5np90Hvr9f0bb5HM1x5XGPKu733dSz3CpX7hLzO9L9uFd97/Hd99dPB994Uud9l86BfxuzEejcFpV78PTru+4OC0LxmD0wanfen93/HdwWn/YsfgtKvfB6ddX3Bw2peMwWmD0770/u/47uC0f3Fj8NnV74PPri84+OxLxuCzwWdfev93fPdfFJ+9N8C0Aqs7yZ1cKjdT6gAkZM2o9MZYOZq3WXPEnWVZyCrMvS5mUiVr4nA4oBKN8Kq1aHwlgjejtnV/MDdDeqMs1Yhq+1WGguZMknxZB3eQiAK2Eo3jsmrIAoU9mqtVKaUyMe3JD5fqriDi3R8uixbRxajpiGqP6xE1GLkYZbVCrULSmeV8JufM7e0teY5GhX513S1KOB0mRDzkkabxnK3RStS1nLbtccPcWMuZtRz3mp43Nzch/RTZG+KZ+56h0Nz2iKsmpdXGuiyPot5JE80tottJmY8HDhw53BypeDTA69dYloVSCiKC1Ra1GlWpVqnLQi0NyzGPpRmlRK3bUhulVtYakkshZJtznnZHkXmKho049bxye3PLlCZO92dEoJ7OPHz6WdiTG8fDjNdGWU5YrSRxbg9H0pRxhVmOQEScW21hg71p4nJyvKyIGzcItwY5PAgtjZxg0sgc2LJflMSkEzRIXYvYqtFqA28hZVQha8JppCSITpQr9451DCjUfc+c1urejPAiKfbIDBBlWZYONH1fWyWROBzm/lmJrIq4HG6RieE0mkWzRvW044VkgSqYgVtD84R4otYSUuVaWdeIypsR9Uttk4U7y3qOeqfmeHW0t7M0N6wWDpIxEaCQNKM9+2hdK+dUyF1GTJ5iLVQ5P5wjuyLnLoUNOeqGZ0bIsKsZKYe8ta7h8/GsEtlOfZ2XZQERUi8ua82g+/c8z2GrbROVxlpI
35NIBHJq7phi4VPbZx0wj7rIILQq0RzSvUtoLyBcugzYxZGc2DJC3NmzFq6SA7q9Oua9YSiGaKI0Yy2VpfRaxRr5VKfSHmUXjPH+MThtcNrgtMFpg9MGp30sY3Da4LTBaYPTBqcNTvsYxuCzwWeDzwafDT778PnsvQGm8/nMnJQpK9WNUtsejlPxXrcvfmSbfax0X4S+NKpR/zHl/TPu8X2JtXsErqIa71/L/9xo3hA22VhcP+RkjmqAmZniLWRd2Qki6ddt7tTWoq5jv44GE+z3q63t0Uhrl80Fj/qi4nvU2bnMu9bGKgWBXabYrJFlQlPI/Mzqo/VNouDRUC2TelgxLizIvi7h3PGcpcQ9NOleB/ZRTLEv6Pa9Ldp//ZNz3iOZ8fFYB9XcZY1C1qhDuSzRmAwC6IPYcyejhKWQ3GVJuAYxxOI5zYV1XanNdnms+6Yjzo9izFEHNKLdLQfhWd/3lDJmxrJEHVVVyCmTkpJVkZzQFFLEZo3WjNI22WK8JtbvJhoN/9xJojw5zBxUSFsEOgmC4bVQbCO5ABdvIbO17mzeDFqvfqkSTdpEmVKKA0KzkOjuph17sTVYDIlrCyLR9Cg6LKIX2zcLiaM7tRZaa0xT5ngb0t2UlDwlNAfIRzXcy2cd0GSXxe52oCnRrHW/oddB7fLsPumYk3biCYnrVpfV+uc2wNwOFW5hx0rUKRWkZxmEn3hvFDhp7r4dPpN6jdc8ZazZI0wwj7+neaKZoS7km7QD/VpqyJN7eF41jmQiQlLBuGAJHnu11XuNZoLhWyoJ9yCmyPjpPniVoWAewN1dlWZG28B78+H+nsUpASQOtWbeP9dt03kE1NavH0sonRA6OYhAl6S6SBAtckUwY3zVGJw2OG1w2uC0wWmD0z6WMThtcNrgtMFpg9MGp30MY/DZ4LPBZ4PPBp99+Hz23gDT/fnMPGfmnKhirLXtDapyDvDS/nBijnZQcQmn2wxXREDDCcAeAZjgSBQtDYCTqN3oV/Ukt9FESEmuHGdr9OWIBPCZOdIiSugbkojui7jWiuYUDfe6YW2GVuulPicexqW9mVeA5NYUMCjqGmBbaxQc3bHaqa2S3UgbgPY2W9uzR0O8Ri0l+ptt17wCadjAq+1z3BqMbdFnhH2dvP+9rbsIHYCijm1KiflwuDhRdwpNiSlPyH5N4fb29tEenE8notqm0GrDxPCWSHliShlxuURnBUSiHmVtFg0K93Xsc+Mia0xXdS9bM9blFI3rPBotmhtrXQA4HA7kFIeHacokOji4UUqjtMayk2qAWu2RX3M4ryvuTk6J2/kY19LOs1mjLmcrbMktqkI+JFRqHBbWtq8d5rhYb7hGgHjKeCvU1vZIdvD2pTbpPB9Imvr6eveNa4sS3Pxq76OJX6217+XW7BA0RcRcNGxWBNwbpbS4DqA5CDyeJ8g55YRXp28I0pvCubBn7qReq9iJzAazAL3mF+C8branyIVcRKNRYy9x6rDbDYDliNI39x1YN1v0Kz8A8AYuMM0zlAIIx8Md9/f3lHXlvKwcDge2TKdpmva1ykkptc8XI5p76rZltBaEWErtq9Uts5PG9vwbhjRzSLLbQG0NF30L6ENabOZXNWRj/cyJRoN2db0+Lu9vt+7fQRBNpJz2edTWAKW5sdTHh8gx3j0Gpw1OG5w2OG1w2uC0j2UMThucNjhtcNrgtMFpH8MYfDb4bPDZ4LPBZx8+n703wPT6vKAiUBvz3U1IDbtxJRdICU1KEUHNyCpM8wETQfzS5K6Y0RpkUXKOjYTu7H2Dpzl0drHZmVoKVltkK5iRpDcp2xqbbQTQwUNz5ng8Mk+VMk2s54XcG8dZX4RmjfPDOYwzT8wpo4cU0de+2BFJDOJRwsibWTT46hFUFSXJY6BPKZNySEgPU8Za5eHhgdO6cHNz5Hicmab8iPzMQupYa8WsMaWMilBrZemSUqBLFS81VEWElDOHOXcgrbx58waIhnjz8cjt7W1kUUhEGUuXHgbIzPsctmZ2ukdxUzijGYebI9N86A3E4NXhBVZDhnl+ONHWiq0NdeWQMlkSp7Lsa7KBgKTEnGcOxyMp9awFUeSqEd1GpiDM85GTTtHwzxuCkQS0y4qrhZz3/ObE3fMnzPNEzonPP/ucdV0pzUjHAyDkaeLZ80+gCuuycn9/z6effs43nj/jyd0d2YWbm8MOyC/eLLiFnalIZAS4U91YW8NKxdbOAAgGnCmU1QBlnubI8ACmKTMfp/35bo5Haq2oKPN8jOj12qil0aZNmhpXfrg/01oNAnCl1nWXCR8OB3JOu7zbe6TfzQIcLTI2Sl0Dc9QR10dAL5L2g06xhgDH45HTaWGeZu6e3MREesO52eP4sn1/tcqG4KVnmqgq0+HI6bTiXT6bvANgP/7U1na/eTif46DTKnnKHcScl69fXaTX+8Eyk/PE6fWZUkMCm+eJo9/Q3GHZ9mSbx8x5WfaMG9WNwCvVoNZK64eStVTMHL2Sv0s4CEisZfONzELSnaapy2cdartkJrjjtl1DQJ1omCfULjd1IOcZEcXN9nkEEQF0guwEvJoh1cAN6YkpbsZqTmnG64eFz1++YYyvHoPTBqcNThucNjhtcNrHMganDU4bnDY4bXDa4LSPYQw+G3w2+Gzw2eCzD5/P3htgqsD9ugLOs9sDTaZ4ILbIWkQf1Xu00QIMS62RVdBJISchSdTNFAKsamt77UuhR9ElpFlbZFJTOH+ttcvk6JJN2yN72h1SFFAQj0htmvIlA2AL5noYQWuNYo5rr8faHUQ1rrU5RPOQm4ax9dc9DCAkeLESmyEeponjYcbKirv2LATZjWd7hg2wW38OUaWW2p8jalZO03SJotJ2qeqWUdBaoxUBF1p1zusJgKM5iDLPkRWwRUOtBaG0WkNi2R3icDggfV61VqZpwnqkmFp3iSRAnjOeAm4PhwOrC9UK2ok2IfgWnlVFpgzTFFH2pEj/iai17GuzDw9jX5Zlj2abGW/u35BzYupR1FozIlDqgr1y5sPE4TBzPp1AhCkn8rzJODtx9xixAVNKCBHh1nSRRe+7KZvs9ZIJoa7UGkTPnvHQgqjV4zPCnuER0WCj7RkN/XXbZJ2NstbYEw8yXkvZ348avJGNsV3T3VAVpjk/OjSA01qh1ABcQfZ6uc3iOiERjsOJTgnraJRSipq03U6Shs1uPpO6vHxdV6y1fX4iXVZprQMcoIIqWGuIpyDzbU09ZKqRLhTXbvXUMyycaZpCbpsEE7vU8+33SwKSEnmeMSR8NScoEtLzKVMtMifUjYk5/M1hXUr3tU2SfvnZDa8fnpZ1CZtHUI/MCiPAO7ZRLhlIFpL4ZV3R7IgmzHuN2W1fbMu4AJeQBCOBeu5BFLvMVC4ZHaW1wBlJ4ePNKaVR+2GnOZgkVoUHhJeXhKsx3jMGpw1OG5w2OG1w2uC0j2UMThucNjhtcNrgtMFpH8MYfDb4bPDZ4LPBZx8+n703wNSAczOkVB5q5Wae2HapmeMpJhIyu6iTukX5Y6O6tbh0vPHuzOFEUT9ye8Ytwh7RQxENZN7
AbnNG2TbGL2uj8ZrTo+xJo/lYB+Pco8RN+qd8q60KtZbd9lS1g550I0/hKE6vZbrNRwAJwukWoAhJg8ysk5bmxDQlck5XcrVtxBwgyKpJ3Z/R+733T/pMlUptAfittV5vUhAXsIt8suZKK5WyVjQbolf1HLvjaHf4i2F18q016jz6Zb3NLg37tiZn0tfBaqOVbpCaQJVpI7OU0HnCO3EY9Lq0sXa2HRT21QDpUXBR2SW3LlBqRK33qrY91FvK2q9j3Y62TBRlnqZO2EKrFTyHwFj6YcOc1gzXtEsaoR9WPOSj5/PCulaaGWKZVtpOkkBvZtg6IEdN0sPhEI0NDcwrpUSE2zrhCF0yvS6UEtFsd4vsiGsgVYlauX2VWmsI0g8/Yceb/eCCWICuEA6hmnrGTpyANmB3iPlq+FwAu+72mKeQVb/tl3sNUL9IzN3DPjRdDjBRszTmIYS/0z9X1jX8a3eDsEVNypwSmjKaBZGIrnv/Dzpe9IwDREmayNOEriUOVDnT3Dthha1tNZVLbb0+bLiu9z3Ya7d6+ICkPv/NMmXz7As50e0ynjXm5dfA7U5pbYOt/Rm82/j2erOwVbt6xs03tF+bOLNFRlO317qtB0JDeVNWXlfjzcZZY7x3DE4bnDY4bXDa7n+D0wanfeBjcNrgtMFpg9N2/xucNjjtAx6DzwafDT4bfLb73+CzD5bP3htgWrqZtNb44ctXpJy5nWOlMnBIiZQmEuDSEG8091h43x4tHsFa4+H+npt5Jk8JcmLSTBJF0cs6IlugNjapN00LaVnj9u4YdR93YvBuwBaPr6AipDzjzRGEm0NIM5tHlsPDw32QgUazMYy99qqkab/27ZM7zAqtGnmeWEoDaxzmiWqOtbrLYI9pwlOmqdBaBRWOhyN3d7e7vMx2oIqfalEPdcpKfpKo6xrRYIOcpytiOIALVgPQLRtkON4dqWslSWbSLol0xUrj/OaEhL4TzRnkUm90k6rS91ZEMXNKX2Ngl00GIYaTzNNEEoEWRlrXimihWTSu0zyRN4DIGZ0m5id3nJaF8xpy2wDKDiodDqADj0dd1m9861v8/Oc/p1oLKW266aCxAZvjZiynM58cjhx0Ikvik+fPOJ9XSmvcHI/9+Yz70xkIQpqPB0oprOeF5NCWxlaL13EaRqmF5bzw6U9+DpLiJ004umcXQNQJzSokce6ON9ze3vLNb34rrr+eQRovf/4Z7hGxvzkcyZqotfDqzZuoySuKaGJZl70BH8DheEADkeI5auFhXUEnzuczWpSjxD7mnMjHmcPh0A9S/aCiIX1WTSxl3YFtWVZub2+4OR45HA68uX9Na5WUhLsnd3Eo22Sc67r73l7+czsAWMMxDrcxDzPjdD4FXDm4KQaUUlnXlTdvHpgP8y5Vn6aESxwoTXWXvofZr9iFU7m/v2dZVn75l77Lkyc3zIeZeT5Sm1PNmewK6TTuq9OEmbGuK0i3aeIQsWfLwKX2q8RRIuXMlGdIITFt5thG2N6zRmZlbStrbTx7/gm1v16XlfvltGcpHA4HFMUNlrKAJhCnlJWcDrgKdKKM85PRmvcDSUiKI+9FwJ219AMdwoMrf/Tzz3ldKmfG+DpjcNrgtMFpg9MGpw1O+1jG4LTBaYPTBqcNThuc9jGMwWeDzwafDT4bfPbh89n7S+SJoDjm8PlSeXpeoi4fcHe8ZXHnfll5mpXk4exzzuAtnLav/RZRFO+ReHO8NiqCq5OULsDq+9Qj8SLSayIeMWuYG7UW3HWXfeac0RQZCefTPTnlqLt5OFDOK94M64BpXTYWzeqiFuQm3XQLqkjp0gCumaEpMxEyNiEiha01jscjU0ohxSQii7VVciOayiXpkUKLaD/0jIswuJSD3BSNv/vrdVUajdIis2CzAPcwQDHjdDpRa+X2cItK5ng47s6zRd9xx11QElPqzcsEULlct49YF0NyAIKKRjQTD/lvd7hpniKq7ZU0ZQ43twiZFy9fItaYPJGmkIiaSDQDqyETRAVrcRDYArNl1/9tGRrRWO+mVTSlTixBhLoFdgmgKesSksgtK6K2cIqkILoDlUdIH1Jc38x5+uSOsizcv1l5/vQbbFcWhLouLMvC+bSAZHQ6YKK8OC1Mt09oyTj3ea/LCS+FP//Lv8Th7o5pnjmVhbqu1LLSWuP5s+dA7N+rVy+5u7sjqXJ7eyTPE5omVDPnspJTJqe877kQdr2WBVVhnnNkbeREypd9tNZYliUyDBzcokZr6430NE84l0yWlGTP2CmlhP2lmcNhwlvt2Sob0Jcd6LMmRCLjRlWZcsZEAsD6yFPC6TLtlDGDac5IUk7nlZTynnmA6m6rpbY9y2DKkQmx2SbEgaSUwqvXr5mOBxrOqzdvWMpK7X69ZwYAzSKzQ1XI87Tb+JaJY+7s6RImncun/l1DrHE4zCyl0qyFLDYmhALn5RxZFKqste6ZAvkwI6dExPo7iRCH1tYa8zTHtVRZLeax4VMzo7aojXtzuEF7FlB1p5izNudNP1ie1sYPXt3zqlRWhHaVeTTGl4/BaYPTBqcNThucNjjtYxmD0wanDU4bnDY4bXDaxzAGnw0+G3w2+Gzw2YfPZ+8NMDlsAi+KOa9OC0upCMKvPHuOJaX2GP52G3cPpxTZAS5Ax3fwcZOok+rxee9yr8dSzvieCuSUaIGyeKvY1cfMIhIuOfVrhTNLj6BuUdXLJZ2gnl4PsX/Hvcv4eg1C6UC1fX7/7P4TgL7JCfFujNVCpulRM3THVOHimBIyxACviGBudTQ1JchQW9uliFvk1ptF3VRr+Oo8PDzs194lhZ1UNwmhXL2HxJw359lG259zk9551G3EH5OCxP+THnVNOYUTecham3vUkr36Qi21yyRBRXvTslg/s4ukT8zYUixCOtrXQ3o92yug3/Y2TxOtVqoKQsZ7VomK7s0Y3awTQm/KKHA8zNTzQikFa73ea8/yaKXiNfYczazmLDTuHX79V3+FinP6/FMAzi8aayn44YhMM5IyqFBqodaoa5rTBbDu14Xj8UBKM/M8R73eDvTV2i6t7g/ZE3H8Ak5dqq0EmW01bF22Wq5b3dNNOnnxqUe+JfG+edTy3IBbNWGbXLmvRwBozzgBksQhRpPgaM828c3yerZIf82sZwMEGe91d6+m0ld696vt83TfufhPGMCyLtTWmPrrtR8ARRV6FsxmW1st5S07o7Z43mYN6363+cxFLtrXaq8H63i3xc2m1cFr2JQmKK1CX7/DfNjrpG6y6W1skmDf1qsZjuyEs+GQaPiWSsIN1tpY3VmAkjIOnMV4uRZWoAo0rvx0jC8dg9MGpw1OG5w2OG1w2scyBqcNThucNjhtcNrgtI9hDD4bfDb4bPDZ4LMPn8++IsDUN6872Wf3J7ZKoX/5L/w5UKhl7bcQcKeUwpQ1Iro9Yui9cZgSII9GhFvYwN72BmZv31/cSarQaxSWHj32vkmlFNwSU8okzYhszdM8orLqIR3drrmBrTnuATRtIyEI6Wm/tubcncbx3nQvQFe6dC/tEXyrDStGs944jIiYusG2aK
3fW8xI3vdXgnbMwx2SKmkWal33+TazvVHe8Xhkq1v68uXLnkmhpGnLJAgDdSMyN67HZtjuj/6tNbIWROK5TLZ6lK1nXei+ZiqXa6omUo59amZIs4tBdbIspexOlTSB9zq0Fo3q+kQCoAOeKOuC4L0O5EbItnt96g50c3MTTeA6AFLpUfbIJBCJ7IVaCkmUlHNIXw9HHnhNq5VWF9bzuksLY+rClCdkgtenE29K45Qnfu2v/BVWnBff+/1YNzfuS6EdbmhpwlMizYmlFlopiBveCWaLJG9S4OPxGHPWkKvOecItnH2fh0OzaJK4rmtkC0wZzUomk6a+2m6YVdzD/txAJIU/qD6uq7tfPMBY3Pa6weZExdnQe/dNNpAeqW+GJFDC7g1BVDnkvNvOsiw0a339u6+5IChPnjxhWde93qh3InMNItxqDYtokIXBltcjklDN0fywFtyMeZ7jYIKTc350KKlWUY+aqSqXw01zo1rUWt7s2mm7fwOIBpGJEDbosvu5SK+Vak5WpSE8PJxI00ROiZubm504ITKdtoPU3sjSCq3RsfDiXziIC/M8cZgPIEKpxnlZOJlxBuohXl8N3oTJ0zyal47x1WNw2uC0wWmD0wanDU77WMbgtMFpg9MGpw1OG5z2MYzBZ4PPBp8NPht89uHzmb733THGGGOMMcYYY4wxxhhjjDHGGGOMMcYYY4wxxhhjjDHGeGu8V8EUcrkedVaJmpz9rT/+4Y/5zt2Rb98csHmitog2Tv19374PuPRovEc0ujUHaUCKZAN9LFPdf3e43PHyerNGKxH1y71G4RahFY0oc601mvi5IL2uYhLh0OVr1tpeL3OTCIoIpV1knKUUUsqIJsxaRCtVSVtdTxFan1Pq0UPziqZeC1IjIqspIVc1SK/lgyHpDWmd4T1rQ5jmeZdbWnbqWihrCQlbSTSr1B6ZFhWkxDXzNDHNUR8WeiZCbTEPCTldu4o6llIeZRbkfDEJ1S0rY6v9uezNEa0ZbrEXmqK52ZbxsNmOpESpJeTOqsyHw/7MSTWaAPbtjTZnIU1uLaLnoYzVyCTpclUg1hO4kRtev3pFKStgILFuglLWutuFVedwTGBEI737e0qJGrj3r950O4thCNVhdefFeeWMcPzGN/jLf+1v8KoU0tM7/tJ/578NwG98+nMefvZTyh//gKrC6kY9rUxTJseDkFR221JVDscDc48G5ylHJosZSRPN7bIXPXvCLeSVyFYDNeqh7k3k+toJUeszvh61e1OaIpKe8yM/Csl1NHXbsgy27JRNNnotZxYJH9qkn5u8W6Tvc5dri1vIlt1iVg5mMbuUlNu7W9I87dLlV/dvaK3ilfAz2SL22uWxEUWPfbRdyl1K4eF0IuWK9dqtZh4ZDN7n6xfb3TI57Mr/9ApzvEfil2WJNVDBPUeWStu+E2sREndDtK8jcDweaD3r43R+QJOQuni/1LpjaNRNXWOfRZjm2y7d7euH7FlQpYb8tcJe67QY/ODTFziw1EYTaB7lBB4pxMf48jE4bXDa4LTBaYPTBqd9LGNw2uC0wWmD0wanDU77GMbgs8Fng88Gnw0+++D57P0Bptj27oJgEpInB/7kJz/DvvmMm/mbFJn3plJJruRwfXiXZDpRU7SJ4W2rbxk1QUMqud1zk1H6vki7wYns8jQIoN+MNHfJnIgGSEsmaQqJJAHGSaLeYIWQ5vVNE8IRS7s4WzQiS/v8QvpqqAitFrQ1WpeZTVPuBNBroXawR2SXC+5kJNvz9dqXIrt8b9uxaZr252YWSicOEUFz1BdtbniXwXmXBR7xcMgO0ptEUtCoP3u9L33dtutua7y9t9V03OSTa1lImpjyHNLC1q+tXwR66eTZSqXBDvT4tsMSzn2ZTehriWuYWQfI2LdHB4EURJHTkZf2glqjIZxmRURJacLXEvc3B3OSpLC9tfLw5hQ1T8053Z/IeUL72qzmnM04mfFiWfGbO+6+9S3+a//af5P/8P/9d7n75Cl/42/9zZjIm1csP/0pv/Xv/J+py5mlrui6cMwZzQmxkIJu65FSYj4cmA/zTqqtGa1Vkmas2Q5G1mufmoW0MoA+74cSc8OL7Wu3SYilN0rUnELGrDlqAl9Bvfc6u+LbqxcprXYg34DeAzlJeUJ7cd/aGko/IF3blMmOFtvdNqDX5MyHiTRPXabqHehtl6lGvVrdlMsdfNfdZzRF7d1SK3Y6oanEoa9V1vUiMRcRMjnW0kPu3lrba42GvW+i+yDV1hrrGg0Sk6VY91pxtpqzl8PQDvS9PvJ8M7MslWqN9fQAEjYKUEvUkFUNcqmthFRWU9iJXuYhouCBkWsH+ibCCiyinLzxo89f7VhcBfayveN/uHzNMThtcNrgtMFpg9MGp30sY3Da4LTBaYPTBqcNTvsYxuCzwWeDzwafDT770Pns/QGmK8RuUaxzd0wDPl8LN6/fcOONb97ckOcDUCNy3Bz3HlV2Rx1kSgGublB7TdGccCJSv/ly1rSDunIBeJFonOXtKiroHtHW7e9gFNalMmXBE1cg4SDO4XBgztGobV1XyrJ2g7nUqtwe3XqNzwDmYKzWWjTl2ywSuDkemA4T2hRNGUdo7tgSdTan6bKY8ZXIqLD92VLUX3Sn1jWi5/2OOSVSEvKUsGYgjtWMulJLgNxaCgC1NrSsaJrxUmlmTFMCN8yN2mtrxjzCaI/HIznnvR5orEPUmsTYm/7VUmk0rNFJwCnW0CnTStROpc9DUiKJRKaHRUPGViqtRBM699gHdmJptFpwr0xTZsq6N+5zIvK87aM1A3OUIOPWKtM0M6UZdyhr4ZDmAEeEwzwjBsmVWWe8ClgmizIdt8Z0sTdvzve8WBc+L4XP58y3vvkNjr/yXXj6hP/f977Ht5YH/sZWWPLpLSl9hz/7V/7L/Oj3fpfy6oFf//YnnD/7lKRRq3XrDFlr5cX0guaG4UzzRCkFEHLOLKeVUuoeuV6WpR9+hPNyBqKWZvNoohhZK2230eKtHzbC/tJ+MIDmFbjUZHW/1Dh2wLWDDB61TOulLigIKonb21vO5wfWWljXlad3txwOB3LOvHj5+cVXkpKn3oDRhaxpPxD89Kc/5du/9Mux78A0HTCDRqO5sBTDvDLlSq2NWgzrSS+aIwtGiB9rznl5wN05n8+8ev2G29tbtoyN4/GI49GMbwPt7sMpxfe3Pfd+xnCPNb6uHdtaxZrQOpE6guREa5VijdqcujhIwty5v79nKXXHj/lwCBJ147w8sCwlsghuDkjaSCRs5DDPCMLD+QQoVYQHnHvJ/JMXn/KDzz5n7VMzYOd5IqNgjK8xBqcNThucNjhtcNrgtI9lDE4bnDY4bXDa4LTBaR/DGHw2+Gzw2eCzwWcfPJ+9N8AUmx8G4N5odgF6gNenhZ84/MVf/S6rVWg1muxZI7mRupFr2Au91xp4LLKrYEQ02/dIqCAJUpcjmjvRtCt+RKIZ1eb0W9RZVVFRWjW8xaa6Ga562VBiM5Owy/7meWadVmop0cysNxYTEaaembD9LSKYgdkmrYsFB6jNSDUkeqXUm
L8oiDNlepT0yrDcMO99+PZMi5hfs4akkJY6UK03+fOQAeYpQ0rYahwPB2qXL/YF7FHZkEgqveGXW8hLW4+49ufZnDV1MikdqAGsNtwMsQuJtmbUeg4QIwAfkQANd2yLuNYS2RzT1DMkor0iHVBqa5HK0aetIti2EFdGllLa8i324fToMzDPB9YFlmXl9uaOrBOqmVY2OW40ZlxtgZ7JUs3QlNEsTAhvloXWYuYng3uDejjyb/wv/xd8/uIlgrC+ec1/62/9TXzO/Mlv/0MAnn/nm9ze3ZAEppxoSVjLCXeLzA0RJIUji7UddMyCFB0P+bdZSCdxNmS7fmKBPRtHJTISUo7mcxBgVGrpmQMpsjj6UosQcu23mv7tTRp7k7qNfO/vH3rkW/v658gWENjyd1SV+XCMZn8CmjqMiJEmxwgZuDUDb12qLixl5f70QGkBhNYzJNCe5dE8/M8aZi3u1aXTufvrNE2b+XI8HlmWhZwzx+OReZ5jzVXRHFk83tf6OlMmfKzPjzgcbZLWlHNfP+Hm7pb7hxO1NOifNYfqTm1BGqpCKQ0kov8P54Xj7e1+SGmE/NyMneQ3X13LGvvSd9ucPvcJS5mlNV4thZcKr1fjTYGyAbqG1Lle+esYXz0Gpw1OG5w2OG1w2uC0j2UMThucNjhtcNrgtMFpH8MYfDb4bPDZ4LPBZx8+n31FibwrU7vMDwiwWprx+rzy4rxyK8atOLdTRrrMMPVJyea7El90Qlrp3YDcHwnpCEC/+rxfXhORqO/YpWCtdRA0pzUDt3AK6JI75+I28buZR/1Qidqhm0y1topeSVUDLGOOsl+mR527fG3LaCi1djLwfc45J4g/+wy2LXXMuKyBb/O9ADry9rQFTUprNYxIBLCoXSrstVBjDtu/W/3TiPoH+UGaU38UIedOmk7/vOCiQbx+9UOQk5tRW8PsgPtGxBLruTEq9P1oaEo4AmoI3q/fDfvKATVJr//pb8E6+35cG19sgTDPB6w561rDcVOs8lbnNiS0YK3Eo+A7EWdRDKV053Wg5EzF8eORv/Cv/jV+/KMf8vJnP+cPfu/3+O6v/xrVK59+//sxjdM965Nbzi9fMrmTU8JbQSTsubWG9yyM0irIW/ViAW+Ot5BPeif0eMM6ALM7nveDznZY2mqQqupei1dlqxl72Rc10LSlPwit9Vq0KaEi1Na6D1nU2026X3s/4PT7qWrUA56m/Rljj4Os1Q1vvpO4dJAHejR9CZK/eh7ROEDtNXq126qDdpm5aEI1MU0TtRbMfc842oB+k3fLdvjYTLcDqGyHkZ1sY61ba13+vkFO2GFKGURx2iXLpr/n+zWFthbSFGuTcu6EG1LxdV0uB6FOQt4PcM1sc+7uD93+NbM6nA1OzfnsdOLNWlhtT0xBPITu4btv+ccYX2MMThucNjhtcNrgtMFpH8sYnDY4bXDa4LTBaYPTPoYx+Gzw2eCzwWeDzz5UPntvgClsNCYjSDQQ20C0GhW4b42/+w//gD//3W/wZ775nOdPD2RRxIzcEUy9oR3cTS51TEXjRzcg9ACC7XWBvfmc0EG0g5FKGMDxcNzrGNZaIQmaw0hbreDs0riIpjq1NColALPXpsxTxtxo5jsottaYUialHBH2vich8fOQx3ZHfjifWdY1akQKHA5TZCLgYFMYfn/yANwawcmUkJQI8A+prLeGzHOveyndeWNDW2vUtdBqQ5qjXU5aa9SMnOaJPM2RyaC6O/hW7zTnfLUeAinqxpo3phQy4SQJTxOtlb4WHTRrpdZKM+8ZHE5tBimyBNTZnXoDCNkMySSaz0lCNOSTZVn3NZ2mxLqeMascprtwdi6N92CvHIt2ss/TzO2tICi1NE4PZzDheExBoASZPLw58fBwojSLKLCA6kTWxEEziwmViDzz7AlJnOnJLXzynF/79rewf/Q9/rf/q/81/5t/69/i2XHm/IMfAvDZ7/wOp5cvmJeF7z57yt10wM4rnhNeGw8PD5xOy36QyfPMWguGk1tGEbwaVo3cs2C26LZ18Cnt0ixuG6WslAIp9/XQkASn/QAQNqa98SRKb2IXo1bpdYEVzCnn8J2taV1KGiDXfQZgXVemblerLKgq5+XEspxD0ozgPVJei+FiiCrzfKAUw2oQwsPDwz6PKR+CJCwOIGupSG0cDzPugrvQ85Cgg9rNzQ0vX67RJBLvGTCZw1H2bAhEdpIM01VSz7ownGaF2iwa2gFrbZTa9oahsaaNaQ4b9050EGDNdpjSsJl6PnN7fMJtnjjePeH+4RxzQTgvhTylkNpOGTUuz+WRSbVJwU/nM81gunvGq/PKvRknSfzhD37M4oZd4Bd3p6wrPVGF7XwwxvvH4LTBaYPTBqfB4LTBaR/HGJw2OG1w2uA0GJw2OO3DH4PPBp8NPht8BoPPPnQ++0oFkxLg6N72Oo3bcKABJ+Cf/Owl59X4jV/9M9iyhCRyi5iKI0RNzmab0YYhbjK4VgKozB1K6YaqeyRcevS01jUArM9hnuduoNZBoVBLZZomDtMREeX+/j4edpo43hw4HCbAwJ11XXcZWshgD/tmr+vKPM9oypzP54i0q0b03ls4V3cI7QSUBMqyYB6RzFYb61IQ6d+LXQrs0256PRpvXcooKOtaEI31y5o4zAcOh0MAV200j2i4u7MuKy9fvgTg6dOnUTOztWgyZo1SV/IUMkbt39kluB1IRKXLa7ucs7ZdqrplOiync9RWnWdKWVlLodTG3dNnpKzdWUM6l1IYc1wv9ubh4YEpB4FNU2Zd17i0X6LIKSVubm72aO9WkzPpBXzo0dzT6YS4Iylxc3vH+eGBlCZynjmdT0DUbz09PNCas5aV07ry7V//Nd68eeD16UTLM6/K1gBN+Fv/0/8Rny8LP3n1EqkGb16RX7ziV549Yfrpj1hq5bPvfa87j3OL8CzNyGlhVSenaBLXqmGlUXoNTlHhcDz2yDqcl4W7w5Ee3ublq5eoJLQDa20NF1DNrOsSRwQJgp2nTM6JaZeI+r6nnWbjGrUhYl3K3SLDgqgdejwe92j3BeSN58+f79kdcJGshqVGxkUxo1bDrR+4fLtrYp4S61I72EYWQWRPBGGnfNjJo3k8Z62NaT6CGW6R6VHLSsqJ29ub7psODq9evUZVORwP0eTRasin25YZECcHzXPPRrjIcPfDh6T9cAjsUvfDfEPpPtjM+OzFSxphaxuOiiiSMia6109VTZzPK05hqY1zr38c177I2Usz1lJxD4n0NB+Y0gSdhB+WSjEjzwc+e/man7564Eev7ild1r4llUj3mXAF2X8f4+uNwWmD0wanDU4LSx2cNjjtwx+D0wanDU4bnBaWOjhtcNqHPQafDT4bfDb4LCx18NmHymfv78EUl3rvdTawf2jGi4czf/zjn/Prz+5QSczaH9EUwahdrifCHlnv/8ceKBX22omXuBldCikgF/Df3xPZG2S5+L4BpTZUnH2bVGgtoyodbMM4mkQtSs2JQ89Q2DIJzB3rMtRHa7OnO8j2QkQY9cpJUqJ1Rzczaqn9ETvRifb3fAfY
zTib+S799OYkTUy5NwKTIMTWGmt31Ov1CGcpYfIatWNLaaQU8/a3NlQlYvStWdSaJKTG7v7IiLa6p6ohmY36kZCSkqcZEWW2GYCyVs7nDaBiJ601TCp4Cqlktx/HKWuhWSXnXtPWO8F4d07VfW1rCRKyZhENd0fQnaACKBp4gJ034zAfcBWaCr/0X/rzrH/0x7w5P9DUOdWFtTZElbtf+Q4TkD97wo9/+3fwN2/4/Ic/5Jt55s2ffJ9kjel0BuA4T9zMM08PR7wV8ACd0oLYrLWoW9sXQNLFbjdZI+qIRwNH9LH9hH/ZvkrA3kRvywzZbE/Ee7ZNr8GKEHJXwVvD+9HIHWpZqTmjErVvrRlukbEzz4cg0k6worG+1Vu3G0E1BRER0XRz2fdYRNCcUXPEao+UR7TcCXnmNm0ngP2a6AGshfQ8cSEa6ehm1uKAuGdMXLDjkm3hHbwTTkirN7tFYq1rJwcIWfV24nALGap3f2hEzV3bsEgM8QZZ2eoZI8JaKuYB5tf+tdVE9j3paJu7xHN6izUAXBOeEydzPrs/8/nDmfu17hYA0EtGX5Cx74nw2G7GePcYnDY4bXDa4LTBaYPTPpYxOG1w2uC0wWmD0wanfQxj8Nngs8Fng88Gn334fPb+Ennx6BGt/dL3txs5r5aV/+/v/yF3f/k3me6OzLoZl4BF1D7nRFIB73KyDemRvmMCvWFYyNQEo0fqk5JajhqO+7UvDhByTgGHUkrUIUTIU4C3Nu3N5ByuQTUZEM288nRpsLcsS0TUPbIaNuOPR1aci4RtW3zvML6RTxMNMmtG9d5ITUJemJLuJBCbH9cyd8R8b9i11oKKktMUcsT+3LW2HtWtXS4YjtHMqGujtmiqdnO84bQumEXU080vEuAkJIlnK7WSUo5tuHKgbe+naeq1GrU3JhRyiqj2zc2RPM1747X7+wdKl5mKRK3TVhutVXBHJe/OK8D5fOq/Z3DB6hYBD+DfosAQ4F3WgtXGnCekZ21Yc9al4HZpGhh7ptze3pJaRdrMd/+rv8nPX7+k/vQnWIJTjQwDVcWe3vCNJ8+4u7nl//V/+r9QX99T7u/5Tpr47B/9IYekPO0Lc5syt/OBJzc3nM9Cqc5al5DEdoJ5+vQu7AmneCVNOWS20uuWGmQCiK59TJMGmVuLdZEAXVUlJyWly3qEPekF5EVIIoh3mbEb4hcba7VQ1xXMeXh46PVww9emNIVteZcHi/ajXsU9Dgo5T5TWDz+awgec7n+QpxlHaB4N9Fr3c5cge/fW/SN8BY39yr2x39b48HpsWTy1ZzyIy344sQ03djyQINk84S0ksM3Ch1NKlFL79+j+xu6HrZMC/XBh1UKS3iXfZo5ZJaep748DylpK/N4PogQ/Molg4pGsZI3U5a0iQi2NRqNu85gmPCsvlpUfv3zg5Xmh7qhw8cMNKS5/Pz4Uj/HlY3Da4LTtuWBw2uC0wWmD0z7sMThtcNr2XDA4bXDa4LTBaR/uGHw2+Gx7Lhh8Nvhs8NmHymdfWSLvq4cACcNZ3fm0VP7e7/4ev/GtT/irf+7XADgm4Zgyk8TDYjDnOSRkEs3XrG0gKKgZiJA1pHrajXDD2fP5zFaf83A47KA/zzOtGq02Uko9UudYD1JWaahWpinvmQelFJZlJRq3zUyHaXeglNK+YdfyTnfvc7iqQahBinUt4N7hS3n+/BucTydOD2fubm7jsw40o0rr+xPImlJEaQ+HA/ilZmw5h0R0LYW7m5uo/Xg4sKwncN/lnduckyrTYaaUsjtIIsh2e23bOk2JnDNmxvl85v7+vjd9c1ThME37Wm2y4Lo/dDhD61HiyFoIkyp1Is0T+TDz8OYN59MJFSUdgsS3tYTuZNXQBN7BZyO/2FvBDRqxkZvDruvKUY5R/zVnHh7OVHesrFjt9qTKk+fPWEuhWoDMf/S3/zav7u85nU/cv37N/XKmWNTuZDlDSrSXn/OP/t5vcSeJu2niO7dHDq1yNx148sknQETFbVmomkjieFJKhfPpREqZu6dP0LzVIHWsOtM8M08zU86c39yHXanyjU8+6UAdtrwsC8u6cj4vvSkbiBiWlJyi9mgp23pELVRaj9L3Q8Z10zf3CxBmTaznhZWV+XhgnkNCu0k6W2t7ZL/WitMPOh4ZAeZxUNo+v9nH7jOuWINWt8/Yfu8NiCNbxvbnbdWBhrqgEIDoyrKU/RlFoJQFSbKT22kJP1RNexNH9zgk1PYQc7bHDTRLc5yL9Fm1N+V0Q6aM4/v3JGUS4H2eiuBJOuklUorMhJQmkH5wZXu/Z0js50NBuBzuTIyKUrs72nTg1dr4j3/7e5wsak1vQP8uKI/X9erAPcY/nzE4DRicxuC0wWmD0wanfQxjcBowOI3BaYPTBqcNTvvQx+AzYPAZg88Gnw0++9PKZ/oV73/tsd3I6BIvs312vs/yq7M3dmO42pgvv9slk+DtsX/v+u3r3+XLrs2jqLVcvfb2fN5577deu5bUvf3993717fn5lvkQb8pb19rm/IXrv+N++7z9st5f9rNFiDd5HY/u+XhH357D/uzbz74IX7oE+/z2n/6FLzXkjbOuJvOuz8a14nlqKT2izdWaXq1N/7EWjQ+xLlTsH9ylotff2abiV5Hfd9hM/P3FZ7heu+s1fNvGvszeuZrfFz8nPTj+2J4uPsa7bWdfj/fe6tG9vuz390z6Cxf9ym9dHRD8+jtfWKvLHN7+F97xzPK25POL/tUd7/G7+z5fXn23n8vjf337f499q5pdfOatb38JYr3z1TH+2cbgtP3Fd85jcNrgtH3Kg9Our/SFiw5OG5z2p2EMTttffOc8BqcNTtunPDjt+kpfuOjgtMFp/7LH4LP9xXfOY/DZ4LN9yoPPrq/0hYsOPvvnz2dfoWDy9/z15cPojbS6FixL1EtFt4eRzfPjH7ls1A4oXDbq+oFjo2R/5YtOdAU0KnvtQK6+Hw3thLfKTu5gsBmQiHSge7ehvGtNrv/eouFJlXRVu1IkJG9ujry1e5d79+dB9rqXj5/1HWjxjnVQ6d/fa0peEc9lGff7mBn++I3LM21IdnX9C5j36KhdosaX57l+vrcc9Or3aycRHj/v1a0ffXa7fqxZ1Ke8EERQxLaX2zWl1/1UEUwkotQWf0ut1POZ5f6erCHjTSl+NGnfu+0ZI0puttWONdyM1Pf8bdLbpKTXa+AWsuS30X9bE1WlbU0rAXHB3FC/zsS42Jpbt9crKfa+3184QF3ZFtvx4fEIEbr39y6gJOKY9fv0/67Jdvvctf9GRF/2OSR14JJV4mzPdFmjvSar6JWtXdmCh+2Jw8WhhZDoXg5HFx++ZANtDfEeHWzkizb6BV/zrbqyXw4wfvne5lgXzOLKBgS3yMhAo8bqVmf1vFZOy3rtfbuPvvPwcnWnr4vNYwxOG5w2OG1w2uC0tzZncNoHOwanDU4bnDY4bXDaW5szOO2DHIPPBp8NPht8Nvjsrc354PjsawSYLlGtd40txhsGERN+VY3PSuVnvR7lrTSeJngyHVBJiDVqqWzL1UT2RRcgO7T
muEVjK2GrxxoziTqQMfVrR4p6qI72uqiqjVajVihEjdFSCmbGPE8hWZ0y0mJTa620WvAu70spoVcbv9ZC2+R8vbbkLtLrQGo0pF/rfD7z7W98kyln6vGAWxifmVHrwnJeyDlzvDkAsNYQpk1uzNMcwCNwe/ekT8IuwClCmjK1NQRhyrEe0dTPuL2bERKaEvN8xCWa9InIvh5u0UgwaUgadymq1C4d/CK2OnRpXiJPE2mKGqwhaRRqlyUutYAIDw8PlFqxvnvNDC+FYivXTh6kpCSdSJJB2m4Pl7q3cW0ldZ9WSmm4Fdxhng+cTieWZWGeMtLnU5Yzh8NEloyZosWY5YDkjKVKRcgtGhbKT37GTz79lD/63d/ll++OHFPmOE88eXrDkydHFDivp7Db1nAzaivkrJg31vt7nt3ckKeZNM+YxHOrCHfzTNKEmbPUkKK2UvHWOOYj113Vam/SeDgcOC/LhaxEAKMm3SWitVr3i9Qv4EzThHvb5arWrg4q5uSUyHni9njT699K7E11FCV3/zJpAUqbCYh0GSlYW1iW9REpHY9HVKPBoHs80iZxXUuhtcIGnEdJzHMmpYnma8iVNSjFktBcqLbJ0CemOUft1S45do+GmtKbRabuA+7OujZKXzdNGvPYJKsSvtaqXfbRg7gkaTThNKOUhibtBNJtEKe6BSBrQjTFobYZzbwfbjugx2+UGo0Jb29vKevCUldO5xW5nSkpUfpa/84f/ZAfv3oNk9CaUzvAyzvAfkPLL1aNHeP9Y3Da4LRHmz04jcFpg9MGp324Y3Da4LRHmz04jcFpg9MGp32YY/DZ4LNHmz34jMFng88+PD77xXswPQpzXaJZ2xu1O/RPX9/zD773xwB899kNf+aTO751uAWEWRJTdmotiEJKiljbHSLlCbb6iRA1C93BDJVLXUS4RFBVtTd2U1R7c70kuF2igG7ROKuUQq2Facrc3B73BnatNdZ13cmjtW5EqiTJnNc1jAK/fKY30LPaG6IhqGZqM16/ecPrN/fc3hw53tzw+uXLoM7WKNWo1Ugp6jg2M/AwkJymXi+0U201Wi202rg9HFBNpASrnTELoN+crZmFU61rrJ1AsUbKib2Z2L6Xsjf0u35NVPeGhk58H0JCV5tFLVTRMG4KXhXNE5IzqdeMnOYDT/LMy/s3kAPAy7LiKiBRYHaLduMeEfLmlFIpa6W59ZqnSu01P7e0EJHU67smSg0yL6Xy2aef7bawtkv03d3RHM7ezHn25ClHM8618PMXn3G8u+VGI/L9n/0//0Pq+cz55SumVjlMiSfHmV/79V/lfLqnrguVfm0x0ACO6rG2zRrz8UCaDkieSL1JGx7PVluNfSgr67qiCNM0IQr1qgapu3c7qXudYESoZvzyd3+Jw2HmJz/5yZUXeidEx904nU7UVsO3ehbFBr6tNXKO2qelFNwFkRTE2A8Bl4yT2KOyFiTnPVpvrfXDmVBKb8ynjsiKqtFaEFUpJbIl0N7ULyL428Fsmmamaaa1FzuZ5SlxOUJ1u3ZHW9Qo7VzVJfGRCeKiQTA9i0K1gbSoj6oJt6jhWrf1tQuIGf0c1f2n1UptrR+i+gf04l+lNpo3smZEE3VdKaXXiNWr2qQe85XecLOWsNPqDtPE2YWH0njojQ1ft8aDw1L7EfgKay+zfV820Rj/VGNw2uC0wWl9PoPTBqcNTvvgx+C0wWmD0/p8BqcNThuc9kGPwWeDzwaf9fkMPht89mHw2fsDTG/rPN++/iPA90cvnUrjZ6/uAVCMSYU/8/QT0DCqKSniEQHdGu5tcrWt0ZUCei1R6yG1MDDf77xFWM1sd6xN4oYGrlxPPKKGjVqhlBXytL3zCAiDRDyyE3La7xkRb2hb4zEAb2GkmiIa2RprKdyfHiIayczaMyuiEVh8Xrp8d3dS2GWlW/zWAbMw7FJbEFMHBSPIcWM+UUU9MjNcAAsnnea5XycyLZDLNopqN6rYB/p6qup+3fhcQjMkd5DUswqM6o3JjOTsDc+28KekqBuKQ6sxX8zxZniLzIrtIb2vzbIESU1TRMGXZYFO8ACpX8/MqbWFHNfgvCwcj0emw/wo+r1JoV1AVLi5OZKaoTbxS3dH5mdP0Clc4bN//EdoqUzuJDfu5plnT255+uyOWs+0Jmjq1qKKuKMiuIZMdG2VRtxHddtX9oj2utYAklKCrPpBorYaJG3Xux6AustEPUD87u6O482RH/zwh309otleNNazHcwpTq3bMeztdmsXOeg0x2FDRCjdRi8y4cgCKK2SdSMLj0ygSGm5bKHHfgSYd4lpNzRVIaVMa5Et4+49gh8NArcov6pgLpgr19LZZg2vm9/3eew/Aa52ZUeo7geK+Gw096tb1kDzHeytXeLxZr5nPohuqHC1H/0azRsp031YiYyrngXhxpX59XUOiXExp7hTRXh5XnjVjDc9a+ehNioSRHAN8l+A4XcA8BhfbwxOG5w2OG1w2uC0y34MTvuwx+C0wWmD0wanDU677MfgtA93DD4bfDb4bPDZ4LPLfnygfPb1ejB92fX2yTyWswpQgD5/fvTyxMND4Veff4t2MyOHidvDARHDWqVY26OlgoEJ1ipJBJ0Vz706p0fE1mphly2qknMi5100yiY7BUVUSFO6PIeHpC/kbhFx9YOTO0FcA/1GCFsdRlEhS0Y1oeb4Lr0LiBZx0ETKGfMwrJevXrGWleM8U5dl3zUz43gzk3PG3ZnnmVJLkAcRKd14VmUDXGFZC1Yr1ipWKioR9ZeePTHlCVHZDd+acf9w4pYAcEfQaYq1FkGVDhDhnN4/198gdakjhBTQ3TmY0xyWZaEuC8u6cGh3TAje51FqZVkWpuMxCMTBEdp5DcniUqFevCErIEFQr1+/CYmlHMkpcT6f98gzwJzjepER0kgpM02Zas7h5sgn3/gGS1lw78umQikLQmSl3N0esaWQp1v++t/868y/9qvo8Yi58+/+7/73aGk8u7lhXSvffv6cb3/nOxwOhw5Wih7nfQ93CWlSmjivy8JcVg45M+eMmu+kmnPmdDr16Lrx9O4JmFFL4f7hgSSJJKnvzfYjHKaJUlZaq6gKN0/uON7c7FH/NM8cDjeReeKN1gKwW4uDlNlj4Ekpsa4NOJNSYp4LmgIKIrPg4vCaM6KCWeXQDyW1VmqtpJR7VkvqZu3UGpF4JHA252nPRhBxlmXdfexUH4jGhalnN2Q0KbW1yGSByDwBllJorbFJqlUVNO2k2yK14OK7aEhJ3TFnzwwo/ToboANYY7evrZ6vIKScgkQEWsdWF6NZZIwEmUlIvE2gNoxNDu/7YalZ+J0ILOYszVlU+ONPP+WztfFqS3rRhKR0dQD6MuD1x7/Ll3xsjHeMwWmD0wanDU4bnDY47WMZg9MGpw1OG5w2OG1w2scwBp8NPht8Nvhs8NmHzme/eIm8t4ewg278KRFJ5XL/BXhZKn/3d/+Qf+2v/iVunt3xs9M9szUyMGmOULlfNebqoLyWRkqNpBqR5CZXEUIQUWDLIkh98beFiCji1IHNzbBm5JxwTzRrLM
uJUlasaZdmXiKuIlGTMJynIaReAzQjNeolti7j3LMcCJmZ5szh5o5SC/bwwHJeOPRajQI7GJdm1KUgKVFa1J2ceBTAR0XJ0xTzKTUkeqWhhNOKKq3vwZQy0zzH2qtibpzPZx6WczhVSshV87YIuPZsDRXELtkEDlRrPeMDNmliSonDNJFyIs8TqRw53t4xHQ6XtVNFcqYB5/VMLY3jPNO6rDDPM/kQNSxxZz2dESJ6XFvBvXbZauumcZFxliUcaV0LIopZRKW/8Y1vcHN7hyTF1phHSBsbmqNZXsqZUhbMCiq94VtZ+7Mbz463qCtTSpxboaxnTvevOf1kZTmfQYKUIaLbpVVen0+0AufaOB0OfGrGsazcifDkcBPxZXes11udpgnPfc/cae68OT1wd7wl9YyG+eaI5oo359WrV0xz5vb2BlT46Y9+RMqZ58+fx55PEzfHI+t6plZHNXE4HDgcD3tmzenhxFZvOAAzIZJ6NkbFa+v2cCF9+v5HZonD+dwBVnGXTiQBlDsYSByQBEHRfuhpu1/mnHbbrmVrcmfknCMDRyKTaLWKNWfuQF9b6+QS9pu6lFpSzMWAVi5A37rMt1nYTWmVUkv4ullkVmwT2TJmRCL7pmeFNJxmNWSx3Qe2Z67WoK7YIqRedzY8X6i17eunOZHSBCq8frhnlcQ5JV6Uyqerc1/jULzZU9uR88uyBb5+9sAY/5RjcFpce3Da4LTBaYPTBqd9+GNwWlx7cNrgtMFpg9MGp33YY/BZXHvw2eCzwWeDz/6U8tn7A0xX19S3brH//tacHO8RcOnfijyDgvN5KfyjH/+MtVb+4i99QiaTvaGtXVsL0OVfHo5aa0Py1kSPXcoJ7DLLaGC31ULtdUT9GvBjbhDEdB0tdbNeDzHC99vjSNe4CluDuS4ndQ9JaP+JZ7ROUF3mhqApYVb3GVw3BsxZe/1TQ1SDIPwipxO7CPUikUBIvdao1ZDbeakgGoa41bBMmWTONE89kmpoqtRmiEOWXjd1M3KJOfdY52WeKUjherPNPQDLDaRhCJoyyTsp1AsYo4pME9UMTZlJozFg6VHSSaI2pvb5rOc1ot1ie5PB1lpkevTo7uM9i3keDscdnPI0danrwrqufe2ENCsph4RYkjAfE22pVFv56R/9Yw4vXpKmGXcnEZJSRJjnA2bOsq64WBiCQ90kx922T7XwuhT09o6/8N/4r/P6xSvsdOJ0OjOXsu9jikmjSfFOTt7i2W5vb7k53DDnIJGyrLtNuzsqQRDTPAc5tKi5CoRNuEfdVIsouagyzVM0YZRo3mfm4QMFpmlGNdOqhSy0Z8toyoSMO/ax2dZsL/Z+z+jocui9Tuu+MdJJJPxHVPHaQmZd6h6pj2sHyWx7dXHqnvQjW45S+LnmhBP1YM0Dn122usj6KAuo1Nrlv0EQzeNg5viOLZs9pSTd9r0n0cRGt+7HIuyyVu//qcYhxcyoy0JpDfdYP73Cp9YakhIgmAr3tfBiafzkYeHUnBW2yrq7PW2Aeg2tb4/xP1/+KcfgtP0RB6cNThucNjhtcNoHPgan7Y84OG1w2uC0wWmD0z7gMfhsf8TBZ4PPBp8NPvtQ+exrKZjihlvDrrhNC1T/4mxs+6yyNcxyjIrxgPO7P/gRrx8e+I1f/Q75cGRqFVnOyBXQC6DuYFFrNCR6mdwjfls0E+h1Tn03oJQSIjkApHumbdH+vrmtxYbijyOn0o3mAvQgXBlF2HrI6qaZZIq2DvTWCYSYs0pIW1OKJmbaHZz+e86JtZwDPEU6qdkF6PuaA0iGnBJTymhmB/rSDBfFzFi7Y0qtaM7MeiTljLiTcmVdzoi37VaXLVPFLepnXrIzoo5nygm3eLZYvy577PU2pUdfRVOvibqy9g/neWa+uaGacby5Y5pnPv/Zz6nNwJyUggiTaCcexa2BR6YHGpkLDw8P+95e6oRKn7pGdB1hWZdONoW1LHtTtZSVSY8RoRdwdW5uJ0wKdlr4wR98jyd3z8h5Ao/6p9oj2vPxBnNnWRaMxuFmjnqnGzAlpanw0Cqfns88/ea3+Ev/+r/OH/yD/4wXf/ID3rx6zaGvkwAHVZJIP0A4a1ljL8149uwZc5rJm9R3WWm17rVJVZWcErc3N6zWMHFyz0zZ6oqWWnY7TikOBnmeUIVlnR6t33w4kFKmrA2IbJScMpJyROz7waGUNQCz2e578zShREPHagZX2TdhF3HQigwEYatZuyxLl4luGQjWny8+5/2g6N13toPTttZJI3uhmdFw1AANYlFlJyd3Z60lCMEatVUiMyaOejknWrXd3rdsi2bWDx6xRqUG7qhEhs1ueU4/dIbtLusa0nJNpCyknpnjQFmD3CQpOk+8ejjz0/sz33+1sBBZBG9RJRcYDxTdXtvmP8Y/+xicNjgt1m9w2uC0wWmD0z78MThtcFqs3+C0wWmD0wanfdhj8Nngs1i/wWeDzwaffYh89s9eIu8dY5McPm7AF8C74Hz/85f8H/+9/4h/41/7a/zK8zue3dwg9aoGozmSE2IK1mOJHVS2zdmHE4DuUf/xeEwkjeZbPe5Pa/GdTfK5yeakR62ThuwtpUS+dOaj1kpZC25OyvOjDIVpiuZ1rRut1woOOc8koFllPZ+5O8y4GdUtDBgiSj/N0KKmaRDEl8cNA/iF1GuixnU62CXF8b0xX2kNrZWb/vwiwnSYKbXibCCw1duMe7Y+h0tmRKynNcf9Mqec8yPQ9U6mayl4rbhGtBQiStzMub254Zf/8l/m2Xd/hd/7v/57pAp1KbRaeXg47Y5jZl06mjkeD+Q5U1vl/v5+X/PNPnKedoJu3YkRBwngaGY8/9YnsT4qTIcJ1ENWbAUhGgNm4EYUOa+4VBw4zjNWG3TQKKXiAopRNaL5T548B+DcKktdWfMReXYkffs78Cu/wqd//z/lJ29e8/LTz3j2K7/awV0CSFtDzBAX1Ltd9/2wWqnbTpqhSLc5iRONJNI8cWDC3PZaqFumxSaDjcZ7EvLPDlzTbvfC8XBDrVtdUGeeMylP5HnmfD7vEXgAUWHSjGbhcDhEvVLViL5rQpJxmI59zyOLw1pDVFBJV+QS43A47hk6J87YRpz9s4bRxJDcmzn65rtxMGi1gof1uxKAjYGEFBWCHJZ1RXrmDyKBJy0yZqbDAdWGddnp4XjEzGi95q4DJlBKI0/6KHMJETQnSukZFkqQjUQ92IflzHqFZa6JooqkicPzb/LT73/OTzrI101xf8H1GH59FJO3PjD+p8u/jDE4bXDa4LTBaYPTBqd9LGNw2uC0wWmD0wanDU77GMbgs8Fng88Gnw0++9PDZ18rwBRzuZ7RW2/y+K3ts7ZFwtli+kLDWYDXZrxscFec3IxZFBEHh+YFxXFxRHuzLKtIdQ5TRiWxLcQW1Y67RO1Ir7bPwq4AwsTBHGsOPWNAXdn+E5dHm6o6AY1mRkKioVfcKsDN2aO5zZ0obelRJ7E7nl0RU0hCQXr2QMo5iKjLFlWUKc9kzbRWu0QwANk1pKQ5TayyhPzXW0grp8xtbzbYEx4oLQAqq
YbDq2AN1lK5fzjt153mrfGedoIO67NmmMieQQGQcorGf9sS9cyOpSyYOWZRPzLeCkdqtUIzkihPDrd4Xnsd2U3S3C+VhJwSOSnR4DCzSXub2SOg177nrTXOy7l/JuSQmpTDfHjUfLHWys3dDckbdXXKUihrpZaQdTavu63SZdPuxiFHwzeaoc2RBqawltiXszkrieff/i6//q/8FZ7/+T8Hhxv+6Ic/5sVnn5M08fJ0jswRUY4HRUtF3RBxEvSIuSPutFJ2G1sfTrS1MqVETSnWfso9c6TSWtszGswii+B4DCDe642602rrEmN2gA27LF1GnGhueCt4uTpPdcIxD3mndjvaGjsup3uaB6Cn3A0i8JapE/HWiG9ZFkoppDT1bJQgkbVGs8iUcrefAM7D8YgJvb4wuw1EHdIgPUcwl10+7QRO0HFmmma6s+GuVDe25neG7zVPAebjkVYrbx4e0L4A2g9/rRnenNoPi83DX1OO5o6RYRGZTc2MpTfMDPQRWlKqZJYCP/zDf8Jnp4VNmOv9cb7kfBf4tR8Bx/9o+ec5BqcNToPBaYPTBqcNTvs4xuC0wWkwOG1w2uC0wWkf/hh8NvgMBp8NPht89qHy2ddWMG1z2ibwhRk9esN3aH/XdSAkWieDc3MWM+Ye1fZeDdD7X943ZQP7o0xEc7guL2v9c67sX7SNaB6Lu9xCRmrGDiKxhJf/4CKbUwH3LqELjtjBqdlmOFtUvgEWss8uNVXdsin6anSA32YkqogKGHskOOqAXkUut+/FTfZIf8wnnjGpRs1LtpqX1huqRRR62qRzHrVWl3VFenRbVEkp7qvSM0C839MvTdcAkqcIaLN9t6PCtk/OXjOydYmrmeG1IQaHPHFKiapKv9C+LtKlsZoksjK47JFf/UAnzB49L71WqFnId1OemaaM6iaTDGljSgl1cE3UYgGAzfqzhgzT3TfNMfR1lbgh0onJzan9Gas5VYTjk2f86n/lN3n+F/48nhOfvXzJ6/sTz1U5lboDfcuGtC71Vd9wpluH4db6IQXqWrB6ZRMatWmdkPBeSz5rrZSycnd3uwM9sK/JlkFwjSfW7WezUfOG2Ja1wO7s14eNDSgRpbYgENGwAxHBMESDsAMoAyw3e5ymvNd23eS1krsstQWZC0KeMmWTrncb2PZq88+oxcrum+4b5sSIeW4OK9TyuJng9bXzlDcLprnvB0mROBw1t12ubRaEolPCrdGsBfn142y1ttup45gIVRIP1fn+zz/jofpjaarv0xj/s+Rf8BicNjhtcNrgtMFpg9M+ljE4bXDa4LTBaYPTBqd9DGPw2eCzwWeDzwaffZh8pl/9kTHGGGOMMcYYY4wxxhhjjDHGGGOMMcYYY4wxxhhjjDHGuIxfqAfTOyNd73gx5FWOPIpfRaQuaUKAxRq/9du/y7ey8kuT8Nf/4p+NyK07rGduD5l5Uo7TjJXSo9fKaVmZ0kTSL8bG3IVajYWI6k9TjghyrzXpm0yVTSbp4EKrjvVMgJR9j7mmlEgpA13u2ZuwuTvrsvToufdn7tFWjTqUW5RxIrE1Wyu9AVx2Z26NeT4wTxPmjWma0R5aXpaFaZp61gBkod9z5Zinnm0Qst80ZebDgeN8iHl4r1Xaa13WGrJG4BKyhD2auyxLlyfGMx6mvL+HO5pSyPeAdVkiKuyGWdxbpx617k3QSs8CCJniRKmVH/7D3+anv/89yqt7ltMJzJmmicOU9wyH80PITEWE4pHxsa4rL1+/4nA47HOGkDJO09Slml1GOWcgJISSIxruRLbGlJS1rJFJ0pw0JeZ5JmmmVqMshdZrn57PZ1opWGuk45F5npimTEZYzyeqCGvfl9elsObE3/ibf5NbgM9fAPA/+R/+j2llRWrhd/7Ov0srhYwgLhznIxnHvCBpolqL6HozaqnYEvU8pdvr+bzgSTkcj9w9fUJz67LcSzx6nmdubo9Mh5lpnsmp1xUtjpshKT3KqLF+A5HItzGLdVIsJMsJksS+pinjDq1WllJRczQZS1nJeSLnvMuFEZjnCSFq6orCq9ev92h7rZXSG0UC3NzcRjPKlCh1YTpM0SAwZ06nNZ7Rr/MfhDlPe/ZHWRulVFJOzIcD8xRwZmaczieaN5BobFnNelaE00rlcDgw55Bp3z/c73YbdYKjQaCmHH5tHo0giayCpVQmTT2bI54tmmZCng94Sn3aQpWJH3/2hh+/euBldda+/u/NGrhKwnJ4lAHyzvG1PjTGu8bgtMFpg9MGpw1OG5z2sYzBaYPTBqcNThucNjjtYxiDzwafDT4bfDb47MPjs18owPSLjE1qCtskBDqAuAgGvHhzTxWoWfnN5mRCWTYh+NYATxMpezTFQ6lLiat2Oecun3VDVHEVGgEKiWhQdyGcWGJNclntDjTer6Gm+0W35nPepYuO9w0IyWf8Hc8oKYV08+q1/eH79TdpofVrCkEMgmDOLtertT4C+qSCtXDudVl3J1fRL9wupUTSxLm0/vpF1hjy2QQiO5k50FrU5hRRqvY6sraJQyVqnhIyPXdImkBCquvmu6RUEKap11YlJI1uxvnVq7he1xxH/csOBtCdMSOdOEX0IsXt8l7Ry5zzNJGnCURotsla2WWW235u3iJIB3Lf3xdRNIHa9jnvCtXYY+kAoyok1SAhHHfbScF63cv7Fy+oP/gh9umnfPbiFb/xm7/JYZ6py5nfJ9YoaYBUioK5uHtIOfE4QPS932rnCvEs5k5KmZTj0LKutUueozYtRMPFaco7eEiXJtOi2VzIj213TOsLJimeyyxsXZLsdTy3FVRNiMPqhhL1Z9ey9jn4o89eVjIOAlgQp3Xdt7eLDH2z1fCbvr7dNmtr1Npttvv55fAhaJeStw7eSaJGa9RK9X0dm1kQmkmXRXcNbpevbiLsWirW2t6c0CzeFwk7d9hr5Zr3useldB/ufi2K9c9VlM2UPr0/8dn9iZfnhbKvzwV+/EoWfFnG8b9C/jSOwWnbGJw2OG1w2uC0wWkf+hicto3BaYPTBqcNThuc9iGPwWfbGHw2+Gzw2eCzf7l89t4A0xfu/wsOv3K0bTQ3cEgoDePBgQavXZlcUeDZfEDnmZQUsco8zT0DQWilhdH32p8SCIpBRJEVTKA1Z0Iw070+5/YsYVSXebYW0c3tcwEEAT4RiQ8Hx4KkENgrGvZHS0kRVazVqIfam3t5B2K5cgd379F428mhlrIDstulXiSEs3knljdv3uyvz2nCqlGlsPYHmue5R0PZicK9gzQSztVrmO41If1ifFtju9bruZqAyqUeZs6Z+TDhQkTB95qrRkrKzd0tANacUhrLOda31bjucT4w5UwSAbN9TaZpotVYA9GEIyCJeY6sAc2XjJDpMDNNE3meaK1SW6XWGsDjhtfL2m2ZCrbXrhWaR3O7a0sNQIzmiUkEmve6ov1nSqDgrbGeI3I+idOs8nv/6d/nkx/9kIdm/Ce/87v8z/7Nf5P5m9+E84quBS2VlBJ3z4/UN2+odaV6YZ7ClrRnqAQTb6eMIFhJyjTPiCqtRdaBEQeLbZ9F
[... remainder of base64 "image/png" payload omitted; the rendered figure is the 4-column Kvasir comparison: ground truth, LVM-Med, fine-tuned MedSAM, and zero-shot SAM ...]
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "# Run example on Kvasir dataset. First, second, third, and fourth columns are ground-truth, \n", + "# our LMV-Med, MedSam fined tune, and zero-shot segmentation from SAM. \n", + "demo(\"kvasir\")" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "d688bf7a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "benign(292).png\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAABpgAAAGPCAYAAABI09P1AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9d7xtWVrWiz9j5bDDCVVF04Quf+SLkqQVWqCboKCIoKICXqSRbpAgcBEFRezmogIKyiUYuhFKgQZUJBjIUkgUsWmQ1CSLRuiudM7ZYaW9wvz9Mdd37GfOs3aqOlXVe/f7fj7rs/dac84R3vA8Y613hFQUhUJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCziuN57oBISEhISEhISEhISEhISEhISEhISEhISEhIZdLIsEUEhISEhISEhISEhISEhISEhISEhISEhISciGJBFNISEhISEhISEhISEhISEhISEhISEhISEjIhSQSTCEhISEhISEhISEhISEhISEhISEhISEhISEXkkgwhYSEhISEhISEhISEhISEhISEhISEhISEhFxIIsEUEhISEhISEhISEhISEhISEhISEhISEhISciGJBFNIyFpSSq9MKRUppZc81205TVJKD67b+dBz3ZaQkJCQkJCQkJCQyyAppXdOKR2llP7Whmvvm1L6oZTSE+tx9uvWnz+0fv/gs93eN3dJKb10rZuXPgd1f15KaZ5Setdnqb6XrPv6ytrnD6eUimejDSEhIVdLTuOk51JSSjsppa9JKT2SUlqsse+9TsLBkFLWunn4Oaj3bVJKk5TS338W63wkpfRI7bPnbEwQ8uYhkWAKCTlDnguieK7IKSQkJOReyVPFsZTSv04pPZZSGj4DzXqLlXvBKymlP7wu52X3qFnnqfOudl+WCSEhIZddUkrNlNLLU0o/llK6tf5B/7GU0i+mlL4hpfRnznj+L69jtUgp/YlT7nuJ3fe/U0rphPu2Ukr7du+DF+zSP5H0pKSvq5W7I+k/S/ojkr5d0pdI+hcXLPtZlUhs6J9LelzSVz7XDXlLkZTSdkrpH6SUfi2lNE0p3U4p/UBK6UNPuP9jUkrfsb7/9voH0N9IKX1bSul9L1j3Ky3uN70+YsMzfzOl9F/WP4QerrHjf6WU/klK6W2fqh5Cnjt5S+GkNwP5R5L+uqT/JenLVHLim57TFp0ib+mJjaIofk/lmOXzUkpv91y35y1BUkp/PKX0VSmlH0kpPbn2v5845f6zOKxIKf3WBeo/rZyf2XD/S8545sufqi5cWveikJCQkJCQkMsu/FBTFMVdXyJSSj8p6UWSPrIoiv9yRjmvl/TOkt6nKIqfT+Usr1esL39zURR/5YTnXizp4fXb3ymK4sELtP0RSS9Yv/3Qoij+6wn3fZOkl67ffklRFK88bx3PhqSUXijpEyV9flEUo9q1nqTPkvQXJL2rpL7KL2W/L+mnJf27oih+7JSyv0gSM7vetSiK159w30slfdP67Y8XRfFBJ9z3oKTflpSku/3G7H7P9JxS+lZJnyDpM4ui+Gdn3PuDkv64pD9XFMV31a69VMd9PJcURZGKovifKaXvlvSlKaVvL4ri8CJlhFxc1j9qvGz9eneV/varkr5B0quKoljV7r8m6eWS3kvSe6vEoqakP14UxQ9fsO4/JOlzJP1hSW8raUfSY5JeL+mfSfquoiiK2jPvJunjrH6+6LaLolhcpP6Q51ZSSk1J/0nSR0i6ozIB838kdVT64ieoxOLvPaWYT5VUqPTbT5X0g2dUu5D0oErs2nTvx0naXt93oe+xKaUXSfpISV9UFMW4dvmPSHpgfe0f1q79bUlfLun3LlLfW4h8l6SfkfTGZ7vioigmKaWvlvQVKaUXFUXxU892G9byVyQNnqO6nzVJKV2X9BOS/i9Jv6zyx8wtSR8t6YdTSi8riuJf1R77aEkvlPQ/VI7VjiS9o6Q/K+kvpZQ+tSiKb7hgU/61pEc2fP6bGz77NEmHkn5M0qOS2ip56f+R9CkppZcURfHzF6w/5DmStzBOeq7lT0v69aIoPso/TCntS3o3SU88J61685d3k/Rc2fIfq0wKfrFK334u5DkbEzwH8pkqOW6qkn9unHH/w6dc+yhJ7yPp+y7Yht+R9NCGz//PKc/82AltOTE5dhGJBFNISEhISMjZ8mqVCaaXSToxwbROEr2zpJ/b8KV1IeljU0qfXRTFnQ2Pv1xP4QvKhjpeJumuBFMqZ2j/xXtQxzMp/0DSvsqZyVlSSlsqB0Tvo3IG3Xeu/25Jek+VA+lr63vuEvuRni+VL5f0+We0ZSHpA1NK73JCMupl67JO0iefTc6o5yLyapVfoF+m8gf+jbJOfn2YygH+f1x/7F96XqdyNqLLg5I+SScPVpEvk/TfJX22pPoPsc+WfJ3KVQZveI7qfzblW1Ta/DFJ36bShn9cZYy8SOWPmy4Pqpx5KpVfMJ6Q9FZPse4/LOljVH5Z/ClJe5Kep/KL0HdK+uYN9X+4pL8naSnpN1R+8eo9xfpDnlv5eJU/5P2CpBcXRbHnF1NKA0l/9KSHU0rvIumDJP2wpOuS/kxK6a2Konj0lDp/WNIHq8ToTT/mvVwlrr3htLpPkM+UtJL0bzZce/767+/XLxRF8Ua9ZfxYcmFZ+8TemTc+c/ItKjnpM1Ri1LMuRVG8JfCQJL1SZXLpP0j6S0wYSCn9HUk/J+lrU0o/UBSF/7D16UVRTOsFrScv/A9JX5lS+jdFURxdoB0PFUXx8Dnv/YMn1P9ySa9SOeb8UxeoO+S5lbckTnqu5fmS/lv9w3Ui7Nee/eZcDimK4jnTTVEUv59S+iFJn5BS+pv1+HiW2vBcjwmeTfkKSV+kMh7eTtL/Pu3mNW89XP98nTj/lPXbV12wDY88hUmsDz+jE4yLoohXvFQUhVT+UPY5kn5F5Q8Cv6fyR5xdlTOFHrF7X6ryh7qXqiT6h1WCSWH37Koc9L9+Xd5tST8g6c
M21J3LO6FtxToY/LNXrj9/iaSPlfSzKn94uaXyh6e3OaGsPyzp+yUdqPwh84clvb+XV2vTptcr1/c8uH7/kMoflb9D5Y9AKyunoruT+vAU63xw3dcn1jr+OUl/+rn2pXjF6zK+iLUTrvVVzpY7kvRWp5TxzetyXm6fEefftf77mRueu64yEfEf1vc8csG2P2J1TCXd3HDPp6/voY5XPgv6fPgC97/zGjtfteHa312X9wOSOifo70WnlP3h6+e/SeUXwcc3lbO+FxzGXv94wz1NlRz5syp/xL/Lb1QmIo8kve091uvr1+16n1Pu+dL1Pf/gAuW+5Lw2U7mC5nckNZ5JH3oqfnSVXipneRcqV8rdZ593VCYOC5Ur1PyZ65I+VNKN9fuH1vfdNfY6R/3dEz7fUTlWLCT9kdq1d1H5I0t//R5saj3X+ozXhe3/z9a2+9yn+PxXrp//BJWrTwtJX3DCveDPt0h6zRo776/d8x7gmsqZloWkB8/Zlh1JM5WrUv3zB3XyuPul63seqtelpzEWV/kj6Y+qHFNM13j6d0+Ktw3Pn9bmh+2+E7HzXvVJJ3x/W8f9I5KGKmdVv2Gt/9+U9AWS0oayzv09tPbcj67v37lHfv9Wkv6VyhUvE5UTMj7JfPSVtfsf1sljxz+hEqsfW/f/dyV9jzZ/F/5wleOGJ9b3/tZad9eeqRi/oF7esO7/u2+49rnra3/vAuX9/PqZjd/XN9z/Stl35qfZl911Wb/xXOs1Xhey25XnJLv+8Lq8lqS/o3LCDhjyFTr5O8yHqvyd69b6/l9XuQJ395ztot6N3HIWDj6F9r6rSs753bWOH13r+10uYNeT2pztoQ2ct8HWT7tPriv7LGOXLvab5QtVJjVP/c2y9swnra992j2KuaQyVn5Z5+BlnfKbrsqdEL5mrcfJuv8/K+mLT7j361R+/5mp3LHkeyW98F70616/dDx++omn8OxHrZ/96Qs+d+IY74T7N/r5vX7FGUwhLl8v6atVAsarVM6U/ROSfkjlkvJN8rEqlyofqFwq/x1S3qLlpyR9ocrE01ernO36/pJ+MKX0afew3Z+hkvwfWffhlyT9JZXL9bt+43op8o+rnNn9fSqB60glgdRnnbxOxzO8f2f9P6+Ha/e+g8oZ3Q9K+laV+tt/Cn25SJ0vUAnKD6r8Ufs7JP1BSd+TUvrgp1B3SEjICVIUxUQlzrR1vMVcRda497Eqt+P4tg23fL/KZMSm82s+UeUs/1c/zaa+WlJ3XV5dXq5yUPz9Jz2cUhqklP52Sul1KaVRKves/+mU0sefcH8npfTFKaXfSinN1vuU//069p5T/qrKgex3bLj2ovXff15smOlaFMXt4vTtcV6+/vtqlRh9n8of70+TX1a59d4npZTqHPiRKmf3bbRXSqmhku++vajO5lVK6T1Sef7AI2udPZ5Sem1K6as31LNJqPPlmy6uZ0J9sspB5DfY5/fybL9vl/T2KlfSPG25qB+lU85gSim9a0rpG02/j6WUfjyl9Okn3PtQSul3U3nI8qMppdesZ7m+OQg++lVFUeTtSNYx8MXrt5/lD6xj4UeKorj1dCsvimJ2wuf7KpO9kvROtWuvL4riv68xM+Ryy5Prv+980QdTSh2VP3TsqUzW8wPdy9YrSk+TV6vk2k+qff5ylbhW34brPPJBKhOz9S1A7qgcY3/P+v336Hjc/bpzlHuhsXhK6RtV6uIdVX4v+nqVP7J8qaTvTymdZ3Uxbf6d9Xv/rvDQOZ4/S+7V94u2Spz48yq/c32Dysk6X65ylWNdnsr3UEn6SZXjnsp2timlB9c88ch5G5xSuk/l99e/qvKH2a9W6Qf/QuW2aueWlNKXqOz/S9Z/v0rSj6hcTfx/1+59hcqx2R9Vue3X16hMxn2+pJ9cr0B/ruV567+/veEan208i6kuKaV3VjkZ4QldfHXgB6SUPj+l9AUppb+0ttlFhW2/fvEpPBvy3MlbAifV5TUqtz37cZUr1yeS/pakf1m/cf372g9J+mOSvlvSP1XJL18g6afW31PPkoe0+beoh87x7EXb+xGSXivpL6tc0fjVKjHyz0n62ZTS+5yzzoe0mcO/RCVfPl05d5/OkIv8ZvlB6/o+WOXEg69b1/ujKrf0PUl+cv33ru9naX1u46bvTqfIV0v6WpWT116l8vvfR6hMdnXOW0gqz9z7BZV6/H2VHPetKn9DfmXt3vdRybufoXJS5deqnKjxQZJ+IqV01Vadsp3hRVcvSdK1lNJfTSn9nZTSZ6aU3u8cz7xjSumz1s/81ZTSO539yAXkuc72xevN4yXpA1US5OtlM6VUAsd/U21GvY6z0ytJH7GhvH+5vv4vZbPUVP4QsacyE/3ghvJeekL7TpsNsC/pD9WuvWZ97S/aZ0nlEsZC0kfX7v8cHc90eMlZddu1B+25f3jCPY/onCuYnkKdr6hdY5b+f3mufSpe8bpsL+LqlOvvub7n10+4zmy4V9U+J85fpnKwW0h639o9/0vlF/R2HW/P2fZH1s+1VS7R/qXa9T+8vv5KHW8V98raPddUDvQLSf9T5YDu61X+yFFI+vu1+5PKLzDF+p6vWj/z+yoH+BedWfNzKrebG264xsqwv/UU7PpWKr9Evn79/g+uy/qRE+5/Kf3VcaLmY2v3/EeVg+ItbVjBpONZje+x4fOJyplr365yle/Xq/zx6UjS1jn6c79KDr0jabDh+p9e1/2DG/z7RHvoYiuYPmx971ee4osPntM+F/YjncydH7nW7VLlj3RfpvLL4E9J+t+1ez9ife9c5aq+f6Ry7DBVOU45cYXYs/VSOXOxkPQnN1zbWV+b64SZoev7HtJTXMF0SpkDlV8U7/LxU/whVjBdspfKs0qOVI71v1nljz4vOOezH7e2+7+0z/79+rMP3XA/+PMta0z4DUm/Ztf7KndC+KH1+4vOFv/y9f1//oTrL9XJs24fqtelpzAWtzr+g9Yr/OwamPY5F7DPwzp9zHLad4l73aeX1j4n7v+L91XlOVd31q+2fX6h76G1uj56ff0f1T5/8LTnTijrVetn/mnt8/dVibWbxk532UFlYqxQOa67a3a6bGWzyh8QC5U8da1230s3teeU9j+49qWLvM4bQ7+/bsv/teHa566vvemEZz9sXdc/VJk4PFTJvx99Adu80vzTX1OVCdq7VsXZsy9bP/+VKsdby7WPvtN564/Xc//SWxYnPazj72M37POhyrHyUtLz7PMXqPxusK/ynFkvi5Vfd+0QcUr7NvKHzl7tc972Xl/r74k6pqj8nnYo6bUXaO9LdfpviQ+dZJ971aeT9KaL/2bZWPtbodr4X9Jf0wm/Wdo9tyU9dopPbXxuw/0v0vF3M+9/T+UEzLv4dZMdVPL4/15//gkb6nE+bK3rm6rcBtPve77KFVRv1PlXfH+uLsaHH3Nen6vV8+C6fxdawaRypdZCJ3ynP+PZTXxYqEzO/aEN97/klGf+vaTrT6Xvd9VzLwqJ1+V/qZxVVkj6Kxuu/bE6gBh4fNeG+zuSRip/fLux4Tpb9/y9DeW99
IT2nQbWf3/D/QzWv3JDP35sw/1NHf+I+pKz6rZrgMmbTgI6PXMJpkckNTdc/x1JTzzXPhWveF22FyR7xj3/fVPMrq+9TpuTR8T5y1R+AViq+gXn/dbXv0jlwOquAds52v7I+rmWjreTe3+7/i/W9b69Tk4wPaQNSRyVA8nvV/mF7r3s809Y3//Tknr2+Q2VW7uciGMb2j9UOcD6XydcJ2kyU/lF6SMlvfU5y/7C9bN/2z77uXV/3nHD/S/VcYJpqDLZ8AN2/W3WbX31+v3GLfJOaMtXrcv+6A3XruucW86pnFG+kTN1nJSpJ8VOtYculmDaXd/7s6f44oPn7MuF/UgbuFPlqrQ9lT8+vHhDPf4F5l5/sX3lBV934ccJ5fLF8zM2XHsvHX8xeNdTynhITzPBpHK1xStVjt9epfILXqETJtac4A+RYLqEL5Xn9r3RfK1QOYv8uyR91CnP/Yju5iFw/Ds23A/+fMv6/Res33/Q+v0nyn6E0cV/zCOWNm6lqqeeYHpE5xyLq9wSbK4NW56p/B7yhDZg6il9eljPTILpIn3aqDeL+00c+6/X1/6gfXah76G16390ff3ba5+3VW6/9A7n1Gdb5ffXfW3YTsp09sqz7KDjLUz/7Dnq/a71vXdtPWd+c9ePhSfcSxxd5PWSc5b96vX9/879Q+Wkl99ZX5ud8OyX1+p8o6QPP6+vr8v4syon/fwBleNSxrNv0hl8pPIcQa//Zzf5Zrze/F96y+Gkh3XC2E3HkxX/tH32RSfFgcox777KCW7n/WF+I3/o7GTMedv7OevP7to2fn39n66v35XQPuH+l+r03xIfOsk+96pPJ+lNF//N8gPWn/3XDfc3dLxV+ktO6Ouvrq/3ap+/vUpOPFciQ8eY/8mn6OyRs+ygchVzIel7zlHnR6/vvWt7+prf/Klz9uERXYwPHzpPuRvqeXD9/EUTTK9YP/d1T6HOr1KZBLxP5YTX91XJz4XKowDepnb/u6vEsT+4vv8+lZMtmVz8E7oHW9+/uR7yHfLsy3uv/25apvszKn9M2yQ/u+Gzd1E5u/Uni81btPxXlT+AvveGa09Ffm7DZ7+7/nvdPmOZ7Y/Vby6KYplS+gmVW909FfmF4oStZJ5BeV1RFMsNn/+uyq2ZQkJC7r28SuXS9JfJtq1MKb1Q5Qqnny+KYhMmSZKKovidlNIPSvr4lNLnFUUxUrnFwlLl+UD3Qr5J5WD25ZJ+OqU0VPkj/g8URfGGTTtBpJRuqtyy5eeKovhHtTZPU0pfoHIG8yfoeNugT17//TuFHaJcFMWtlNKXXrA/b6PyB7aNW6UURfGfUkqfI+n/VXmW1Kev2/0mlZzyL4uiuOsw2vW2Fy/T3YfoPqRyVdfLVQ62NkpRFKOU0mskfVpK6cGiKB5RuXVOU09vO8O7tg8riuL2BZ5/lcov2S+TbVuRUnprlQdWP6bj7SLuuRRFsZdSmqr8slKXD1X5Q93vnbO4e+VHn6RyVc/XFEWxied9q8K/onLF3mcVRfErtft+KaX0akmfm1L6v+rXT5BXnLONLg+f457/rPK8ls9LKX07Y6r1VopfYvdd3/TwPZR3VLWPR5L+psovNyFXWIqi+Lcppe9S+SPIB6gcu3+ApI+R9DEppX+j8oeEgmdSSu+4vv/1RVH8tBX3/Sp/DP6YlNJ9hW37uEEeUpnQfLnKFSyfqjIB891PsSs3138vgrPnkXONxVN5+Px7quzD556wI9NM5fZpPPO5KnHK5buLonjd02rx2XKvvl/sFUXxmyeUI1Vx66l+D5XKLaCk8seSLEVRzHWxw+jfVeX31x8vNh+O/rDu3iLrJGHi0IlbEpu8v8rE419IKf2FDdc7ku5PKd0siuLJDdezFOUh4mdt9/VU5e+pHAd+rKTXpZR+ROUknI9Wyfdvr3KstaldXyjpC9fj0XdWufXf96WUvrgoin9wnsqLoviu2kdvkPQNKaXXqvSRz08p/ZNNuFIUxftJeaz7PirPzPmfKaW/WBTFD9TvD3nzlbdATrro71z/tX5zURS3U0o/r3KLsXdVuQL9mZLzthcuec+U0is3PMM2iO8m6VfW27q9pHbPI0VRPPRUGnlBOW+f7lU5J/JhURSrlNJP6fRtIp0T83efoijecP6mSjrlt9N12zaNEzYJ27Z93znuxS9ecIJfsJ3bu6lcIX2qFEXx4DnqfE4kldvpf8r67UW3W1RRFH+j9tHPqRxH/HuVSb3Pl23tWxTFL6vc/h85VLk180+p/G3nj6ncPvZp/X4QCaYQZHf999H6hXXy5aQB7ZtOKeukPZX5/Nq5W3e63NnwGV9EmvbZiX1cy6a+nFeezrNPVe6c8PlCivPVQkKeIfl2lbOq/nxK6a9bUsDP+DlLXq1yxsjHpZT+ncr9l/9zURS/v+n8hfWe2Z+7oZyvLoriTv3Doih+L6X0XyT9xXVS5i9K2j6jbS9UiZfFCQM6zj94N/vsfVT+mLDpB6GHT6lrk5z5Rasoiq9JKX2Dyn2lX6RyAP4ilUmvT0gpfWlRFPVzHT5E5cSBHyiKwhMer1H54/hLU0p/d/1D1EnyapVbEnzK+pyET5H0i0VRbJpgcZZ8h8rZV9+9HgD+sMrJGL91wXL+q8rVPX8spfRuRVH86vrzT1Y5tnvojD7dC7mlcvvBijyFvtwrP3oqX2DO9cX2rMKKonimftD7dpWzZD9c5Rfs71G5bcSHSXprlT+wnfij3r2Soii+X2W+tr2u7y+r3OroxSmlP19sOBct5OrIGkt+cP3inLc/L+kbVSZrv0vVH9lervJH7odq5SxSSt8q6W+onOX6lafU+WhK6T+q5Np/pvIHxK96Gr5GUr/3FJ8/Se6c8Hl9LH5dpU7u1/kT0p+rctWzyyM639lQT0funPD5Rb9fnFaOdM7vaGd8D5XKraqkDRM3Lij38nviNUm3i/OdRXdTJW+f5RdbOj6D5lmXoijeuJ5M9cUqV358hsof2L9D0v+nckunx84oY6RyRdZfTindkPSlKaUfLIrifzyNdr02pfSzKn8ce3+Vq8dOuvdJST+UUvofKpOP35xSesE57RTyZiJvSZy06XueTsfQZ+v3t41ygfbyvW/jebImW+u/L9HdGPljujfnDp4qF+jTWXLecs7iopM+R55xTlzHzmkJWZdr67/nmXiIX2yabOGydcb1yyB/UtLbSfqZoij+1z0s91+oxMMPOutGSSqKYn89mfaL1s9Eginknsj++u9bqXZ455q0b2ozKBQbPmPW1/M2XJPKH0X8Pun4x5GTfly9F0J9d/0gtpaT2nse2aQHZKWTD8G79jTqDAkJeZbFV7SoXPHztesZmR+ncmuVbz1HMf9R5WDtZSoTN0Odnvy5ps0/PDykk3/AebXKWSifoDLp8Cad8qVbxwO6F65fJ4kP6HYl3TohkXHRpPt5v2iNVQ58vkfKh/a+XOWPG1+cUvoPtdndHJz5UK2cW3xRVDn79t+fUudr1zNkP1nlLNkXqDyk9MJSFMXPppQ+UOUg7mNVJg+UUnq9pC8piuLbzllOsU62fZlK
P/ob69Van6KSj57O6qrzSl9P/8uLdO/86Nr670W+wJz3i+1zIusfVj9K0uepxJtPUplgelil7+K3p/6odw/bM1eZ2Px/U0pHKv3vs3XKjzIhV0/Wq1v+bUrpD6nckeBDtP4xb52EfOn61i9LKX3ZCcW8XGf7zatUnrHxb9fvnw6uESM3T73rmRO+g/x8URTnOrj8Hsy8LXTyd/1rT7Psey1P9XuodGzTp4uD9/J74h1JN1NK/XMkL/ZUbktz4wLlb5SU0oM6jr/zykPr1dlnSlEUj6o8b/SzavV+yPrfiySKvl/lZKsXX/C5TfL4+u/wPDcXRXEnpfTTKle9vLs2rywIuSQSnCSp+vvbL2+4vun3t+dSaMd7FkXxi2fdXBTFK1XuzvFU5cTfGvXmzYeb5KTPkZsqE1ebdpK6iDgn1nm5pdoKqVPkzvrv21ygzo8uiuJ7z3H/qXLCSvDT5HVFUXz30633nMJvFBdevXSGXIgPn8YzGyUSTCHIz+t4ifFv1669ny7mK69XeXDne6aUrm3I+n/w+u9r7TNmrb/dhvLe9wJ1nybU9+L6hfWXlw844bmVLjY7oS63Jb1HSqm94Qe0k/r2dOsMCQl55uRVKhNML5P0tSqTS9uSvrEoiv3THpTKH2lTSt+k8mygt1U5ODtx1cX6i/9FV0j8F5U/xvzddR1fVhTFaVvMMKD7p0VRfN4569iTdOMEbLtowv4pfdFazxz8+pTS+6n8Af5DtJ7dnVK6X+WPB5L0bSmlk5I3n6pTEkxreZXKGUH/QmVS5Vsu0s5am39a0p9OKXVVbtP3ESoTVq9JKT1eFMUPn7Oob1K5ZeBfSSn9bZWHpP//VO7ZvWlbonsm62X911Qe2vp05V750Z3137eRdNZMsAt9sT1LTlgFdZo8vN7K6ExZ6+Qr1i+vs6dyq4gniqK4F3a4qHyfygTTSxQJprdUOVj/dX76aEkPqPwusGlVolR+D3jnlNKLN21nafJDKs92eYGk/1YUxeufRluJ83dVuXL0WZWiKA5TSr8s6d1TSjeKzVuIX1SWUvkd5oQt7W5rw/eq9Xee97oH9d9LeTrfQ991/fd1T7MNv6by++t7pZR2N2yT95ILlPUzKlf5fITK1RRn3fuRKaV3X29h83TkQV18y9aHVa6MezryV9Z/X3OBZ/ix8bSx6ZmyTiCQtK37zjNef8iblbwlc9LPq0x+vUTleVNZ1pO130vlBKVf1ZuH/IzKiVIfqGNdPB2BA0/6/cx/a6x/R7pXvzXeK/n59d+7fptcf/960UkPppS2VGLbL/g2kU9RXqsSW1+su7H1A3T+3yp/Zv33T6r8Hn2eez9Q0tNOMGnzSvDT5F/rqW97eW5JKT1f5XnSeypXAd9LYUePi/DhU3lmo8Q2WiEIZ1N8UUqJ5ZDMDv+HFylo/YPft6r8wfVL/VpK6R1UznadS/pmu8SB65+w3qec+29IqpwH8jTkp1QOLj4opfTRtWufpZPPX3pSmxNf55WfVfnF6JP9w5TSS1Uu538m6gwJCXmGpCiK10r6nyoTx5zHJJVJiPMKB1q/rcrE1Hn3MT5vG5cqt4p423U933DGIz+rEoM/8ALVvFblOGJTcv4lFyhHKrdueFzlGX5PRTZ9qfwklatH/6ekf3XC63FJH5ZS+gNnlP8alSvU3lbSvzthu4QLSVEUs6Iofmq9rd9nrz+uc9Npzz+qcvB9n8pE2lPxw6cq76JS16+7B2XdKz/yLzDnvfci/n6avOKCr5fcgzo/TqV/n2vV2zMg8ePcFZeU0senlP74+geN+rXn6XgFoJ9/x4zMv1cUxcs2vXT8veJTdYoURbFS+YPZnz3r3nPIw+u/73faTc+w/BOVMfuNm3ZnSCldTymda3XTWtgubdNZeFLJ62+fUvoTtc//ri72g8uzIU/neyg2/VH/MKXUTim96/q755myTubz/fWVtbLeV+XWoOeVr13//aqU0l2ztmuf/dP131evf3Sq3ztcT6I5U4qieLgoinTB18PnKTul1Fj/eFn//BNVJph+SvbDXEqpm1J6zxPKeqHKrYeXqp1TlVJ6+7Xd/PeA7ZTSXePDtX98tcoY+DXZSqR1ORtn+qeUPk3lav3f1dkTUkLeTCQ46VT5FpW/r/31VJ455fKlKs8o/Zbi2T8z/CT5JpUTw16x/i5dkTXevOQC5Z2HD6XazgXrVW+fc4F6ng35SZW7BXxwSqn+neZTdfr5S2x5/6P1C5uw9Qx5aP33i9a/yVJOT+UEs/PKf1Q5ieHPpJQ+fkO73tbefo/Kvn9mSulPbSospfT+5+1DURQPXpAPX3qBfj0d+RSVdvrm01Y5p5QGa5u9fe3z91hPrqjf/x4qzxiUapNh1+OYTXX83yqPazjS8erMpyyxgilEklQUxY+llF6lErR+OaX0nSpJ6qNUZlZ/Xxfb4/8LVf5w81nrQeSPqvwRjLNAPstn3Bblvs7fqnK7oNellP6zSiL8UyoHCe+tpynrLYU+ReXsk+9MKf0HlTMY3kvloeQs1a/Lj6g8K+U/qvwhbK5y1spdB8qfIF+rMrn0z1NKH6pyMPteKveJ/k8qZ7jd6zpDQkKeWXm1ytUnX6nyy8EvFkXx38/7cFEUv5VS+gitD5R+Zpqor1GJH6OiKE6dkVIUxWNgcErpiyX9w3rSa/0jzcqw+5tUngXzD1JKH1IUxXR93w2VP2CdW9b4/N9U7m3+jvUVOCmlv6Zy2frP1J9NKb2rjvdqdozkS8RnFCecl5RS+tJ1W1+mctu6k9p3sLbXfSoTVk9JUkovUrlFUn0wyY8g4wsW+WqVMwD/ho4PkD9rtvS9kI0/6EnZT9qSfuuEbe/qcq/86F+rPIT801NK31nny5TS2xZFwVYO36TS3q9IKf2Pun+sf7z4oAusMnqmzmBSSmmnvjIypfRekv6xyhmZX34P6rhPpW8/Udgh1yml9y2K4q6tg1K5OpB6//PTrT/kzVb+qMofXt6UUvoJHa9Y/AMqZ172Vf4Y8O8laZ2o/zCdffD5d6j8UZizDE9czbOe0PHak66fV4qi+KVUbkX6oaes+HlGpSiKb0wp/WGVZ9f8VkrpB1Seo3ZDpU4/SCU2/bVzFvkjKrnvP6Ty3MWJpN8pioIJfF+p8vy270kpfYfK7XJetK7rYd2bRPc9kaf6PXSN1R8m6fVFUfxS7fLbqJyt/zsqV/acR/6Oyu+En7v+MeYnVG4t9ZdUrgz/M+fszw+mlP6+Sg771ZTSd6v8/vdWKidT/IzW23YVRfEjKaUvVPmD3W+sbfm/VW7R+gKVs8d/Qpu/oz6bMpD0aErph1T+ALjS8blHvyrpL6x/gEf6Kr/T/6KkX1K5Wn+g8mxDttT7m0VR/Fqtnn+jss8frOMf4W+q1OPPret6o8rzzD5YpT8/Ienja/W/j6R/l8qt8H5T5dbUN1WOX/6QygPOP/G5wIKQpyzBSSeX90gqtwP7ekmvTSn9W5WT6F6sMkZ/TdIXPM1m3zMpiuLJlNLHqvzO8jMppR9RubVfoXKS9furjNfznpv40yq/Q31uSum
mjrfY/tr1atTvUXlO3MevExr/XWUy6qPX1/7iPenYPZCiKFYppZep/G3ye9d8+FuS3kPlOcTfp3Iy3abfZplQ8p0brm3C1tPa8ZMppa9VucvGL6Xy7OC5Sp3d1snnfdXLOUop/QWVZ6a9Zp3g/xmVtn03lZzbWt87Tyn9OUk/IOk/p5R+SuVkxrFKv3ihyt063loX/858TyWl9AE6ntzJ5It3Sik9xD2bElbrccunrN+eNSn0j6j8rv1jqo7ZPk/SR6WUflzl2GKmcjXkR6hMXL1ad09A/PcppYXKiRj/R6X+X7iuYyHp04pzbpd7mkSCKcTl01WSz6ep/HLzpErQ/zsqnfDcB3cX5fkW7y/pb6uc6fF5Kr/4/Kykf1wUxQ9ueOzlKgd/Hy/pM1V+6foalT+i3BPQXwPlB6rM7DIj4L+rDNgP1+bB++eoJLsPVZnwakj6ElV/yDytzl9JKX2YytkxH6UygH9cJXH+OW1OMD2tOkNCQp66+MBgg3xGUZ4D9BqVP96wAuLC+3CfgIP3TNY/FH/3BR75LJVbbv2/KhNNP6ESk5+vcgD4QpX4zBe6b1P5o8ufUTnw/B6ViYWPVbmf/rlmDZt8p8pkyYfr7u0LPkJlkv4RlTO7fldSd93eD1/X+zXF+qDo9ay3d5b0v05KLq3lX6lMNHxySukVxSnbCBZFcdLWGheRvyXpQ9YDwv+t8geOd1fJR7d18dVHP6hyVhiz/76ueOoHDl9E/oTKmcebDgL9EZU/iv0BnW/bnXviR0VRPJFS+gSVPyz8aErp+1Ruu7Gj8kvZ263b9Ex8sX0m5YdSShOVP9AdqIzFj1Q5pvqooih+v/5ASukrVSaMpOOVYX9zPUtNkr67qO4x/lkqV1Z9iaoz979h/UX9Z1WOyZYqf6j9Uyp/yPlulSslve77VN0yj3b8q5QS23V8+YYfFUPe/OSrVP4g82EqY+jDVcbEkyp/nHiNpNcURd6G5WUqVzZ+82k4VJTbxX2bynH/J+l4BcczLf9c5Y+If0KnbEv7TEpRFJ+5xqa/plKv11Qmft6g8vvORbZf/QaVWPtxKrmlpfJHiG9e1/UjKaWPUZl455zIH1KJt1/y9Htzz+WpfA/9MJVjlP/nXjRgzSN/TMff2d5X5e4Xn671DOwLlPXF6+TGZ6v8rjdUuR3wz+l4xRb3fkVK6SfX936Ayh/w9lRudfwqXWzruWdKZpK+XWX7/vj6s99QOYb66vXY2GUk6YtV/qD5YpVcUKjs07dI+voLTMy6JenrVI51PlxlUvZIpU98haR/UhRF/Qyu16o8n/MDVXLmDZVbhP22Smz7/4qi+N1z1h/y5iHBSadIURT/LKX0m5I+X+X3qYHK70v/WOXEwTtPt457KWuOeg+V7f1wlbF6pHJCwX/V5iTJSWXdTin9eZVj2Zfq+CyZb5G0VxTFdD3R+ytV4tcLVY6rP0ElvrzZJJikcjVqSunFkv6+SvySyt8sP1jHq2nrk88aKreL/4Wi3I79XsjnSPp1lb/NfpqqvPwL5y2kKIqfW0+O+0KV33lfpPI7zW+qHKP4vb+YytWvn6eSOz9ZZTLtjSq3D3yFyqTxcy3vqBIvXB6offbSDc99uMqx288URfFUV9B+t46/336IjnHw+yS9uth8ftU/V4mdf0wlHyeVfPyQSg4/tz1Pk1Q87a0ZQ666pJTeSSWwfHtRFHctawwJCQm5CmI/fp4m1xmgp5RerfLLy0TS808auKfyfJZXSHp5URSnblWXykMz5ypnIT94gbY/onKw0j4tSbK+92UqE2JfUpSHpvq1jsoZxJ+gMvHRU5lk+g2VS9y/uSiKJ2v3f6HKAdTbqBz8favKJNVU0o8VRfGSc/aho/KL0CNFUfzR2rV3Vvljz4epTCq9tcof0x5V+WPNNxZF8Z/s/m9d9+FziqL4mjPq/UGVXzb+XFEU35XK7Uu/SdI/KIrizBU0KaX/I+ltzrOKJZVbFX28ylmYb7Puw/9ROVPrq4qi+J2zythQ5hep/AIiSe9anLAn/Nq/T7THOin3o6fds75vV+XMwB8oiuJjNlx/ROsE03lnQl3UjyymPri+yiil9O4qZ2l+qMoZzrdV/mj5rUVRvKp274M6/mL7djr+Yvs/JH1n8ewd9HqipJT+psofh99BZVLn97Q+/8hWZNWfeUSnb4FViX3TZ/3z/1vl9ovvo/JLU0fll7rXqvwR+98WtS8Sa52edSbUXXYLCXmmJaW0o/IH6Z8qiuLc25GGPLdy2vfQ9czuF0t6h+LuM5NCQkJC3mwlOCnkqch6MsIflbRbFMXIPv8olVunf2JRFE/5rOCQkKcjkWAKyZLKvWsf8+Xlqdzf8t+pnK36l4qieNr7MoaEhISEhGySlNLfVjlz+H2Kovj5s+4PefYlpfTXVa4u/sB7tKorJCQk5FmRlNJnqNxC6H2LonjK252G3Hu56PfQlNJ7q9yy9rOLovi6Z7u9ISEhIU9XgpNCNsma+zr1yas2CfL7iqL4U/Z5UsmHS0l/pD75KyTk2ZJIMIVkSSl9ucqZ1Q+rnD38PJUzgN9W5WzZjwywCgkJCQl5piSVB4e+XuWZVh/1XLcnpCoppb6OZ1t+7HPdnpCQkJCLSEqpqXKF4y8URRHnh70ZyUW/h6by8PP3kfQVZ63cDgkJCXlzlOCkkE2SyvOFf17ltra/qXLHi/dWuUXpHUkvKoriV+3+t1a5hd13F0Xxume7vSEhSCSYQrKs9yX9fEnvpXKf4oXKLQleo3JfxvMc1h0SEhISEvKUJaX0QSr3mP5KX/of8txLSundVJ7f8dC9OAg0JCQkJCREiu+hISEhISEhkpRSuq7y7KwXq5xs0VW5PfkPq9zCfdOZhCEhz7lEgikkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCTkQtJ4rhsQEhISEhISEhISEhISEhISEhISEhISEhIScrmkddrFbrdb3H///XqXd3kXveIVr9DnfM7naHd3V1/3dV+nVqulZrOpRqOh1Wql2Wym5XKZn53P55pOp5KkTqejbrerRqOhlJKKolBRFDo6OtJyuVRRFGq1WvnafD7XcrnUarXK90lSo9HQ1tZWpRykKIq7npnNZlosFrkevy+lpJSS2u22ms2mVqtVbi9ltNtt9ft9dbtdtVottdttSdJyudRoNNJisdBisag802g01Gq1cj3L5VKNRiPrirY3m01JUkopfz6bzXJf0Qt9TCmp1Wppa2srl+t6oTyeazabuW/tdlvT6TSXsb+/n3WxWCzUarVyHxB0U54XVwrvV6uVxuOxJpNJbgO6WSwWmk6nWi6XWTe0sd/v68aNG2q321oul+p2u5UXtppMJlmfq9VKnU4n64n6V6tV1gPtXfusOp1O6dzmo/gGfUS3RVGo0+mo1Wqp1WpptVpl3zk6Oqr42GKxyGW4/9E+2rhYLLLvYJPlcqn5fJ774HqlTvyi1+vldvOeeheLRX4+pVRpU90nsT8xRXn0FV3OZrOs7/F4rNlspvl8XokPdIium82mOp1OxTboVZKazWb2Sf5vt9
vZprSReKEti8Ui299jijKazWbWHbZst9tqt9sVn1gulxX/3d7eVrvdzr7O524P2k48co3PFouF5vO5JpOJiqLIz8xmM02n04xj/txkMsltnU6n2aeazaYGg0Fuu1Ri5mKxUFEUOSZ6vV72vTqW4Rvo/YUvfOGxU4VslOC04LTgtOC04LTgtKsiwWnBacFpwWnBacFpV0GCz4LPgs+Cz4LPLj+fnZpgWiwWOjw81J07dzSfzysgCkh4I7yjOMJkMtF4PNZ4PM6GwVEok3Ipczwe6+joKN9PwLZarQzGHoC8B+Qk5cBy5dSdDCfBYH4Pwdxut9XpdNTr9dRut9VoNHLgzOdzHR0dVYDcnQZnwOgOTAQGAeBB4HqCSHGW2WyW/6eNXMcGvObzeeU97djb26uAut9DmbTR20P/U0rZPugb/TloeP8BCnRFW7mGs+MDm+zr9nJ9up4cxNBrr9fT0dFR9gX6llLScrmsgKqDRR2Q68J9EIu3tdVqaTabZaCt+ye2p3wAnPodMG7fvp39DR26D9Ttix0Bxkajke3F/XxOPGPjZrNZ6T/9RDdOsgAnenQicnDudrvZvynTycoHjKvVKvstfUG/7p/uo8RUXRf4QqPRULfbvasenu/1evk+sAyMoA9gj+Og28DtDN4sFos8AK4PSlzv2BufrZdD3PHXB3P9fj/bMeRsCU4LTgtOC04LTgtOuyoSnBacFpwWnBacFpx2FST4LPgs+Cz4LPjs8vPZqQmm1arMGB8eHlYMP58fn7FZFEVlFkGn08kZRILWARflkk12B+Heg4ODivMNh8OcyYdkNjl2PTgXi0UFfLjHHcmzfnXj4eCtVkv9fl+dTqdy/9HRkY6OjrIze0aesjqdTi6P+4uiyMZptVrqdDo5aHA2Bx8H4slkkgPd++sB4v2HjHwWBPbEkXFe2u7OSKa6KIpKVnmxWOT2pZQyEDFjgTZ45pb2UqeThRMl9wMYPrMEPaeUsn0pExCkHDLdg8Eg6342m6nRaGRdowd8gf4DMA70lM196MYBHNsyK8FjgWewkc8uWa1WFQCn7qIodHBwkLPQDvR14uj3+5WsPGW3Wi0dHR1lnTN7BD0RHyklbW1t3YUB9ffz+TzPtKB/xHgdIJlRQexiU+rnhZ87COMHPsuIflGOk6jbSTqeYeD1ud9wr8cg8Ul/vEx06LFC/bQfgMffKIf6GKjUB5dcI5487tCjz1Dic2adnDYgCTmW4LTgtOC04LT6++C04LTLKsFpwWnBacFp9ffBacFpl1GCz4LPgs+Cz+rvg88uH5+dmmDCgNPpVN1uV4PBQN1uN4OWO+JkMqlkEd3ZCQZmAWAIAtiz/YAT93k2VJJms1meWYBDS7rLCXz5mmeuaXudZKiP9nuZLPPjLw7m2UEMTvnefvqEHrzNs9lMo9GoYrTxeJxBqtfrVYAeoEL3rmvaDqhS73g8rsyYqPePNvG5C+VLx5lv2kEQURZ6R5+0CYFkvT6uYyvsgY0d/CBA9AeJ007P1AIQ7XZbh4eHd/WT5amUw3MONh5U2ByA5n9/oSNf+kr5i8VCo9EoA4mXgV6dSN1HiC9ijrZSBtLtdiukWfcN+gdoMIjxOBkOhxtBwwcP/CU2GbB47FGu34PtnYTxIfSLrSEej0dvC+1BT4C1+yvL4z3GGSxR/mKxUK/Xqwz20Nne3l4GXICeQaoLOEf7nZxarVaF3AeDQWUQh2/gw+gMosc3WLqKDX0g6b4WcrYEpwWnBacFpwWnBaddFQlOC04LTgtOC04LTrsKEnwWfBZ8FnwWfHa5+ezUq/fdd5/e8z3fU+/93u+tXq+n+XyuO3fu6A1veEN2jGazmR2YrBmNoDObgGk+n+vw8DBn2Tygi+I4g51SmenEuAC4l8vLg4yMKWBM2+bzecUpUkra39/P1zEERsCReE9ZGLHb7Wq5XOaluPQdY9y6dasCHLSb+rmPLK8Hv2cz3eGdIL3/w+FQg8EgL1/jnsFgkNtNHd6mXq+nbreb95kl406fnIxp2/b2tvr9fl4GWp/5wRJfz8YT5KtVueTW9zzlXjL/TvLok6Aaj8c5s87+nz6gwG9Ynoju6Ld0nGXudDr5GWxBf5fLZfaV5z3veRoOh1mvBwcHmbzdBo3G8bJIfAZ9ej1+vwPhfD7PewcDxgwWVqtVDvZGo1xiORqNNJ1ONZ1O83JqzyzTZwYSvV5POzs72t3dzb7++OOPazwe5/54mxzMiOHt7e0M7tQH+M3n8/we3U4mE81ms4rNAVNsjo7waXQwHA4rgx0fHNA32ukzaVIql1MzCwifchvz2t/f1+HhoVJKOjw81GQy0WQy0Wg0ynW22+V+wo4HDurootfrVYip2TxeTnoSGUiqDAYdtBuNcg9hx1Xa7cQvSTdv3jwNzkMUnBacFpwWnKas9+C04LTLLsFpwWnBacFp6D04LTjtMkvwWfBZ8FnwGXoPPru8fHZqgukFL3iB3umd3knv8A7vkBu6WpXLV5fLZXZCnGSxWGSHpDPc45lNgmUymeQsrncOh/IXnXand8MTYO6cPEsmkReB4O2iXhyQIHNAxEFwJp4ho71YHC/flI6X9Xmb6IcHE+Vj6E6nU6nLn6E+Xm5oSIFAc91Np1PNZrNMogBuSkn9fl/9fl+7u7uZWMmAOnhiK0nq9XoaDofq9/t5+Sz1D4fDfOhdv9+vOLNUOipERx9brVYGIggHHdUz4XwOyGAj9I3OPbtd1zVZ7H6/n/uH30JqBFK329Xu7q6uX7+ulFKeUcJggufwOfd5QI021/0HoX766SDqBEV2HOAGhNCHk4P7vsfPtWvXdP36dQ2HQ61W5SFwzWYzHxrnMeoDDx9YMagC0CEgBmW0wwkOn8NvPc48Xvkc/+n3+3cNTtAZ4A05oyePl/oLn6EMDsQrikJ7e3v5IEk+BxMA2zrhoAvfU9Xvc9KvL3X1F+U5HqEv+ug+jm7r+BJysgSnBacFpwWnBacFp10VCU4LTgtOC04LTgtOuwoSfBZ8FnwWfBZ8dvn57NQE0/u93/vp7d7u7bS1tZULROE4sWfp+RyluTHcocnSAjgYms45WOJo7lwoYDKZ5GuueBwJhdNOzzjjMA7ukrKT8R7QIPjdyKtVuSwVEF2tjg97w9G8Dul4v0PXCf1Ep71er+IIrk//vN5H9ApZ4GCdTkfj8Vij0ahSVkopOygAu1gscpD5jAV3KLKzzFxAH9zvwOZ99faiX8ptNKrLR2knpAA5M3MAQN3a2spkUBRFPtwREEbnrVar4gMAIzNB8O2UUl5qTB29Xi/3F4CnLJYCY4PZbJaDudlsVmZYdLvdSkzgW5A29fksB3zCCR//okyPIXThMySoj7K3t7c1GAzUbrc1Go1yTEEi6IIl4w5o/M/SScCYwYkD8mKxyDMdfJaCx2zdzyEun1nS6Rwftsfg0AnbB2E+AER/kvISb/cr7Odxc3BwkGc9+L7L0vGyUJZj++DCbQyRMBMFndRnC2FzdIUd8AN07f64WCw0mUwy0FNeyPkkOC04LTgtOC04LTjtqkhwWnBac
FpwWnBacNpVkOCz4LPgs+Cz4LPLz2fptJve5V3epZjP5xoMBvqTf/JP6nu/93t148YNfcqnfIra7XZeekZj605bFEXe67CeOQVk6ABO1WiUS/A8S0nZvPb39zUajSoZQMCNzOTR0fFBYMvl8q79GQGjVquVgXo8HmdwINPp7QPMkXp2lfZvb29ra2tL29vbWiwWuXxA0J2bQHHiXC6XFTAB6BqNRnZCz2rSXpYLolvKfuCBB9Tv9zWdTu86yNCBDdBnZsTt27dzOyRVdIM+AVCct91ua2trK9vP7U3ZDA6Y2SCVB8URdEdHR1knPjsjpXJZ8XQ6zf2HmLrdrra3tyv6RdAtpAwh0wcAIaXjg9gIUsgKG3MNcGs2m9rf38+zVObzudrt9l2kxjJG2jydTnX79u27iKfTKZfqepbdwZP9OAFEyMNJeNOSSdeLzwhxIgWUKJ/lmvgHRIAfekwS5wDmeDzOy3m9bW4LBnqS8qCt0WhkAkEf/X4/24AYbDQaeZYB/uqHaqZUzpCRylk0jz/+eI4b9s+lHT6Ac1zY3t6uDNrqg10nLADefdVjEUzyeGg2j/dJdlL1ASuDJ/zQB6nu40VR6LM/+7PjBNkzJDgtOC04LTgtOC047apIcFpwWnBacFpwWnDaVZDgs+Cz4LPgs+Czy89np65geuKJJ1QUhcbjsX78x39ct2/f1s7OTlYMyqWBAD0N4TPPEtY7wzP1jvLyTNxyWS5vJdApn+uAaavVygGLwjCWgyNBh/IAWK47AEjKykYwKs7rRqTt6MMNRplkjH1JIm3i/vm83HcVMAAICSActN1uV3SPXqUyiwqp1nVWFEXFAd3BeFG36w4ApjyCQ1Jlv1VvY6PRyGA+n88z6EKkvMg849jeboAHu2Fb+tftdjPQOkBjL9rsdlssFvm9EydACwFgEwKewQRLtynL/c19Hnv4fTzH9fl8ngcwtIH+YFN8HzDjWTLLEKL7orfDfc6BHz9DR24/dOY6df/AT2kLMzrQmfsAbV0ul5WyKB898H46nVZIgrKwATHBAAZ9su/qZDLRo48+mmd5EMvYtb7HKeS/tbWV++/9ADvqOIBgL0lZP/TRCaDVamVfoN31eMCebi/HTi8v5GwJTgtOC04LTgtOC067KhKcFpwWnBacFpwWnHYVJPgs+Cz4LPgs+Ozy89mpCaaiKHIW71d+5Vey4skuAzh0VlJFGTiLN8Szmw6CnnGkwyjAg2Nvb6/iOHQcEPG2O+ARfGSgcWiAlXu8/V42mV0cnvJcJ2TaqR+HnU6necYF5TjIOwA5COPAHiRHR0d5qRpt9BkP9efJ2JPF5xl0z33Yxsmm7tTom2cc+AFa2lgnG3QFSS+XywpAAu70WTo+4M2DgGwwdqoHPYHKX/SK39FegtH9wUER8qDsOujR1pTSXVn1TTHgGWz+rw+INtmEZcFORNSLD2LT0WiUn59OpxXd82o2y71Qh8NhXs4K4DKQQFarVY51Jwr0iK3AAo8NDgAEI3wWipfLM9iE5ymPuCZusCf93zQoIbbpw3Q61Z07d3KdzLIB7N2+TrxgD8DrccL99IlyfJCKrX0A5u33WHU/gSgBep9xJR0f9OekHHI+CU4LTqNcrgenBaehx+C04LTLJsFpwWmUy/XgtOA09BicFpx2mST4LPiMcrkefBZ8hh6Dzy4Pn526Rd4rX/nKAqdIKWXHGI/HFQWxfM5B1TvgjazX56CKUBZCwEvK2cCiKDQcDivltNvtCogVRbmnYL/fz9fm87n29/ezUfr9vnq9nlJKFecj0Kk3paStra0MquPxOH/ebFYPFXPj37lzp+I8gPtgMMjLcD2TuFgsdHh4mLOi9IGgGo1GGexwJNrLHpeDwUC7u7vHRk5Ju7u7FaJC3LmWy2XOTNdJF9J3sGcJri+lTSlVDikD3CDLw8PDyuCAtrOfqKS8DNoDDUKdTCbqdrt579b7779f/X5f3W634vjeHidR7AsJ3rlzR9PpVNLxzBEnYNeTg73PrBiNRhV/9Ww7g4DZbKbDw8MKIVBfo1E9dHB3dzdnjWljs3l8gKJnwdnLlcPh0FOv16uAEc/QFwByNBrp8PAwkwL+TWz48u/hcJjBlucXi0VlQNJutzUej/Peu/VYpB31QZr7HfeT0b9582aOU56bz+c6PDzMS68BRXwGcMRuHpP1waf7CrMKiBlihbj0wamXTRmOXSkdzwTpdDq6ceNG3jvY4w6/ZbCIDoiRXq+Xl++ynBwMYkZPu93WJ37iJ8Y3mDMkOC04LThNd+khOC04LTjtckpwWnBacJru0kNwWnBacNrlk+Cz4LPgM92lh+Cz4LPLxmenrmDa3d3NzgXoubN4ZzAgGUCue4A4gNAJFFPPiBEM/kxRFLljzWYz7z/Ic8vlsgJa/pxnQr3sZrOZA4624nz03YPDndQdkvs8g1nPttMWHIX3nhVHvC20m2CHkDzri5MSDHVdehbUdQag+TJMnocYHIA32ZC9O7G7k547PsFOex2E0anbHrs5YG1tbWk4HKrf72t7e1v33Xdftp/PRqmXUx9woIe6vVw3LrTXY4C+sMcpPuKBXm+H29mz46vVKvvizs5OxWYOROiqKIpMIICu+zMHA9Zjg4EOA4bZbFaZdeCkjZ8S3xCiVN0XN6WUga/RKPdQ9WWhfp/rpk7K+J8LfSVOAPXZbJZBHp1TJjrzuuozdtwuLt1uN2PIbDbLPo3uiEufLUO/KNvjnjgCP32QTNvAMvqLHhkcMeuDAQgDU8dbH5SEnCzBacFpwWnH+g1OC04LTrvcEpwWnBacdqzf4LTgtOC0yyvBZ8FnwWfH+g0+Cz67rHx2aoJpOBxWGuRgR8dojH9WV94mhRLUfg+KImio158HLAgQD0zKIPPudfGqgypKAkQ9g8d1BAMCKvU+OAh4H7yPODL/o1cMjtHrxIKzcBCh7xdKXTzLoWh1cPP+0lbqrDuYA/0mW7qD9vv9nIFmuSh1+qwF9Es93hYnzrr+l8tlts/29raGw2GuF5Kpt1FSxdZ1/0On/X4/BzL1O1jXBzToB9v4oMbB3n3ZSZp6IEIfENFnQP7o6Chnl70ebMXyZ+rtdrsVH3RiAiA5cHE0GmWAx5fdZsQVuveZE06KxCL9kpRnurid6+2qx0s9htCZL8MGYCE4wNbvpx+ud0mVw/A8PtB1vXwGDXxOfGGjdvv4YMt2u11pJ3pFt9RBzCL0j5kGTuT1AacPpt1Odd8POV2C04LTgtOC04LTgtOuigSnBacFpwWnBacFp10FCT4LPgs+Cz4LPrv8fHbmCiYcstkslwli+FarpV6vl5cvjsfjuw4Jo8Ge2Sej6cstUZ47hRseBWMkd97pdJoNwpJUjIoBl8ulRqNRzkASOJKyURuNhobDYQWMZrNZJYvLUjjKQQ/eX3di9FbPQuI8ZAbJEgLSu7u7OROLjtD1tWvXcn/JRONEjz32WCYtCMWJArBhWSBtpQ87OzsVECJwCGCWHiK9Xk+DwUD9fl/SMbD6vqtIo1EuR3RA9exuv9/PeplOp5WAX61W6nQ6
GgwGeuCBB/Jn7Ku6WCwyETr5A1TsK4kuXC83btzIS4A5kI0gwz/ILBNwLMd0InHgoG/EBksMu92uDg4OcqadLLB0vOS41WrpiSeeqPTN7yuKojJjYnt7O/sRQT+dTrW3t1chzb29vdwX/Aq74uNOcvi5g490POMC/2UpqxMA9/lg0AcQ+Ap2AAjZC9UBkGWZlA1+rFarfA8+y36oi8VCo9EoZ+BZ5omebty4oa2trbz8lQMcZ7OZ9vb28rJfsMUxjRh64IEH1Ov11Ov1dO3atey/o9FITzzxRJ6l8aY3vamyJ6z32wdSDA6Iufqg0nWATX0pfX0gE7JZgtOC04LTgtOC04LTrooEpwWnBacFpwWnBaddBQk+Cz4LPgs+Cz67/Hx2aoKJgPPMKUGE0RaLhTqdjra3t7W1taXxeFxZPoZjsu8ioE9wY1TPZrpTEDQe+N45lm0BfDgogYNhcNqiON7HcrVa6eDgQNPpNDu7L0HrdrsaDofq9XrZKA7eZLMBNkCR+jGKgz/OTBYb5yHjuViUS3F3dnbyteVymYFse3s7txWgp1/Pe97zKmBLUEPCOBdZUMTBd3t7u9ImHN5tl1JSt9vVYDCokCNgMBgMso/QH17oabFY5L1lARleLEWlHZ7VxVboFLDCng706DSlVJl9ACgQdAAKe8k6AAHKk8kkD1DwX+r3wQbP0j73ZUCKv7QboONe7IWusRdAjA37/b5u3ryZwcABHHvSZj/kEB/GPt5urwfd+MwC4qwoirtiUSoHDeiIfuKn3W73rtkp2Mv3QqXNjUYjz5agDfU48z2JHUwB3dlsptFolPvbbDa1t7eXfcEHTgzuiqLIe6mCR4C6z2CBOJrNZp4lcHBwoJRSHnTcuHEj7xnMPZTJUviiKPKyWOIGm69W5SGKPnBmf2EGP/hiyNkSnBacFpwWnBacFpx2VSQ4LTgtOC04LTgtOO0qSPBZ8FnwWfBZ8Nnl57NTE0yeUaURnqXFMNJxNpJncASc1TPLdMaz7+40OAzlbOoIQcHMAgdgd0KU5iDgwOIgVw9WP2gMg+LoODAO6M/i5Dg6fXASo7xNwUWZ1O9ZU0iCeyBBqdwnlDIGg0GeYQDpAPQIzk3woDfqhvjQGX1AP/xPOZIqMzkajYam02l+FuDl/l6vl3Xghwu6fgAjSIugx6eOjo4q/oAtvK3olMw3/fTlxvgjvkRbeI6ZHZCE70fpfcIPsSd98Pp4edYev0KcVJ0oAc1Op5P3hcUekAX9wffoYz2WaXcd6OtxVgd66ZhgabvH9lkvH8Q5CfrAkL5DTl6/xy59ZkDgMyTwa/pB3fP5POvw6Ogo+zo2cqxz/PF4wRc9zpmR4vFFvNIHjzPXeV2H1LlcLvPsJ3yaAwEhJceRkNMlOC04LTgtOC04LTjtqkhwWnBacFpwWnBacNpVkOCz4LPgs+Cz4LPLz2enJpgA7uVymQPvJIfhfzJgjUYj79m5WpV7DOLI3EtGEqVRFsHCdQ8G7xhLU3kGAMTJJVUU7QSA0zjILZfLynLH3d3d/D9L3QAClE6bMQyO5U5LXyFBngF8Ea67Q7jhm81mDgA+B9gdmHhJyjoExJi1gYPWl0GiJy9TUs6aUudqtcoZXxwScmSGAEL70aETN3qqg97R0VEOvvF4rPF4nDP27tiuX2YwuG3rPupAyIsg8jLJ7OI/xAI2gowABwCJ9jnoe3+JJwLYQb4+mKAfi8UiL1Vm+Wu73dbu7m7ey5UYcyCEWOhH3ab1uPL76oOl+vMOonV9Uo6TU10PbpfVapVnTtSJA9/yz+qDNGxU75/HhtfJoALfYrmv6959bLVaaTweazabZYLwQwkZ+DGwcj24Xzhe0D5ildkpPvClHz7LgPeUyYBz02A45G4JTgtOC04LTgtOC067KhKcFpwWnBacFpwWnHYVJPgs+Cz4LPgs+Ozy89mpCaatra2soEbj+IA5Gs01DN9oNDQYDLLCAAIyyzdu3MiGAijG47H29va0v79fARX2MKRegoPlWexvyLXVaqWtra0MJn7gF45HBpBAabfLw9vqCubV7XazAekfyxr9vlarlQ/Ba7Vamk6nevLJJ/WGN7xBjz76qA4PD/P+imR3+evkxfLPGzduqN1uZ8dZLpc5gwyZttttDYfD7LiHh4f5cxwPx2IZIPaoZ7Qhr/l8XiHmoijyUtXxeFzJvGMDbAToOSk2Go1MJM1muRxxNpvp4OBAjz/+eEUHvhwTGzWbx3u39no9tdtt7e/vS1LWF/UXRbnEFSB1EpSUlwvu7e1Vgr7ZbOrw8FCj0Uh7e3sVH1+tVpVlvYAJ/uAE4wfjOWj6jJHV6nhptA9uGo1GBvFer6dGo1E5sI0lytvb2+p2uxXQcB9E3/iDH0YHUUnK+wLTDwSd8wII0ZeTNEvS5/Nyn058H0LvdrvZZvzFr4hX92X2C/WZAZKyDabTabah2464rg+wfPDiNiHOsfPe3l5eis4giLLYS5W2uL9SNkIb6qSEjzLDBqxZrVbZxkVR5Ho2DUzqpM3ny+Uy2yfkfBKcFpwWnBachr6C04LTLrsEpwWnBacFp6Gv4LTgtMsswWfBZ8FnwWfoK/js8vLZqQmmojjOaLoicQ7PGnqm0JUNCJCdQwhErtFwnqMeNw5B5YfpScplX7t2LTucAz1tglzIPKJ8BCPQPvrVbrcrAMBSN5y00+nkWQ0Ak2f9b9++rf39fTUajQyYnvWlfcxiADRps2fumRVBBh09ANQ4AiDuDoOevVyeZVYBZdDP+qGI2JlAhgxZMtputyuzHRzoW61WPtSMF21kxgPiszaoH7LlfwjB+ws44k9k7N0f3Wfa7Xbe79QHDfSfvqCnOog4WaIfJ3DKkJSXDa9Wq4q/E/Q+68MBpz47hLLRgf8P0JOZd0IA2Ofz+UbgqMey+550PMMHH8K3vD3MjnE/BMDoqz+Lztw36RN2duCj/95u2sY1xyb6iA38L+3Bx+v6hcyYPcCz3md/+QDBMZDYnM/nFQx0f1ssFplI6Scx6+U7EXAP10LOluC04LTgtOC04LTgtKsiwWnBacFpwWnBacFpV0GCz4LPgs+Cz4LPLj+fnZpgWi6P93L0AHBnd6OiAAKzKI73OsSJaByZcsARR6EOAoKgoAxAvijKA6ogil6vl2cS4Dw4tjsPGUNfPkd76wKwkpmnPdTLjAJAHaIA+G/cuJGN686O4znI150XBybg0AuZZA9M9MVzDnL0HSfi5YGEngAJAscPaINo0a3PWnDgKIqictgiS1lpr2eEOUgtpeq+pO7wBAr+4X+pl/ZyDb8F1A4PD7PO6kDf6XRyH+uzO8i2+8DDg5kY4DMCE1/DLjzPEkMHVuzPLI6iKHJsuN6opz4LpP48NiQr74AAoAAwxANx4ES+CcyIAXzOgciz7fR/E0lTL/5XJ0/KcHDDX9GbDwg8fjbFERhVH1T5e28Ts0UkZR2iIy+bchBiETyp6w1fdMz0PuJbjhP1QYO3n7/0MeR8EpwWnBacFpwWnBacdlUkOC04LTgtOC04LTjtKkjwWfBZ8FnwWfD
Z5eezUxNM+/v7OTNPxQAZCndAIOOGsVhWBiiwRyYZ/Nu3b2cHBawpgzIJZoxx586d3Ln5fK6dnZ0MQAAM18haswy21+tpe3s7G45AB3jrRie7mFLSZDLJ2X+pzApLx9n17e3tvFy12Wxm4z3/+c/X1taWdnZ2tL29nYNwNBrlzD3tJliOjo507do1pZTyHoxk9ZmR0GyWSywl5YDa3t7O7QW45vO5RqNRXj4KiPKML5uUpNu3b2cAd6dKKVX2f1wul5WldjgnSyR9CSJyeHioW7du6eDgQHfu3Knor9fr5aCv28WXkAKCgBz+VhRFZVklekA/BBL99ow+/gOZ0fadnZ3sE/QDfbiNmfXgZVEXNuU+9MeME+530ksp5SXfxB52ov30H98AlBxEPB48c14flKFzH7DRl+FwmHVzeHhYIWgGNPj9arXS/v6+ZrNZ3m+UQQHxzaCOWSDEIW2jX9iIWRFOQNzH4ATBNrSdvkmqALgPtHzw2Gg08lLt1WqVl5jWZ+DQNycpJ0t0ge7xQeyE37EUltk3DCboI/332QRuSwazdeILOVmC04LTgtOC04LTgtOuigSnBacFpwWnBacFp10FCT4LPgs+Cz4LPrv8fHZqgskNjyNgHBqDs6BoDypmCtQDD2N4Ro4Aazabms1mGo1G2Yk9e+pOiVN49hRFkAU+Oir3hWw0yuWV0+k010MgoSyMRD+73W42Fm10B6MOSXkvUs844lzT6TTvg8qhaezbyT6jEMlqVR7sBYAdHh7q4OAg14WOMa4DMXr3oMbBfH9KBykvN6WUde5gJSk7L/0j640tqY+ghMypi2t37tzJB+fh1AQbhI6/YEsIt90+3k+T7Cz3SdJkMqnMdGBAAri5j/qLvhOQDFJcz7ST/yE7AMkDnuXPKaVMog5K3Odg4P6PvgEOdL1clstuGejU7cez+J6TtYOT96cOWp5951n6xiwNJxT60ul08mDKicjBy32rjh/eBuyM76IfsvQer+CLpIodsJH3oa57PsOXPCvvOoT4iSu/x33KfYO/jo3YiL54+7rdbm6TD5zwG2Kb+3m22+1WcDXkdAlOC04LTgtOC04LTrsqEpwWnBacFpwWnBacdhUk+Cz4LPgs+Cz47PLz2akJJkkVZ6QCOlEHHIzjgCMdgzrPtVqtnJ2tL6lbLssDpDiUjOsoYLVaZSXRQcB7f38/1+tkAwB0Oh2Nx+OcTUfpAAEZbUgAIHFFUibtof+AlKQ8g4JyIJzpdFrJrJJ9dIOS+UfG47EODw9zMBAYBJwDjC8tdeciAOknfQbQvBwCC1vgXN1uN4NMPUC8H9PpNNcBsWA3DrUDXFi6CYl62ymPdrEXJaDldeNXtA/fpByAyXVXDxzpOHDxVbdvu92uXMPXKa8OiAQiugWk8V+uUQ71UQfl0Vbk6OgoAzqgJyl/5nHiPusDgPosC9cBRM57J2P3cY+fdvt42TY+BQnRHwdTfIN+OmagQ3RL+3zwUu/XJkBHd/zvNt1UF9cAdwYf9BG9QBLe9joubhpcoAdAHhxwbHS70050QJvdR/FLygw5nwSnBadJwWnBacFpwWlXQ4LTgtOk4LTgtOC04LTLL8FnwWdS8FnwWfDZZeazM89gciemQ56JJ4tII1nOiaGm06lGo1EGIxqJwliqCjgWRaHRaKS9vb0M9C6r1fGBX81mM+9zuVqtNBqNKgHuCqTeRqNRAXOWxqJ4+tZqtXLGH6ekb2SLe71ebgdtm81meUnfcrnUk08+WWkH+6imVC5HRKf+7J07dypO6eBE5ni5XOYlgwQPbcCB3bkGg4EGg0EGRydBZmpga+py4IQoKXexWFRmUnS73Qo40yYOHpRKQnYn39/f12Qyqcwq8AwrdbVaLY3HY6WU7prB0O12K4GIjVjiWBRF7ocHnge/68mBjUP6NgWrE7Vneo+OjipBB2isVuWsEZYmetn1Pvf7/fwef3Zw59k6cEJkR0dHednualXOTPFBmQ/S6n/xD++bD8R8lki/39dgMND29rZu3LiRbesHF9YHQ5DC1tZWJmEnG49jnnM9MMMG/3XbOC6wTJo29Hq9/Bx4xL2OS8Q79SDcD3Y5YbgNGSA6ybjf+CCo3+9LOp4tU3/Vy3GswBcWi3J2Ev0LOV2C04LTgtOC04LTgtOuigSnBacFpwWnBacFp10FCT4LPgs+Cz4LPrv8fHZqgqm+ZIxKcQYcHqekIYAUCvEsOEE0n88zIJJZpgyy24APSvSgpA6WApJlI0taJyfPHq5WqxyogDhle8bS92mlDWROKaPdbud9K4uiyOTUbrcrYOMgAwE6SbFc1fclxZkx+HJ5vP+o2wUnp9+8pz72Ux2NRjo8PNRwOMz10ud6Zp12upPPZrPKdbfDZDKpBA/XKJP+s6drURzvm+rZcAIBATy9Lv7H6T0jTz98lkFKKS8f5Rr+jF/hw61WqwJ8db/AxwAVwMqJ2W1Chh09uP4gFXzJQYO2cz/lOfGsVqvK4ARf8dkoDu5uS8rGl9GN+w110k8nQQYLzJBZLBbZbg5UHq8AHLHhA0JvAzGAHrzt6A57u2852HKIYv2++qvT6Wg4HGY8YL9W9Ol288EjvgeBe2zj08Q5WMnMBF5ets9oQg/ub9RVx7Z6OSGnS3BacBr6Dk4LTgtOC0677BKcFpyGvoPTgtOC04LTLrMEnwWfoe/gs+Cz4LPLy2enJphwRhqP8n25VT0w3CDegNVqlWcL8B5QnM/n+XOe9+WUDiye2d4UCF6fG9eV485F/wh8P3iLTLs7KlnKVquVM3hu+Ol0qv39/dwvlm46maSUKod6oRtIz/d+9WwsQex6Jvh5ue28vw5AdUKgPuzrweE28TIdHD2gV6ty6SDtRX+SKv2iTZIqJIp+uYc6Ntnb/Q4QchDw2QkOJOjUpa4jgqoeeN7e+sCDz/kMH/I21zPDdX9FiDHKdtByf3B7uO0h7zpx0gYnFezl/aRMbyd9o4xGo5FJ5fbt22o2m5XDG8mcu14kVQDUfbT+v/en7mt85nbwASeDR+r3mPGl6sPhMM92wU8YWLif1MnX8YM2UbfP9vGZAY1GI2NHXceScuz7DB1i3IWYd9v5bI+QkyU4LTjNbRKcFpzmZQSnBaddNglOC05zmwSnBad5GcFpwWmXSYLPgs/cJsFnwWdeRvDZ5eGzUxNMq1WZ1Ts6OsrL/ciycZgZy/vceehw/T1ZdcSXtHnWlyWRAAfLVYuiqGRxXUkO6DiKByr3uMMA8D4jYHd3NzvCZDLJbeRZ6icDubW1pdFolINrNptpb2+vAmreDsiO5Y20A/0tFot8uF79WQzq2V2c00Hc9V8HE9elVGaSIVOpmknHwQCLk4J+Pp/n97SPdrjN6wCLEzMjBaBvNpu5navVqrJ3LEDiwICwXyntAIw6nU4FwOsZ7kajkUnJ/clBm2fpu/u2t9+XDKZUXe6Krsi6bwLSlFJe0ukEWG+Dg0w99txXiCf/HN8jRnz2gOvd/7o+JFVmAsxms7zfML5NvXVA9kGWpEpWnX44kREDPmPJfcfLRo+LxSLHGf
cxaMAfWKK+vb2dl44XRTm7xWfquA3YT1hSnj1RHxTRP7cNxMdAEl/p9/u5jcvl8d7Q7p/EEoAOBjqBu91CTpfgtOA0ygxOC04LTgtOu+wSnBacRpnBacFpwWnBaZdZgs+Czygz+Cz4LPjs8vLZqQkmCnGFLxYLjUajvOSQyjAYjQZUcPSiKCrghfM6OHsHAFWMSJm+xBADOqCRtXNCoNw68EnKQU4bMQIBh9G4jtG5t9lsajKZ5HZAHhjfg5S2LJfLynI2n8ngoF8nq7q+yBTjTOitHlje1jq4UBZ22RTY3n7uR8etVks7Ozs6OjrKL4ib2SAenN4XB3a3NeLkB2ihIweQOvFTj5flgwRmKngw8uJ5yuZZbz/XPJDr7ULHTtae0U8pVWZM0Ef347rd3f4QE4dHdjodNRqNiq58wIR+ALpN+wwTx66LegzW/ZD3ACSkQT1k0LEngyL2H/aYcL+gbB9wePzyHIRYtzVxhPh9bltm71C+LwV3/yJ7z33oeZPfsHwY2xPvLM2mvPl8nnWAzT320LPv/+vCABQ9hJxPgtOC04LTgtOC04LTrooEpwWnBacFpwWnBaddBQk+Cz4LPgs+Cz673Hx2aoKJbCEA4gDuDswBWoCgg8xoNMoAwF6aOIErCECoB+RgMNDOzk5W1p07d/K9DkQOVo3G8WFigKE7fF1pDl4cxIUi3Uk98MhWsp8pjuvl0W+eQVeSMjngMLS3DsJOsrTDwZiA86wiYONO4EGPg/C/L0OmXNcd9XpAMaMEXWBHXwbI/94u9x3pOHh9GbPfV7dhnXwIaicw9EqgLxaLymwEr5sZMnVSRcg6Y1v/HHAnQF13tJWDHdGd3+vtdb9x/aB7yvaZOyxrPGnQw9/lcpmz5bSDAY2kCpDOZrPKIAPBxpTpB8VxL9n5Xq+XdUYM4OPMLvDsPf2oD8TqgEc7XH/ogvvRNbNH3EZcc3/ymTV1n0cn/nISp7567Pl7BOB3svH4dp/1+HUso0xioB5bIWdLcFpwWnBacBo6DE4LTrvsEpwWnBacFpyGDoPTgtMuswSfBZ8FnwWfocPgs8vLZ6cmmMgCYhyW8rlyUYY7Tj37Np+Xe4KyL2g9M4iD8xeQ6Ha72Wnm87kmk0mexeCB6U7JZ+70ADHKc6U4uAI4/M+SwWazWcns0k76I0lbW1vqdru5Dojp4OAg1wcQOjjiiOitDjT1oMchVqtVLosg82Csk5uXjcNhY/RZB3rv82KxqMzwAMTRE32kLoKq3W5XbOw69M+9jNXqeDkpOqJd2NHt4L7kzxA03LdcLiszWKQqgdN212GrdXxAnLd9MBhkWwCO9MP1zuFtBKwDdX0GA2V4bGEH2sN7f87jFT11u92KPrEHNiBesRX3Uafrp65LBmOud8rpdrva3t7OAx9iFp9zwnFbef99sOI+zv3eVh8M0Cb6V8cXjyEHR79G28Au9x2PEQYM1OW2cixCJ2CI26s+gPPl7R5Hbpd6fNfbH3K6BKcFpwWnBacFpwWnXRUJTgtOC04LTgtOC067ChJ8FnwWfBZ8Fnx2+fnszC3y+v2+2u22RqNRLtSXnNIQDxCUIUm9Xq8CjBhlMplUHNMzvPP5PAf01taWOp1OnoWAIVarVc5QuvLciTz4MAafubFpB0vkUCJZ/zrYUr8rmfbxDNnR2WxWCVQnQZ4nS++EUO8L96I/gJ4A5v464UnHsw+wgy9RhJCwIftYSqrMguh2u+r3++r1eup2u0rpeBYCfUeXBNlyubzLbvTRZ0R40Pn/CEGH7utSJxGCD8IHmIrieM9I7AAQ+4DBgxswr/sXmWkGQZtmeDQa1Zk1dZtMJpPKcx6wrgNs7CDDe88+k70HIGlLs9lUv9/P7XB7dTqdCokNBoNcb6PRqByIyQGR3hbKoe52u63t7e08QGu32xnE8A104zpZLpeVutDH0dFRZSZHHWi5r06O2Bd7OjAz0KrrGl3RP8cR7ESdvlctfeG+6XSa2+rg7HFeJ0gnJ49XJx1wy/1wUzyEnC7BacFpwWnBacFpwWlXRYLTgtOC04LTgtOC066CBJ8FnwWfBZ8Fn11uPjs1wbS/v6/FYpEz5J4V9uwfwD+ZTCpLIt2gnU5HnU4nB850Oq0si+M+OkYGcrFY6IEHHlC/39fOzo7uu+8+HRwcaG9vT7/3e7+nvb29DKbS8cFnrgiMQptwJPoBwDNzAMEBaZ8rFYNjBM/uAvIs06sTmaSK/nAAd7xNwEf7aS+BRJ8dHBzYqQMg8xkP1IFjQrBORE6WODTP8ozPLqlnuKmXMrmHg8cgPnRWP/SOOr0dbk/aUSdx7IPt+KzVKg866/f7Gg6HuV0O7NgRoHByrIOHDzYATcrZ29ur6JlYoXzX0ybQbzQa2V7Yxf2MmSg+28IBq91ua2dnR1tbWxWw9Ky1A4qDV6fT0c7OTm4PBx0St9TnszPwxW63q+FwqFarlQcDt2/frvhMt9vNf4ui0HA41Hw+12g0yv2az+caj8e5fR6fDAiJdfRC+xmksmzVB0qOS8RYnYwd6P1zjwv+93ipz1ap6xnxeHCfdV9mZlPdp92W/j7kdAlOC04LTgtOoz3BacFpl12C04LTgtOC02hPcFpw2mWW4LPgs+Cz4DPaE3x2efnszDOYcGayx/UsuCvPFUYjWJpHJ7nuDupATx2U3W63tbu7m/ek3NnZyQCxt7en8XicAwaD0Hk3HuJBRx30x8vgvb/c4JSN8rlGxhUn9fLd8CeV7UbdVG/dObiXftNuQMOBHps4CeKUCLMTIBCeHY/HOZCwlxOIk5brxcWJH4d1spKO91ytA70PMGi7t5v20H+vE5+lrE6no+3tbQ2HQ21tbeWyF4vjww99j0wPag9G+uqgTLu4r56x9sFN3SZue6+vTl51//S+ut/QHgdl6nNQRz+8HFRdz+12O2f2WcaOD9Xb4DrzQVLd7xHs02g0KsunmS2ETwD07g+u3/oAxGcQOT55373tXrbHeR1HThrMMABzW/igt+7/7lM+84E2M2Dgf2+D1+sEEnKyBKcFpwWnBacFpwWnXRUJTgtOC04LTgtOC067ChJ8FnwWfBZ8Fnx2+fns1AQTzo9xUSAdobP1bK0HDge5OSB5p91YXFutVjnbf3h4qP39/VxGr9dTr9fT7u6u3uqt3ioflpZSqhzYVjcEGeuTSIb6aUvdKSjTDVJRZOt4X0kHZQ8Wgg7juS4c0DzLC2BzvdfrVTLezH5YrVYV0Hbn8ADAgegvMwfQOXr3rC1Z69lslnUACTArhLb68mNvD/qoBxlA32w281JG+sNztIHPmDVRJ2WkTjT0s91uq91uZ/8ZDofq9/uazWa53+inHtiuN9rjszPqgwnvnz/j5bge/HknB+732Qp1IHVbOenynux8fU/WRqPcq9X3l6VdDlSIL8/EDvU9YmezWfYTQJpsuC+flZT9fD6f58M6qQcbQKZIfQBBzNNmdD4YDLS1tZWXyuKjdbzy/qLrTYMwn33g2Ed/6gRCW30gVB+8eGzUCfPo6CjHk89S8nZuGkSGnC7BacFp9Ds4L
TgtOC047bJLcFpwGv0OTgtOC04LTrvMEnwWfEa/g8+Cz4LPLi+fnZpg6na7uVKcD8f0RuCIXEccPFiG6ABGAz1jjWPS8L29Pd26dSvX+8ADD2hnZ0fD4VDD4VDv8R7voeWy3FfykUce0f7+ft5j0hUK8czn89zWZrPcC5PrzWZT4/H4LmdqNMrlgoPBoBIoXt7R0VFuY7/fz2V4phOd4cTMdkAnbkQy2Sz7A6T4DMdwQEGHy2V1GSiAyzJQHL3VauXlhATtzs5OXsY5GAx0dHSk6XSq27dv54MOF4uFRqNR3reSYKIf7qT0C1InkCRpMpnk2SqABMsvPQDqs0IcyNibtdfr5fqK4ngZL8A+GAwy2HvAkj2n/bTNyZlgAmyxv5NNPcBbrZZ6vV7WFzHA3sB87iCCn/lyTPyWun2GA+SE3wDYADhldTod9Xq93NdWq5VBFh1QJm2h/YCMtwHQpi50RfwCqg4+3W5XW1tb+eA9t2mr1dK1a9fyIG8ymeS9d4fDoVarVZ5tgm/VgZV+YffhcKjd3V01Go3s8wzqlstlPgyQmT/Y3oEen8UfeBF39QFdp9PJhzCis8lkkuPP90V2nMR2xGyj0ajMaDk8PMz7xGIj10F9UBtysgSnKfcpOC04LTgtOC047XJLcJpyn4LTgtOC04LTgtMurwSfKfcp+Cz4LPgs+Oyy8tmpCSbPaJL5pvMeGP7eHc8b5S8PEL+P/3k5cKHUVquVM9uSdO3atRwYKMvbBgl5ENPGelau7pwYFgC8fv16XrIrSdPpVOPxWPv7+5X2e/n+Wq2ODyVMqcxuUz+fEczL5TKTRr/fzyDF5zia14VeFotFbjdOmFLKQIOTQyIAQKfT0Y0bNzLID4dDTafTDPCeTXWpA510nCU+CaB5X58VQf+8X/gDn9Fe/q+DMoBJXwE9gMB1Trl1H3Y/xhfq/QUEvY1c4z4AnnsJegeKOmH4zAJvY92HvYyUUmVAAGFD9q6jVqt11/61Xo+3332MQY50PFOHPtE+HxRiH653u90MdpRDX53gAOD5fJ5JGJujJ56nf4A7ccLhgoCtzxzyuPcBiesAndRjjPcO/L6n69bWVmVQ66TBftDohbp9YLNpcONE5vhIH0LOL8FpwWnBace+UO9vcFpwWnDa5ZLgtOC04LRjX6j3NzgtOC047fJI8FnwWfDZsS/U+xt8Fnx2WfjszDOY3BkdsHBOB/pNYO9/PajqgO4NxqG4Np/Pc/2j0UjScVYXp3XHAUAdPFyZ/AV0PUPK7Akvq9fr6YEHHtDznve8nJE+OjrSaDTS/v6+Ukoaj8cVB3YDeaDhbMxiIKjcwQAPDjvb2trKgeCHvqFvJ0QCxEEQcOH6YDDIAeIO1W63tbW1pcFgkJf64VyUgZ7IdLrNcfy6EzqwOWA78LO8ETJ3ndRBCKfnf+qiD9hsOByq2+1WDqtD3+is7sPu16eJExXvnaAoG2CrD3A2DQK8bCcW7HMSIPB+MBhkUrt582aePVAU1VkHrl/07nap68fjgbYSk/gG/kh5+B2DiUajUcna0x7qbbXK5aX4JAf0SarMLKINPMcMA/a3BXQ7nY5Go5Emk4kODg5y3NQJy3Gorl/Xs/91OzWbzexjDC68fPcln9HRaDTyLB0/UI8YoG0+WHJf5/M6roWcLsFpwWnBaZslOC04LTjt8klwWnBacNpmCU4LTgtOu1wSfBZ8Fny2WYLPgs8uE5+dmmAaj8f5wKrV6jgjT2MInJTSXZk6B1I62u12K0ol61gPOAyHwqbTaaW88XisyWSiW7du6Q1veEPupC/xw1HI6nl7af9qVS6Z297e1mAw0PXr13XfffdlUMOI/X5fN2/ezI5I+ejk1q1bevTRR3X79m3duXMnEwfB547gAUr/lsvjpXNk87e3t3NG340IcLCMjddkMtFkMqn0EWfhf54BJIuiyMt6V6uVxuOxDg4Ocvu63a7G47EODw/16KOPajQa5br5H3sBvv1+v5Lll44z0xAvYOfto/9Ohh4oTsTMkCCL60C3u7ubCYp7yWA7oTlQHxwc5AHBfF7u+TqdTivLXQlisr8AlQ9K0PNyucxLOa9du5Z1vb+/nwcEPhsCe9UDF1vSDgRw89kSTmjodn9/P/uHk+bW1laOr9FoVAESjxsfqEiqEITHrQ848AcGCPUMOrMDiEl8Zzwe57hHGo1G1r0PmChve3s7zxbwGQqTyUS3b9/W3t5ejlPuRTf4Yb/fzzM9iA9/7zpYrVa5DB8g0e/FYqE7d+5k3fZ6PU2n00y+PnBCf9wLJkHK9Ac9oRfaTzvPGpCEVCU4LTgtOC04LTgtOO2qSHBacFpwWnBacFpw2lWQ4LPgs+Cz4LPgs8vPZ6cmmCaTiebzeTZGURwv/aRCgsyDEWeo/18HH9/XkODD+B7M/X6/YnzuwxiuAK7jHDgs+2Gyj6pn2cme7+7uVpaPTiaT7MDsseqK5tp0Os3Z006nk/XkZAHgOyiS0ceZABsAGAf25YaewaX91IPTScqOOZ/PM1gDYGRVAXf012q1MnhLx4eezefzvB+jzxrgugchLxf8gzqxn/sPZTabzQqoFUWRfa8oisryS3QI4JFF9oEJMyx8f1bIZLksD2HDz33mDLNT6E/dn2krZXl7KaPZbFaWTDroUiY6AAi9zx5XdZ+nf/P5PM+eAESWy6XG47Gm02nWAeVKyoCK7+CDkBbtBOh8oOFx6HHnPsRnEOd4PK4sv/Z9Wre3t3Xt2jU973nPy/sSj0ajPJiDABwE8bWiKDLgTSYTHR4eZhuzzy79hbCZLYMvzmazTPT4RqvVyjrzwSy+S9x5TPIXknc7MsOC5x3o0Rux5NiBLafTacU+jhOb4i3kZAlOC04LTgtOC04LTrsqEpwWnBacFpwWnBacdhUk+Cz4LPgs+Cz47PLz2ZlnMOFQZGy9ITQSB0LqmVU6DShTBqAFaDhQoEheTiRep4OeX3cAlZT3Mn3ggQd048aNSiBtbW1VDvZykPDZDg5KGI6XAz0ggqMASh5gZELJBKeUNJlMMhB7/TzTbDY1m82yYd0WXK/boyjKw8/29vZy8BweHuZnOYis3W7nzCeZYRxZUl4C6yDnjuYBQTA4CGB/2ui6RH/Yug56ZLCl46BFn8y4GA6HlQCjvYAN/sZSSq6zzytB6eBHABPkdR90/bg96r5J/8gOe4ygB/yfa07cPrDC3/ifmSEMjOjn3t5e7jdx6vGJ/gBcbApgQXzuS16Wk4PHrA/8PDuOfTudTm5rr9fTjRs3dP/99+ut3/qtM5Chc2KawZPPVqKfkNJsNtN4PM5tn06n2ab0j/7u7OxkXQHyYBBtrdsHspaU/cR9g2vUXycI7nF/cJ/zgbTPzMBXsY+3DXzxWAo5XYLTgtOC04LT/DnKCk4LTruMEpwWnBacFpzmz1FWcFpw2mWT4LPgs+Cz4DN/jrKCzy4Xn52aYHIHJ4NHRz1YaTQN9ZcHsmfBeY9zObCjMDK7HkjS8R6gy+Wy8pw7PEpoNpsaDAa67777dP/99+v5z3++rl+/nvs2Go2y
0ch2UrY7Vv3lmWn6ybKzRqORy8FBcDza5M7HTAkCaDKZVPSEky8W5bJY7kU3GJuMI7rgvtlspsPDw0wkAH12glZLvV5P29vbko6XwWE7HN6Xhfq1TUAPENIP2kkw8T9lkfVtNpuZcP1ZdOFLbT2DTns8QMi042fY4ejoeA/K6XSa2wawAE6ewYa4HewAZs8I8x4djsfjbD9moqSUMunVfUBSzuDXY7AeXz6LptFo5KXgi8VC4/E428CXAeMn+BN6R5+9Xi/3k77XX36dF7MY2EuYttK+oigqhx5S1+7urq5du6ZWq5UB2wc8xBBxgK6ZZcPAxMnbbYVPuv7oA3obDocaDAa53Qy0fBAJpvAMPojQV9rBvb4830nEZwJgC65jF/46YXrbiXP/POR0CU4LTgtOC04LTgtOuyoSnBacFpwWnBacFpx2FST4LPgs+Cz4LPjs8vPZqQkmGo5ToTQHEzrsgFAHA5TgjWo0yiWovk8ihnCgaTQa+Tr1+KwDgJ5lb37YFhlnlqiyXyROKkm9Xi9nmAlMB3ey/u12OzsUmV3ayFJHnzlBmzxglstlXkI3Ho917do1jcdj7e/vZ11Rh8+ESCnp6Kjc73Rvby/33ZfV4ihOIimVSyDvv/9+tVotzWYzzWYzPf744xVS4wBBn0kAyTmYu96pqx70nvWm3R4M9Rki3AuoAn7UXwfs1WqVly+2Wi3t7e1lO3vgIQ44dTAgu+0zMCBugmsymWQ/9H6hA0mV/TvRC22ezWa5X/fff38mJmYqoN86gHs2GcCjzd4OjzMy/wCux6rf75lryLbZLJdU33fffVqtyiy9L9WGvPjf9bFarXL8LJdL7e7uZjBnn9Z+v6/77rsv7weKnRuNhsbjsR577LG8V6wfXLlarTQcDrO+Dw8PK8Tn9mIptYMz9fR6vfzC1pLyoZLYDsKp+z8+gj9Op9PKzBone7cP7fMBRN2HfIZLfYDXbB4vd+YZcBA70c6Q80lwWnBacFpwWnBacNpVkeC04LTgtOC04LTgtKsgwWfBZ8FnwWfBZ5ebz05NMHnWC+O601AZjfOO0ln/HOfiPXtY0hEC8eDgICt0tTrej9LBHQDmf4C+3+9nx/L7CEbAzp2UzCqZTJSGIRyo/YVu5vN5ZUYB/XcwoC534NFopNlslp3IdeltT6nM/rLUFJL1djkR43z0kYO+0APL+1jSVydrBy0PHO9X3bG4122PD6ETd3BAGf1CJO12u+IP6Jf6ATp0Px6PK1lyL7sO/Jv81f+nra4D7Iy+Xcfc54SFrjb5LAMbJyX3c/cpJ2IIgZcDtusbcKDv7gP4CXogljmgD18j813vE7qGKLEbNsX+1NFqtSqHTLIk1geH4AEDoIODg7v23HWfIX7djq4XH1TW9eJ2p+0MinzA4nhHhh8fpDyPYcqlDvcRyvQ6675TB2z3PY+bTT5bj9eQsyU4LTgtOC04LTgtOO2qSHBacFpwWnBacFpw2lWQ4LPgs+Cz4LPgs8vPZ2eewUTlDtC+Z6AD4abGudI9WDqdjvr9vgaDQd4TlOVnzWZTd+7c0WQyydlMwG04HOYlb4PBoOI8W1tbGg6HGg6HeelfUZSZutFolI3nCgPcMZ4TVUrVw83I9gHYZIsJSncgB8S6wxGEzCCoOwCB1m6389I/ZhKMx+PcZ3f2xWKRM/0QBfVMp9NKQBdFkYntiSeeyADq9naQdH24E29yQu9//VmuUUY9mLHp9vZ2JUNKIPDyQMMG6Jny6+QkVcHPbQtw0R4nUuQk//Z+S8dLFvFxBzVmpDhR+YwD2uCZ5U0gRfn8pT8sh04p5WXTtNN1VhRFXjLKzBv8jb4wePJBwN7eniaTiUajUZ61gM4BePTX6XRyrBKfDKp4Xyc8lhCzRN0BzePV/d6lDnreX2aESMoDiU6no62trUw4vkRYOl4uP5lMdPv27ex7vozV66V9TmLedh8w4J9OLI4h3g58APGBgc+WCDlbgtOC07x9wWl3+3dwWnBacNrlkeC04DRvX3Da3f4dnBacFpx2OST4LPjM2xd8drd/B58Fn10GPjs1wdRoHO9X6UDoBgDwHQDcCR1wpOPD0sjs46QsF1utVtra2spL1vb29rLhm81yn8zt7W1tbW3p5s2b2trayhloCKDdbuvw8LASRGTPZ7NZVtZiscjZURTne5zSTgDXg48sLP0AQJfLZT7AC+Ckn9RJILq4sdzoGN6BEj3RF9re6ZQH+O3s7Oj5z39+Zd/J3d3dXD7LdReLhQ4ODnTnzh2Nx2ONRqO8FJDyWZLHHqQOfk7gnvWHnCVl/THTw4FuMplUZotsb2/r5s2bet7znpfL9yy3Lz9GnyxxxE8Iislkov39/QxIgDd6d+Jgee4mwiIgfR9YBy4fPBATAKd0vK/sdDrVwcHBXbHjRO2A4cAMcEBg3OcxCXAhgI6knC3Hdgyo8MednZ1MDIvFIg/Anve85+WlpUdHR/qN3/gNPf7443kpKTpigEEc0N+Uyn14iROAlDjBX9irFwLhHmxWB3QHRpaf9vv9PDhYLBZ5j1t0+cQTT2S77e3taWdnR8PhULPZrDIjiQFRSuUy5eVymWOo2SwPCPQBE/7ADJ3HH39cBwcHFbswqGKQBh5i+00DTHQEhvC5930+n+d21EkuZLMEpwWnBacFpwWnBaddFQlOC04LTgtOC04LTrsKEnwWfBZ8FnwWfHb5+ezMFUw4OgWR6fRAp1Eo1smgKI6XXWI0HBTnbbfbms1md5FBr9fLysCpBoNBni3AHp6Ug2MA0LSj2Tw+aG00GlWydz6LgOCoZwu55svJ0At9xmCUWc96OyHSVhcvD8DihZ6dMAEz/l8sFjkgCTJIj4wpYEnQQ5yS8r3s3yop64vD/+gjwUabaD9B5VlmZp3w3mcRuH5Xq1UOBGzH/f1+X/1+PxNkq9XK2eobN27ktrvtmIExmUxylt59CRsA4PgvweWkTL/QGzMPvN/ef9cLJEIfPbOMONDzOX5GnHldlAEJbJpVgNAWr69OZpThvsmyVXSNzdAP8Yt+vF58gPIBpfn8+MBOdMLn3l/aRl/ruqD/AC5tpMx6P+k/7Wa5N/qkn51Op3LYIHG8Wq3U7XYrOMM9kjIOMeugPsBzm3v73Me9zR4T+KHHDbiwWCwq5YScLcFpwWnBacFpwWnBaVdFgtOC04LTgtOC04LTroIEnwWfBZ8FnwWfXX4+O3eCyY1KkHp20gHQDUQmrtUqD70DICRlEGg2mxqNRtlgdArgaTabGcTJHJJV9vaQhfR2OviyzA6D0z4HHRTLNQ+eujNiTAxCWWSP+Ut9kjLY4oCIA7iTYd3J0R31QCAEEUFGXRDfYDBQr9fTcDjMsyScaHEyBwU+l0rA8n0o6Q9tcn3SZj7n5UDm9uHZg4ODnOWm3Gazqa2trXwo2nw+z4HlM0i2tra0WBwfygcg0wbA2Ymz1WrlvXPdptPpNB+K6J/X/aru+14P9qzv3em29MFT/TriNqf9Hmd+n9/vvuXA5EDTaDQqxMmzTm7MVkEnDIzqtnd8wB+xH585WdJ
+j79NoOfXPU6crOuk4Lr0NnjcMNvl6OioMiAaDoeV9nosuK7wc0l53+WUkvr9fp5p4W2o98315xjmJO4DDsc1x63l8ni5asjZEpwWnIYEpwWnBacFp112CU4LTkOC04LTgtOC0y6zBJ8FnyHBZ8FnwWeXl89OTTDRkZTSXYfieUUAGhlBD2DAiE6QreQAOMC/KIoMSmSOd3d3de3atbxXKhk/DswbjUaVTC1OCLA74DoQcbhefUZDq9WqOBPKRgcss5TKzDsOPZlMKtlQQLEOjugRYOn1ehWypE0siaVebMFn7uj+l8+Xy6WeeOKJiqNJx3tU0gYc22cXdDqdbKvRaKTxeJyD3Zeksuy3PquE7LwHPP3nEDV8qO4n4/E4f+7BgV90u11dv35du7u7mbgI4tVqpXa7nXU4Ho91eHiY2+5BBsizNHE4HOYD4VJKGo/HlSW80+n0rrLcn9zHi6Ko7P8JcaaUKjMTIAWkvpzUwc2BwP0CvfgMANcVAxWWodLm6XRaIUnaWBSFDg8PdXh4qJSSfvd3fze3ZT6f686dO7mvDAKwJ3XjZ5Tt/SLu6sRWFEUGXupy+2NnB3n0xGwXZgoBlP1+P9uBvrsfMtuGmSoAOMTvM5EgPfTKUl50MJvNtL+/r+l0qr29PR0cHORZLdTRbrd17dq1Skw7cONfPsBzH/DZRcQHdqddIWdLcFpwWnBacFpwWnDaVZHgtOC04LTgtOC04LSrIMFnwWfBZ8FnwWeXn89OTTB5YLgB3TFwnGazmQHYQQngICDqgUbQs3yMz9ivk7K8gwAwYIpyyKLz4nNAutls5tkGOCf34Awo1hXpgefP8hkkA6j5/qEEW6tV7lnoZMhehmQBafN8Pq8AE3qAdNATMxK8Hbyv9wVnZSkrZQwGg4qjUy+ERXu73W4GUfpBuVL1IDsPSCca7JhSyvZ1H+M5n03C50VRaDab6datWxqPxzmr66TAAYsQB+2vE4f7L8IAgTJdFw4SDlDuy65vJ16vg/tphwczbfR4q5O3Dxjq1/ExZlgws0I6JhHq4X+3Nf5BPNQHCfglNoQ8IHf0BfAQx257ZtVs0gk6dB9x/6+LDx7xax8s7u7uZrAmdrA7+6RSDnGHTWaz2V3gib4YzPnMFJZzM/gEC8BABohOHN4HdIONiQHaTp0IemWgwiA85PwSnBacFpwWnBacFpx2VSQ4LTgtOC04LTgtOO0qSPBZ8FnwWfBZ8Nnl5bNTE0xuRHdQP5AqpXJJIJl7ggsg5n/PyAMyHtCeYSTTyf1cp8OTySQfxucHtkmqLBHlfgeRuvAcivS++hI+NwxB4XU5sOM41E8bMCplASwsuUVXy+Uy14szEwToBAeCoBqNRmXpKgHlwUpZDsg45yagd2Lp9/u5PsgWWzrIu3h9ECPER/18ThC67h2Y0QFLSPkcnTSb5T65TmoePOjTgwQ/nM3KA9dms1meLeEDCR+84Ce0mcBzoQ73F9roseW6roO331evw3XHy0GefUyJISc3HzQAarTVZ93Qbic77z92qgOY+4Q/ByDXQb5Ojrz8et2/HCh5lgw/fd/Z2cl9Go/HlT7VByceY+47TjCUxecM3JbLcok1WLdpFhC22gT0jhGbCNZ93PGYGTtbW1vxxeUCEpwWnIZOgtOC04LTgtMuuwSnBaehk+C04LTgtOC0yyzBZ8Fn6CT4LPgs+Ozy8tmpCSaWm+FANKTVKveQHA6H2traygdRScpLGEejkW7duqXHH388Lyl1Q3sQuxJ4/s6dOzlTdt999+XO7OzsaDKZ5OWD7Du4Wq0yAeFg/mKZIYb0rKmkivKY5eCzJjAwz0yn09zWoigqpAbgIl6PfzabzXK9tBmjuqNset9qtbS9vZ2XBy4WC926dSsDkzscDoMuvCyImoACHIqiXI44GAzysrx+v5/beu3ateykPlsEwuca+kPvkKD7geugKIo8YwUA6/f7WV8sl6wTdErlElMPGPq1CUAcaMl6A1CUAdCzpNSDzGeSuD69H3V9dzqdTBiLxaLSb/SQUspk6GW7b9N2XxoKwDkBDIfDbF+fiSMdx+l0Oq0QAFl214frj3p9ZgLP+qwYB078IaWUD02EaNAF5Es/XId14KRc9NTv97W9va0XvOAFedlxURR67LHH8nLjw8PD7HfEOTMevM3MaGo2mxWCR4fogFk8POdExDJg7EU9kAN9o9zZbJbtjV4ok72TPZ44aPTatWu6efNmXhIccrYEpwWnBacFpwWnBaddFQlOC04LTgtOC04LTrsKEnwWfBZ8FnwWfHb5+ezUBJM7sqQMdmTLATbfr/T69etKKeno6Eij0UhvetObdHBwoNFopNFolMsjiFGOB6VnIlNKOXt8eHioO3fuZEUQRCjQnRBwxZEcfAmQemauvryOYEypepjcYrHIQI/z+OwFAopgcMN6HZ4t534nVA9qwNID8uDgIDspbcM5Go1Gthd6pRz6xKvT6WR9ch828PI9iwpJS8fLkSEHn1kBEdIG9DCZTPLn+Fo94+wkNpvNNB6PdXR0VNEZIOEgjb7Rv/eDa+7X9IO2um4og3LQBbqq29kz/pvqw38Wi0W2vwNYvU3Eit9HX9EPgxKffeNA6GV4m7Cpk36v18v/+/1ub/cTJxZvkxMAxMzSUQAdH/H2YVf3f7cJJIDet7e3NRwOtbOzo/vvvz9jweOPP669vT3t7e3l/Wx9wEVcMLihb/iN291Jq05OtJn9WiElxyf05C8wZbFY5D182+129i1fFtxqtTKYM+Bl5gT9ZSAbcroEpwWnBacFp9Ge4LTgtMsuwWnBacFpwWm0JzgtOO0yS/BZ8FnwWfAZ7Qk+u7x8dmqCqZ5JRBGLxUKTySQbkcBlXz439mAwUKvV0tbWliaTSS7DlUGHqafValUUCqFIyplPJwWeI9vG3/oLB8EhMa730bPvXJOO9ymlrb5PqGcaqTsruFVdcgkROaF4PdxbF3ca3k8mkwxuDui+9yKO43rwgPIgRxzoHIS8jw6crm/eu14o00nDgw6buT94UDUajWz/un6wP5lahIBdrVYVAKrXhe7cHx3UXCd1YgJgqYey6rqs29HBw0ncde/+ULcX4OoDnLoPMDjyuEXvzOzxvXrrJEJdXibl4IcO8D4gWq1WedAiSYPBoDLIqg9s+J/Yc334IKzT6WgwGGR7drtdbW9v55lOHNZ4dHSkN77xjbp161Y+MJKyaSvEyJ666NAz8vgFMcVnzChwGzMDwH2gPkjDn13oJwde8jyDpfl8nvdlZZaFL8n1/ZBDzpbgtOC04LRjnQSnBacFp11uCU4LTgtOO9ZJcFpwWnDa5ZXgs+Cz4LNjnQSfBZ9dVj47c4s8gKkO9tPpNAP3wcFBViBLq2gQy1nJkBMwR0dHeZmpG4FyxuNxXirGLANJdymK5ZPNZjPPFnCD1YMPpwJ0Go1GBVioi89Q4mw2y+BUFEVlGRvZUO73wCDLznMeIA44rl8PHILTr1GHE+dqtcq6x3EBIJ/h4G1DH5ucEhBC6k7K/q0OzlIZyA6i7FtKsKJPbOX30i
4c3AEa33C7AuYELqBDP5j5QKB6sLmdXb+I2xLxwKevLG3GTmR00acHoMePB78PMNwOrn8foDjYe9vwQ/pE3fgavsqMHgdTB19eDmCuCwdpnwEgKZOLgzBL2Q8PD3PcuF3I5BNj1Ecd/X5fg8EgzxgA9Cl/sVhoPB7rt3/7t3VwcKDDw0M9/vjjOjo6yrNZGAh4Nt6X99ZtQtxgBwgGoGe2Cy9mxuD/2MqXtPpesz6gwK97vV4FHxnkOtD7PrdsI+CDlZDTJTgtOA0JTgtOC04LTrvsEpwWnIYEpwWnBacFp11mCT4LPkOCz4LPgs8uL5+duUWeZ3IBEF9i5w49n881mUwqn9M4N2hKqRJgOKyXicEdcAEDFEUge6bRM48ICksp3bWUrr7kzI1I//nMgc6Xa56UAcWxHNC4j0Cs98+DGvFyuYYe6Av3YS93YPpPX7CfAx26BnioD0B20AVU6uTP3/pepd5+dIZd+BzATCnloOF+fAR/ct2zLFcqZ5lQL8EDqHufsE8dUJ20nHy47sDq79EffuADJCeU5XKZ44B9Sv1aHcx5ORDX/cJtwMwA+uwDKPdlMt70Hb9x34ZE8Tl/T7203Zd6tttt7e7uand3V9evX8/EzOwj90fAHnyRyoHRcDjMAL+zs6ObN29qZ2dHW1tbGgwGFV+/ffu27ty5o8cee0y/9Vu/lZemTiYTDYdDNZvNDOzMHHAs8sGb+0JdJ/7Z0dFRJjg+Z1BVJ19sgR7dhsSeD1o8NtCn70eNb/kgwn0i5HQJTgtOC04LTgtOU+Wz4LTLK8FpwWnBacFpwWmqfBacdjkl+Cz4LPgs+Cz4TJXPLiOfnXuLPBwQQHVwWy6X+bC5+t6KgB1KpsxWq1XJnGF4dzRXoIMa7XHHA3wonwOseMaDwpcYkmUFBIbDYa6PQ+uOjo7y0lTaAdA60dWNyT2AqPfD70EH3Ac50Dd3KJ7hPYZuNqvL3MhgAhRODH6YmQc/AYjunQwdvB1AyPbSJpxzsVhkZ0X/9VkaBFuz2dTu7m4GODLA3EdwScoO775I2WSp0Rn9qgegi/uFB5lnkfm8HlBOpG5jJKXjJY5eFwTlevO9Ot03iBW3Lff4wKfeBmzGZ9zvxFIURW5HnUAgHuIEv3Hirft7s1keYnfjxo0MrMQih0DWB2c8xwCl0+no2rVreRbStWvXcuZ8uVzmwxan06n29vb05JNP6vbt23riiSd069atPBuBAUF9NpQToeMb/faZDr7v7mq1qswYqQ8G3WbEIp/5AIyXt4V4op3gkA9uOGCPgy8ZUBFjPrANOVmC04LTgtOC04LTgtOuigSnBacFpwWnBacFp10FCT4LPgs+Cz4LPrv8fHZmgonKCGhXHo6Dk7IszMWBlQOpcIh+v69G4zh775k7HLMOhk4ins1D4Sh0tVpVjIjSF4tFbgMK9Iw2Bi+KIh/SNZ1O8/MESN3APOMGp131V/2+Oih1Op0cnHVdEpDU57qgbH9RPoGVUlK/389l+IwGAJV2EJTYx8kHqTsvet8EHJAv/XJ/2tnZqQTjZDLRfF7ugXt4eJj1jkO7/Wk3fki/HRTxY+p3IqsDugOjt7eeSede6nG9eAy5Ddz3WGYtKfsaZblefUaEH17oROPLtPkM/bHs1wENPdE+v+71DofDPFtjMBjk9tYHLwASMwgcH+bzed57FQKF5CBsCA29bG1taWtrS9vb27nvk8lEh4eHGo1GOjw81KOPPqonn3xS+/v72tvby/ZLKWVAZIBJeykLTHPCZXDn9zrQ48fozgdIYJvHrNsIbMMX3BbYgbamlLS7u5uJkvjY2dnR9vZ2xrG9vT0dHBxU/DHkdAlOC04LTgtOC04LTrsqEpwWnBacFpwWnBacdhUk+Cz4LPgs+Cz47PLz2akJJjrpswFouDsxIML+pp6tq4MbDcJwBHuv16tkOF1B1IdxCLDVqpq19fLdeT1AHbCcvAh8L2MwGGg+L/cvPDw81Hg8ztk7/qet1Ol/ab9nCB2gkZP6Ih0fZFcHNs/UA2Is45OUM9gAEbaDrACyuxxiHXQ4pgMCRIFP0O6jo6N8TVL2Ae9T3Wcom75Mp9Ps5EdHR5pMJrmPq9Xx4W3D4TAHHgOL2WyWg9/1RR/QvfeBurF7q9WqzLCoAzfl1QO0fi91oEf3W2Z3FEWRM97YwfcBRW/YC3AkptxvHHBop4MUAwDvF36DHiiz1Wrp+vXr6na7eUYAWX36gK0ODg4ykRRFoX6/r263mzPe2AhiqreVmSHgCm1hFgkHd25vb1f2ND04OMj7MN++fTu3IaWka9euZd0DvLSBw0HRDTrp9/u5f5AStnSf5hrtrw+gHAN8xhFtw398wNHpdDIhbW1t5RkTrVYr65Ay0Wu73c6Ef3h4mO3gg+SQkyU4LTgtOO3YJsFpwWnBaZdbgtOC04LTjm0SnBacFpx2eSX4LPgs+OzYJsFnwWeXlc/OfQYTDonBHbg9O8p9DgKAg2fBAVVmGPiSPgx/dHSkO3fuVAzjxppOp1lxtAUjsZyLz93RRqNRPiDw1q1blSWT29vbWdkEFrMPOKBrtSqXk9KOwWCQM984ls+M4HmMTbv4S/udcOiHkxzPoAM+IwPp2ed+v5+dkz7gdNjLg4u+ui5TSnl58WAwyEuLPeMKEE8mkwrAAIIEXJ10F4tFDr7lcqknn3wy+xWZZ/TguufgNuzVbJb7jXK4m4MlbVsul7ku9wkIzQHbBzLon2ecuNwXCfZOpzwADt+BgNyeBCU+5LHjS0ZpD/+TUfZsPiSOHfzZfr+fQQqwoC72FSVjPxgMsm53dnYy4ADWADO+dHR0pLd7u7fLbSCWafNsNtN4PNZ4PNbBwYH29vY0Go0qMy54DgIfDAZ37aFb9xfqxmeWy2XlsE2Wx7bb5eF0HO53eHiYy0ZntIOZQu6jxJDrLwPmetBbB1f05f5UFOVy0q2tLQ2Hw0yikBFldbtd7e7uVkiFwRK2n06n+bPRaJRnVUDSmwZtIXdLcFpwWnBacFpw2rEEp11uCU4LTgtOC04LTjuW4LTLK8FnwWfBZ8FnwWfHcln57NQEEwYh80jQuSEIOAcIrvv90vFekoBaXbFcI6OLA9VBHOergyJBinIo2wMVR8ShZrNZRfHNZjM7Ok7hsw9oJ0QCmWF4PsMRHUQAevpJeQ48fi/i/9Pfent43p0H/RMk9H+5XGb9ezZ/sTg+UJGyCUQyu/SBWRSURzmUTZsJQl7eXpwUHyOLC6C483KdZ1x/gCxLEr0/AK7vk3qav+M79MF17fZw23I/L0lZR14v/eQ9umKww/PeTkgEMh8Oh5XZEK53jzXeO8FSzvXr13Xt2rW8FNSBnkEXfjMejyt7mUKePvPH+zMajbS/v6/Dw0MdHh5qb29P+/v7Go/HWVfommdXq+N9RusDOsjAwd2xxffGRUeuHx/oOMF4m90/Gaw6jtVl02CAdhO72KLb7WowGGh7e1s3btzIAxWfnQXQuy3dtj74YTYFPu0+GXK2BKcFpwWnBacFp1UlOO3ySnBacFpwWnBacFpVgtMup
wSfBZ8FnwWfBZ9V5TLy2ZknDqIoFEgH6xk3BwcXB0RAisw0ymDplVTdOxLwdKDHAbnXFe0gOpvNskJd8WQUXYG0m2y8Z1I9M+118V6qZjn56xlTiMbFs/mU61K/H6ck0HjPvThsv99Xr9dTt9vN5NbpdPTkk09WlkeuVqucKa6TNvom4AEgz67WCYfnHXR4Hp/AlmTF+/3+XUCBbX1WCk4OcDKrgCChPe6jR0dHOeMLkNQHIZteq9UqA2+dHJ2Uj46O8nsH+IODg+wP6Bv91P0RPXDAHIfZTafT7JM80+v1NBgMdPPmTfV6vUrWGvH2+cCFMvgf4BkMBur3+5V9Q51Ij46O8sCn2+1m3yEuDg8PK7NTsCHgfnBwoIODg5zRp11uB+zvfu++Vh8QOZkBzHW/XCwWOfPugzoEW6E792Enx3qc00cGIf5iv2Ri0El1d3dXW1tbunHjRsVW+Kv7LEvhuYaeAXZ8qiiKfNDkpsFIyMkSnBacFpwWnBacFpx2VSQ4LTgtOC04LTgtOO0qSPBZ8FnwWfBZ8Nnl5rNTE0yetSPr6Rlhz4jVxTvt2T2/7llojEuHPfvnswE8I+iZtrqxCAbArNvt5kzsYDDIwcj+mavVKmc+KccDAKV7nQDYZDLRZDLJ2XWCwQHeDcFnHqieYXcd8yx6mk6n2RGLoqjsBeqgSQD3ej31+/2cgbx165aeeOIJzWazvDQVgAF4+B+90l6fPUAAQDL02fWEjTwQyfgPh8MKUXNQHplY2oGe8bdOp5PJ1NtWD0zs7i/81X2l3+9XMrg8ByCyNBIfoA1+GNz169crgMogptFo5KDfZE9iIqWUZ2kURZGXPeOXvmzXM934poMhemLPWI9l9EAMsafok08+mW29WCxyuZIqAxx80Pe+ZUnorVu3tL+/ryeffFJvfOMbdXBwkJenoltIxAWyrvfBdeU29MFlvX0si+d/fAlgR/Abrjvg4h+0t+4/+A57v7ZaLW1tbWlnZ6dCmgiHSDoped+JH2ZbEJd14Mb/dnZ2MnZxQCMEEXK2BKcFpwWnBafVfTA4LTjtskpwWnBacFpwWt0Hg9OC0y6jBJ8FnwWfBZ/VfTD47PLx2ZkrmFypKMlf7jjcX39uUydxKozlxNFsNivZRc/mMrOBZ+ptckfB6CgGhXs2stfrVTL0lC0dzzogyLzPOIN0nCHHEepgiG4cmFy4f9O1TfdJx8DmJAcJ0C/Kg4whuPF4rEajkfsKcNRJxvuMHRDuYYkjbfOZFLTP+wZwetkeVPW6IBB8Y7Va5Wyrtwd9ICw1dv/gPs/i7+zsVILKBy+dTie3l74yy2BnZ0fb29s5Q4y/+SADImF2BvoB+DyjDdkCKAx0yNR7fDkZ1rPi6Mvjz/tXFEW2ETrEHrSv0+nkPtSf5X5iBn/a29vTnTt3tLe3p8lkotlsdlcddTCirdjdwbse0+6f3j90W2+rP0NdXMeerjPvo/fNY9nLY8DS7Xbz0t9er5f3/kW2trZyW5wA8E0IuK4vjx2fcVS3Be3yOkNOl+C04LTgtOA097/gtOC0yyzBacFpwWnBae5/wWnBaZdVgs+Cz4LPgs/c/4LPLh+fnZlgcmB0RbrQuKIocnbMA9YD2Bvsym42j5c2ko324CDby6Fu0vEBV644B3raTAbTQSylMsPIckmAZj6fVxzOZzO4k3lmFtJxcCF4HIDq+tsUiHzuoO5AVf/cn5nNZvk6IMUMh+3tbUklAbCf5tHRkabT6V2ATnn1IHTb4dCQG4HBQWu8rwsgM51O8zJTgssDlj46+TebzRwMnm31gMcHPHuMDQDwVquV9/28ceNG/oy//j9A32q1cmD3+/08I2UwGGhnZ6fiM94HBjhFUeQD6Hz5LHpnxkVRHC9Z9IEN/ucDJPTpQY+eNg2UANzJZJIz0B6b1E0fIVYnGWKHcijrzp07unPnTt6n0wGL2KWt+FI9jpid4LGDLR0nfBDncVmPFfrOc3zOslnKqA868C8nWa+LmRoA+3333aft7e28z6mTi8cJ+vRBK38nk0muh7Zxnw+Qud+JCOwMOZ8EpwWnBacFp6HX4LTgtMsuwWnBacFpwWnoNTgtOO0yS/BZ8FnwWfAZeg0+u5x8dmqCyYObjrqxXbk4BMvF+Jw9AnlPUHa73Qw4zWYzO8NqtdJoNMrLsA4PD/OyRBybNrAskOyu7w2IQhGM6J/RFl+KOh6PK8/W96f0fkMMfqAa5QH69cwgbcFovHBGZl1Qvj8nKV9zkKNN2OXo6EhvetOblFLKMwi4h0w32eKtra28NHKxWOQ9PtHvarXKDnv9+nV1Op3KjIxGo5HB6OjoKB+2NpvNMhA40DgAs0Sw0+no+vXr2tnZyYe47e/vZ7Bz3wBsO52O+v2+rl27puFwmPf29CWH6AcAoV/0vdvt5kzvcrnM+2Y6QEGUZMHxXfS0Wq1069at7F8AaA6wNUgtFgvt7+9rNBppOp1WZn1IyjMO8Au3iZNUp9PJPtFoNPJS5clkkskCIsMuvoy6HssstWSWhC/b9HjCHz0Oj46O8jLpxx9/PB+y53bjBQn5wAn90XYGeoAl4AjYOfihb/d7n9FELDYajXyQoZMkNqctAHp9WTBtwP9ZnvzAAw9kP9za2qqU7zi5s7OTy0EvHtcA92g0umuJsvub44HjEPs215fjhmyW4LTgtOC04LTgtOC0qyLBacFpwWnBacFpwWlXQYLPgs+Cz4LPgs8uP5+dmWDyRgEQ7MeI1LPq7hwOUogbFyUDjMvlMu/riRHa7eMD+zzjjHPimDjOarXKewrSHozie0l6+5fLZQZBQJ86MRDP8XIQ8mDwe2gXTsd7HJj2etYYZ3QQp704FP2q14XOIY/Fotw/lSDCGVerVS53EwE2GsfZcMgXwO92u9rd3c19IeCwh4OMBx9lIui60+no/vvv1+7ubj7U7fDwMNvPZ3BgawKwvo+lg7xnpP21WCxyYE0mk0o21+0H8fmsCHTjAec6Z8khQHV0dHwA45NPPpn3u5xMJhX7sjRUUn7G4wkfcvBdLpeVmRXYFFvQLp/pwws99fv9yuwZ6XgwNZvNKv7rwA3BQIL+vwOa+7jHnesNP8XWvHgGveBrPO/7wNb753anTz7wok/1WRfECcuYHcy3t7c1HA61s7Oj69evZz/0uGF/VGKL8sEJn33je8BC/j7gQtfgMPpyX2MAGnI+CU4LTgtOC04LTgtOuyoSnBacFpwWnBacFpx2FST4LPgs+Cz4LPjs8vPZmdMpUCAdQQnuGBjFFQlIeLYUxRfF8X6MlEMZy+VS4/E410UgY3QUCwE4yLsQLA7EKaW8dNDbjxLJzAH0ZF8dWL0fGNCD3vtNPTgLmVpeTo6AD5l4hDL46+SLTtw2m/S+XC6zU/nzlO9ERbs9q10U5aF+knKmk8PyAG+IBpugGw+4drtdsQW67nQ6euCBB/JMgsVioe3t7Qyu+ABLkz2AXRcIOqhnZb3/fN5qtbKfMJPAM9j4D2DgIEKZ6KDu181mUwcHB5VD7cjqs0SYwPe9Rz2b7fXh87QDMnVC
cF1jHxcfhHGtPojiM7LYPoiCXOr7nXq8bSKW+oCv/pnf77EJ0EHeTnw+0HA7+yDTgdHjFd340nrKZanufffdp36/n/e+ZWkyh+uhP1/6iv+7r+GHkiokwCwWPm+1WlmfDBLQQb39+KvPVAo5nwSnBacFpwWnBacFp10VCU4LTgtOC04LTgtOuwoSfBZ8FnwWfBZ8drn57NQEUx3YaLRnwwEE7qVCGuVBSeDTaM/geZ04PB3yDLlnlFm22GxWD6Nyx60rpNVq5f0JPXBom5MWe3riGEdHR7lcDzAAl7a5sGyVvTe3trbU7/fV7/dVFEUlSDC2L1EE7LjmwUEGH2D2gPdMckopzwJpNpsVAHQnaTabFQBvtVqaTqd5VgZ6pK3D4TAftCcpt8Nnfrj/YHPu90CdTCYVEgaofNCAjbinKIrK8j4nWumY7Fk+SsBgw6Iosg/TduzI/9zvvotNsC3ZfXToddy6dSvPWtjb27vLN9EXPkQd8/m8MshAh8y+wf74JL6UA7tGjAhAhj8SC06cDo7U4/G9Wq2yX5DNxg4+IHKpxyd9d/+A9ABKbLhcLrP/O1FThscJZTl4o0NvGz7is5kYfAwGAw2HQ+3u7uY9b69du6Z+v58xh+Wj6BSApw/E5eHhYbZfu93W9vZ2vq/X6+W9g+/cuaP9/f2KHvEDJ1HIHR9zjA45W4LTgtOC04LTgtOC066KBKcFpwWnBacFpwWnXQUJPgs+Cz4LPgs+u/x8dmqCqd/vVxyAYB+NRjkY3Dg4IIonE18HFJTvBsaIXJvPjw/IQ5FOLhjHy/XnfUZAo9HISxHn83llBsLu7q76/X42ni8RI3uK0n1/SvpJsBEY9X7SB17D4TAf1sY1spntdjvvqwg4HB4eVoDOjeokl1LKGWhJWX/cyx6c6MKD10kqpePltdiBe2nnbDbTo48+mst2gGy329kx5/N53ku22Wzmfvf7fe3s7OjatWsZLGn//v6+9vf386F0+JEDLDpeLMq9a70v9MfBCztiE+6HNNyHfeDi11mii14ajcZdywmXy2W2G0s2R6NRhXAoXzoeDAGUxIBnxh3oIUWENvK/99ez2QxmHMx4MXhwG7u/EQeNRkNbW1t5dkG73c7X6vHvNqrH/klEwCAEf/ByPH5cd7PZLPtcr9fL8e+CTpwA8NlWq5WBHdvv7Ozk182bNzUYDPLMAidwZouklPIsIUl5RpAPZn0Zr2MEy1OJc7+Gr+NvXl6dePGNkLMlOC04LTgtOC04LTjtqkhwWnBacFpwWnBacNpVkOCz4LPgs+Cz4LPLz2enJpg8g59SqjTUlYeBMERdGTikP0cn6oagPg8y3+/Ss6Oe0af8esDjCCiQPtDmwWCQnaTValVAAELhWcrzfvp12uyfoQs3BhltZkNQN7rsdruazWa5fg9i9OMA5XpDaC9OTftSSnmWgreZutEn9fpzXo8fTEeQU4ZnPn3Z5nw+z/9TB3YFFA8ODnTnzp18aFsd6F2nkC/g5+2kTU5g+AT3cy+6xLdch/icg43bhXhAr5PJROPxOC9HZfYFbadeBz58Dj/hc/yQzz2GiB980fvn/u+DKN4zg8CBvh6DLvV6N13f9LyTkP/vbfdB26Y+1O1Xt1vdhnWgrIM/5UEozBpg8OdAv7W1VVlm7jZzIPY6jo6OKoMPiJz9b/Hp+Xyug4ODTALz+TzHsg90ffBcJ01Jd2FLyOkSnBacFpwWnBacFpx2VSQ4LTgtOC04LTgtOO0qSPBZ8FnwWfBZ8Nnl57Mzz2DCyK5Yd0wqJUh4j3E9ePhbN5Tfj3IkZZBnmSWggPiyLYLMy6fsVquVjVMnKs+Ue1u8H6vVKgMg5TWbzcp9i8XxMkr0QPkIREM/B4OBms1yaSPAUQdXz6q6ntAn/aTuekBSht/n17zvOBjtARjqYELdTqyU79lQMutcn06nlQEA+562222Nx2PdunVLe3t7Ojw8rAC9i+vP++p+CpDiS5CqB2Xd/+gTfa+DCH2DnDqdTiYhDjzDx+oB6br3mQD18l2fXj82onxszZ66+F99RgXtZMYJYLizs6N+v5/9wn2gDtY8x8t9ZBP4AFZO6NjFBxUs8XWyoo9uR9dPncg7nU62ictqtcpY4IMJ+oBO6kDfbDa1vb2twWCQ2yYdLwnGtg7ERVHkGSKrVXnI5+HhYe7/ZDLJs3gAdAaoe3t72b/RB/g2m82yLesDE4/HOiGEnC3BacFpwWnBacFpwWlXRYLTgtOC04LTgtOC066CBJ8FnwWfBZ8Fn11uPjs1wTSdTrMDURmgivLJvjugbQIjlI1D14G5nsVFKRhtU2BKqgC1dLxnKEDjhsGJyK6STaQts9ksL1/r9/t52RmZYHckhLJQvjslAEO9OJqknGFcLMq9WjGyAwrZXs9gO1g3Go3ctqOjo7xn6dHRUdYr9xDYLCX1wHMnquvYwQeQoS8Q8WAwyCB5eHiYddVqtTQej3MZZFPZ9/HRRx/NAUgGlswq7XDQRd/YHF+s+w/3ul/gk1zDJgS4P8vSRwIe+xIDXj/luF4QAtEHGJRRn03B0uV6uxEfXHif8EU/FM+BtN1uV5alsjybdjNLw8VnVfCcVIIP/fC9QNEnBzpOp9Psh7QXjHDSclvR5larlQ8e5IXf0n/XD7pfLpc5g0/fOCCP/UfRybVr1yoE1uv1sn5Zjks/WP7NzB7XDTHDwZlHR0e6c+eOfu/3fk/j8ViTySTPdKGd+GCz2dRgMMgHTPZ6vYpP4x/00fVEWxuN4yX4h4eHCjlbgtOC04LTgtOC04LTrooEpwWnBacFpwWnBaddBQk+Cz4LPgs+Cz67/Hx2aoKJQglid2oPGpdGo1FxRg9WHKbRaFSCw5/z+1erVSWzjeM5mLojedl1sKo7ZT1Q+RywpT6M22q18lLLTU4qVWdDuA4dQP3l2WE+Q984m1TdS9X75vdidHThwVtvj7ernkH2euvPO+jTDn/vNvV7vW8OqpB5SikDPNlaD3QnOADDM+eevd7kk+6HEHI9e45+8VO/34mGNjhI+8CC/Wbd3vSXOKI92L1OUO6TXtamgQ5tJ7NeByCAHnByP8JP3D8ARP4y0MBPiUfsCbi7TokV3yOYwSF284ETfajb3IEVoEd/tB9/c19DH1tbWxoOh9re3s6ft9tt9fv9Cinil3U/8raOx+OsF/dPnmOwuL+/r4ODg8pyZcc/7AjhMEtKkiaTSba9D455ru5P6H02m53o9yFVCU4LTvPng9OC04LTgtMuswSnBaf588FpwWnBacFpl1WCz4LP/Pngs+Cz4LPLyWenJpg8KMl4u1Pg+A783F8Pcg8yFOXPO1DTOTckHWF5npMPDoWSVqsyQ+ztqgOsk0y9L95+2sPSxHr/6aMToJexCXzqQM9n6Nudjjbwmeu50+nk+vx/D6KTwME/qztRvX3Uualv2JRrblcImjoADQLXwdvt79l99O/3ATj40aZ21ftV758/Qz0c+FbXPz7r/UopVQARAppOp3fV5zZzH3H/cx93MvBBh4OF+4AvvfQy0LMTANfQofs09zS
bzUqZ9YEZz6ADngMYmZ3T7XZzfB4dHeU++mCJujcBPX8BYgaKiM9MwPd8QDgcDvPhjtTtpIku0T+xw73M9JnP5xqPx3lWxWq1qrQvpaTRaKTRaJQPimTWBbOJ0CfPtNttbW9v5zIBeWYG0b5ut5v3aq4PbusD75CzJTgtOC04TdlnvV/BacFpwWmXT4LTgtOC05R91vsVnBacFpx2uST4LPgs+EzZZ71fwWfBZ5eJz85MMA2Hw5x129/fr4AIzk0mi8YR3DSEoKQxq9UqK8KNTBlk+xwkWq1yCdsDDzyQAWM2m2l/fz8vKTs4OMiGAvjI7hKAnU5H/X4/K353dzcbHOfzZa2SMmlMp9PsDDidA4wHgzsVuvIyMbYT4Hg8zg4NiNDOwWCgra0t9fv9rBMABh3v7e3pySefVKPR0Hg8rjgxdqsHp5Mz7UZvZLrpnzsTANLr9bS7u1sBt8PDw3xw3ng8zkGIHekb5aaUNBgMTiXj2WyW/QwA8iw4uq/rHF1LykFEO/hcKgcQu7u7edmtEyn6dV1MJpNcnwP94eFhtgmB7gMWJxra5IDfaJRLJ69du1YBWs+m4zP9fl9bW1tZl575Pjg4qNiT62T4AfmUjvevLYoi26fT6WhnZyf3d7lc6s6dOxqPx9m3HFgR7NPv95VSOUNkNBrlOHSbeRmeNfeyiDfquHbtWv5sf38/LyVfrVZ5T1OWT/M8M2zcV6gLHHFQxnYsv4f8dnZ2MrD7cv2UUp45gJ+wfBXfIlbQ9Ww20507d/JMAsgBEmXQURSF9vb2Kvuo4sfcW5+ZEXKyBKcFpwWnBacFpwWnXRUJTgtOC04LTgtOC067ChJ8FnwWfBZ8Fnx2+fns1AST76uJMVCyG4POe8bRr+MsPAfweODjxK1WS9vb2xuDc7FY6NatWzn7hvHIoDLbASemXjKa7sgY2kEFYACQcSAM40HpzupyEvDzl+dpL8aUjvfHxMkI8larlYMUB+DzZrOZM9v9fl87OzsZOCmLYKBcB3i3Tx2clsvlXRl9B2za7qTSaBwfWNftdu/K9NZB2P3EgcPbxV+CpP4MpOnX8Aue91kItIc+8PxgMMhAj1+4veogRT1kyeszJ/wvtq8TLMsU8eFWq6WdnR3duHFDvV4vA5f7rPed7DSE6fbxOHQwd1+sf75cLvMSS4CO+yeTSV5O7Lp32+EvPrOCmKR86nRdOW7UyZ5ym81m3ruVMnwvV28vYEvfnfDq9qoDvffZ/YlD9dzX6686QfhBod5u/MZjy9vvpMH/6BB9c1glmBZytgSnBacFpwWnBacFp10VCU4LTgtOC04LTgtOuwoSfBZ8FnwWfBZ8dvn57NQEEw1xpeBUiBu3HpxuNF7+GU5MZp8OkwVcrVaV5yRlwK2DAuW506IUyuQ9TiPpLud3h1itVpk0/KAxF3fOOkBsetEGxB0cp8Zx0TkARVu63W7WlwdPs1keFtbpdHIAzGYzScqZZZwFHUB21AchQHR1O3smm7764X7YDV3iP/SVcuvARNDXdUndToAOvrSFQOJz+uP65n98gJkazNbgcEPuqfefvvkshJP8BXsxwKHtTkwO+B7wOzs72tnZyQDhB+Vh87q/Yjv05DHoAw23U93/62TKoYxuZ6476TsGMPBghhEgD9h6myRlHTvIuu3xF8qnDYvFotIev1dSHijVMQc/8Bf2olyPY+6H3GiH+y8DHvrtfujLhOtERtnEOX6B/Tzu+cwHePiw+3zI6RKcFpxWt3NwWnBacFpw2mWV4LTgtLqdg9OC04LTgtMuowSfBZ/V7Rx8FnwWfHb5+OzUBBP7+zUaZdbKO0ThDpp1RXpncVqCHVDyJZgoA6dCmcPhMCvr6OhIk8kkG9QBQlJWAsviAPrr169n0PQlp2TBaXun08nvPRPK0sST+ulLy3BCsn/0mWcxFgBGgDqALpdLTSYT3blzJwdjo9HQ7u6utre3tbW1pWvXrmUHXy6X2tnZydfb7bbm87lGo5He+MY36pFHHtHBwYFGo1Fe8tlqtTQcDrMz0ieueV/dET1L7Y6KPug3e2A68OA7BCJZUH9+uVzmmSXtdjtn1FutViZ6ygFcOdQMX+MaPsFAgIPWhsNhBvZWq1wGvbW1VckUM7NDUiVTWxRFfsb3qiyKQrPZTAcHB5pMJhqPxxXgPzo6UrPZzEukWXLdaDQy+Xm9PlhBH5QHufM/9fnAwQcCdRsSP8QwtgGgybYTtymlPGhoNpt5tg/kx33tdluTySSDvOsAXc/n89xWfJfZRJQPcU+nU+3v71dm/tD3+XyeP+Nz+sRghiWi7t/4BD4+Ho/zoGg+n2dMc70cHR3p8PAw6xD9oNder6eiKGcb+NJl7MPerGASfSReGo2G+v1+ZXbOaDTKS5/pB7GAf6O3mBl3PglOC04LTgtOC04LTrsqEpwWnBacFpwWnBacdhUk+Cz4LPgs+Cz47PLz2akJprd6q7fKIISSiuJ4aSgvFFIU1QOg+N+z1w50vDzQV6tVBYhYpodDkT2tZ0clVQzqGWyA3+ulHSjIQRvxjKbrgXLov5eJY7g+3PlcNhHHcrnMxq5n3aXSKWkPToMN2Mvz5s2buu+++zJhPv/5z9dwONTjjz+uJ598UgcHB5W2UDYE7Ppwx3QHdSDiGk4LUMxms4p9ACs+cwDy7LyTEeBNub5nJG2mDGyOj3p2myDCVtvb2xoMBur1ernf7hfcwywX9uZFJ/gmYI3OGIgAdAAC7WIWCEti6cN0Os06e/LJJyuH0+Gj2Mnt4UTse7f6Es6UUvZf7O6x6YDrWX/04bFB+dxHH3iGgY50POjyjD/2QO9Obj5bAl3jLxxGBzjTFuwlKduDfW23t7fzAMzt4PGM7rEVy0MhZISYpI88S4wym4Z4ZfYSAxR8k8/AFkgLoqTfR0dHeTDhuAWo02f385CzJTgtOC04LTgtOC047apIcFpwWnBacFpwWnDaVZDgs+Cz4LPgs+Czy89npyaYXIk4nSsAI9E4fw7ncCCiUTgGAE7GzYGecskoOqgStJ6ldpAkCL1N7vT0xR3G20sQDQaD3G5JGbiccCiPpWw4upOM1+Nt4H/07IBf160TDrMkcEoIi1kZ29vbWVfo/MEHH9Tu7q6uX7+uRx99NGdNR6PRXQDi7a3rib575t9JGAIiSJ0QcVDXIXaTjjP+EAl1cpDZdDrNs0j8Olnffr+v4XCowWCQZ0fgQwQ8QcFMgn6/X/Ft2tHpdHT9+vWcufVgSinlGQkAEbpxHwW4AbmiKPLemByaiM54FpCoxxRAXI8zgMLrcDDnf8AXP6VOX5rMvdSPvtxfuW82m2Wfpa34I7YldljyzoDPScTjmjLwewjLBzsAnhMD7WRWTLfbzSQNMdBnlp27v04mk4xBHMqJXesA6nrBX1xPzCwB4P16fXDLXsGOb+jPcdZJ3QfErn+P25CTJTgtOC04LTgtOO1YgtMutwSnBacFpwWnBacdS3Da5ZXgs+Cz4LPgs+CzY7msfHZqgmk6nVYcbVMDPeD8hSJ5ptfraTgcZnB3cB6NRtkJAE
rPYHuw3rx5M9fPkkAcgexbp9PR1tZWbjvtJONXBxkPIJbT4TgA6mAw0OHhoWazmabTaQZbdOMgTXbWHWBTWzAihvIycCrPONJu6t/b28v3sTwTp2ZWQafT0fb2tt7pnd5Jk8lEBwcH+vVf/3Xt7+/r4OAgLyuVlG3lwI9unNCk41kb2Bo9AWy0ybPz9BHbsvwX0AEYvK+r1Urj8Ti/CETayDPD4VAPPPBAJYNcB3p0uVgs8pJWlki73lkWeOPGjRxA9KvVKvcm5VDDoijyckL048s2ATiuezbYARYfhrTrgVv3U9pL9pu/7gNODvgkuqdMwI+6m81m1o23A7+DvBwbfGZNURR5YITefTmtDwDQU6vVyvdMp1ONx+MKhhCXYAhLhLe2tio2uX79em53fXlqo9HIbWcJPrM93N+YqdNoNO6aJUK78TefGQM5+qCi0WjkGUzoFUxot9u6fv16tv10Oq3gEhgEjjkhu71YWn/WTIKQUoLTgtOC04LT8PHgtOC0yy7BacFpwWnBafh4cFpw2mWW4LPgs+Cz4DN8PPjs8vLZqQmmW7du5eBotVq6du2aut2u2u1yL0VAguDHCd0ZO52OhsNhXhqIE/A8Dszepw70OIsbqtfr5QyddJypByzm87kmk4na7XbOKqKso6MjjcdjHR4e5voBOxRcFEV2dJzGs544J4GCUbgHkKNf9ANxkJKOM7NOaq4/HKjRKPfL5H9JFf0vFgvdvn0766Pb7arf7+esarPZzA7l5Xe73dwGXwJHmwBIz6SjO+7DjrSDPnMNRwRYfTaJZ7gJWvbdRU8EcrfbzQHoZDwYDLS9va3r16/n/XV91kBKx7NVfNZKSqmyZy0kAtDjCymlHIi0hT4fHR1pf38/A1JKKetbKg859Bkv/X5f0+lUo9Eo6+vo6EiPPfZY1smtW7fy/px10vfBFXbB79EJeut2uzlzjj0Y3OB7/pd48oEYwEb8OJlRFmCGrvFlAG0wGOSYWa1WGRCJGcpG6mVh+263qxs3blSWerI0td/v571siRUfwADSRVHO5gD0IS3ww0EWoAe8aUO/36+QSqfTybaif5DqYrHIGOIzCyB72uAzetABtsdHqYPn+/1+jnFfVhtysgSnBacFpwWnBacFp10VCU4LTgtOC04LTgtOuwoSfBZ8FnwWfBZ8dvn57NQE03g8zgrFmcjmYQyAzx0fhyd7BjmQjeQ+AgBnp2P1jCcAs1qttLe3V8lw8gzOCWjzHMpkaWCr1crAv1qttL29ndvCrITFYpH3s/T+LxaL7FwAEwHhbaG96KjeRgcFz7LjEEVRVGZIOBl4xpY6eE+mcrlc6vHHH78r0CDR0WiU9e26pK0e5HWAByQWi4W2trY0HA7VbDZzpv/27dv5Pl+aCEjh6NgcoGcpKm1ygqP9W1tbeXkp9wwGg5xlZt9S+kRgLRbloYqTyST33QGTDO5yucwA1el0NB6PM9Hh1wBAr9fLfdjb28tlkf2l/Qwq8BUGKq1Wq+L7jz76aA7o8XicgYdX3ZcajfJwNghpsVjkPW7RN3ajHnzJBzWUjaAXH0TgH/N59VA4bIcPOFBRtvuW143PEwfsDYsv83+z2azYeHt7O1+rE4LPivGYcT3yGW1iUFHHjHa7ra2trTzgGw6Heelru93WtWvXsg82m8289JuBHiToMd7pdCq6H41GOSYGg0FlZgkDMMqjTbSLwyF9hkzI2RKcFpwWnBacFpwWnHZVJDgtOC04LTgtOC047SpI8FnwWfBZ8Fnw2eXns1MTTAAdADqbzdTr9bRcLnP2joxio9HIDo3jAIoEnmdtHegBUZ5BSW5sOgJReGYOZaaUKtlpnHg+n2t3dzcbhgwh5ZJFvn37tm7fvp0NNplMsjP5/Ri72WxWyMVBH+MitI++rVarCsjTLrK2RVHkA8A8uAkOd0yErGqz2dTh4WHuf33JHTqsAz2AQT9oF3olk8x97XZbu7u76vV62tvbU0pJ+/v7FWL0dtJnAovyUkoZ8BqNRrYrumdpYkqpstcpSxLxBe6n3/RjOp3q8PBQ+/v72tvby36In7huIbZGo6Hbt2/nQILkySrzGTMJfObLcDjMAICu0TcDI3SAXh977LG8fNIHOYCu+1NK5b6yPtNgsVhkwtlElrQP26Ab7IH4Ncja6202yz1naSPER10M5pz40C12rg90ms1mBk7a57ExGAxy1pwBm/s+OqzvVUybfGaLxydle3zVgR7f293dzQSXUjlbhPIdzOfzecYF7EB/fYk/Axv62O/3M1lgF+4jBiHZGzdu5D2P6Z/7csjJEpwWnBacFpwWnBacdlUkOC04LTgtOC04LTjtKkjwWfBZ8FnwWfDZ5eezUxNM7XZbRVFUAoflnWS6HGAIAASgl0rSYFko2fp6thSFIQ44lD8YDDJgzmaz/H/d+Bx2tlqVh/fduXMnAwLLy8gQshyRZWtkSSEwbxc6AKwJVs8CovRms5kDgnLcyfzldRDIHoToy+vxctH1zs6Obt68qVbreH9JHB2y9mWBnt3Enn4Nu/vMBpxrOp1m8PS9SouiqASZ28dt5mTS7XY1GAzyYXQsS3W9F0WRiW82m+VBBnVxD58BpoeHh3ryySd1eHiYCdCzvs3m8dI/9DabzbRYHC8NdlBkaSJA78tKITsnSXS7Wq0y6TYajTyTAl360m0f3OBTPvsBMkS3R0dHuW/03wHI/zqZeSa6ruu6P7htvD0MJhg0YXd830nG6yEeO51OBlUGhA7ILB3u9/u6du1a1ikzROgzdS4W5ZJsSLU+W4blpticGGy1WpXD9lhaz/Pc02w28+AOcgOkAWofODjJ13HAsYC4rw9UwSdIyDGW/Zk9pkNOluC04LTgtOA0JDgtOO2yS3BacFpwWnAaEpwWnHaZJfgs+Cz4LPgMCT67vHx2aoKJwE4pVfawlErQYNkfYEkGjU4Q2DSCDCMZSozlWV13TgIYZTcajUq2cLlcVvbF9OBgFkBRFBl8Foty2d1sNsv3HxwcZKUyQ6GenQPccW4cCQPi0O7EnpWkbMjM+4ngvAQ5+xsSKMy88EyvZ2XpA7M0ALmjoyMdHBxUgIF20y+CmmfRPzagbA+WVqulo6OjvP8q2XkAmrJZOug+BGDjyGTfWQ5IJj6llIOIF+2jjfUAdPFM/Wg0yocUug59NsrBwUEe3PCcAzy+gQ6ow31htVppNBpV2kR9UjkAARzc5m5LJ1jpeH/SlJJGo1Gu12e6QJroAL/B74ifZrOZ989kMOE+6KTgmXKEMvAl9OOxUicY9yewoN1uZ3IHdPE/+o2Pu3/iT/gXfoQdfOBI+7gfqc+A8v7VZyetVqs8yGJwyEAQHPMZVO67rsdWq1U5BFI63lt2uVxm33aMAevQGWUtFos8eDk4OLgLB0JOluC04LTgtOC04LTgtKsiwWnBacFpwWnBacFpV0GCz4LPgs+Cz4LPLj+fnZpgIitYz3ijNM+Q8bkbF0XhcA4qi8UiP8M9LgAkDoyj1LPp7nxkggkk6mRJLAannJTKQ9QwLPV4/x1UMQT/O+HQP
trihq4Di+vHAwup9xk9oSuv14nF2+nZbs9u8vJAqusKXXgmfVPAU75nVlMql3N6YPHZzs5O1sdkMsltIpO/qc9OQNPpNM9EoFxJObvvAMRz9B8fwH94Hr26nfl/NpvlfgAMlO2DCvrOizbhF3wG8XOtrifs4qSMjqTj5ckeM24j9Oe6o3wfDPl+sn6PxwEZfvy0rhsf0BHnPng5aeDDcwA9WX33VXSLDYl3fBsdgCuU6STuMeE+jy92Op2sU9pYJwafEeR2JDYc29wPfbm565KltvTP8cRJHdLCZ/F5bEzffWZMyPkkOC04LTgtOC04LTjtqkhwWnBacFpwWnBacNpVkOCz4LPgs+Cz4LPLz2enJpgAjFarlTvDssK6oXEsBy6WQU4mk9xoBykCsw4+Hoibgpr3bhRJ2Wn4y/LJ2WxWuc/rIRg968vnZPI8Y+hORDmuaPruwOj98GVzAHWdHKTjPSk9G5xSuZTNSQZHJitMu32vTJyDdvd6vdy+Xq+Xs9O+D2cdoFia7GBG/6fTaWXWiZM2/tPtdnX9+vUcJOjaSc6zp5S9WJTLEZlJsVwuKzoEvOiP2wu/m0wm+R7s7HpD3/P5vOLPtBGd0TcGCQ4QdcBEsB1tpQ2+l67XTb3YHWAig46u8Bn3OSdLHzy1221tb2/n6ywLJa7xEycx2kj7l8tlnjFR93sHdR8M1P+njT5Q9AHGZDLRfD6vzCLxfqO7Ovbw8n1nl8tlBl0/DBA/QXeLxSLXJx2TNu2i7tlspq2trezj7v/S8bJb96fVaqVOp1OZDeGE6G11XaIblkEfHBzkvrOEGlJkaS2Dg5DTJTgtOI1ngtOC04LTgtMuuwSnBafxTHBacFpwWnDaZZbgs+Azngk+Cz4LPru8fHbq1Xd4h3fQZDLJweeN9UwgRql3fDAYSDp2cLK4BGsdxNwhcL6Ukg4PD/P+pdvb2/nArf39/bwMNaWk8XiswWCgwWCga9eu6fr167p586be9m3fNi/ZfOKJJzQajfIzOH9KKSvQDeABUAd3DMbyMQeYZrOZ9710Z3RAcKdhKWHdwQEyzy43m8cHkAFcnvmVpDt37lRIi/b7np/0p9fr5YBAn9gC8GVZJO3x/RkJBp7z5cQsJZbK5cO0yZcLExiTyUTT6TQHC/0djUbZv3zWigcjweEzTQhayqc+J1Oe51nvO9lmgF5SZcmw299B2+uCPLkGydXthZ8BqN524sFJGH2Px+NKewG3drud9xblYDbf05OYHAwGOVuP3miL64I2AkC+ZNj1usm/nQDQ82Kx0MHBgW7dupV9CJ0yK4Q2csAi+yBPp1NNJpPKwY5uZyclsIr4YvBQFOVBeDdu3NBwOFS73c5xPJ1ONR6P9fu///t5j9PlcqknnnhC3W5Xw+FQW1tbGdw5kA9b+ZLyxWKRZ2w89thjuY/NZjPPjJlMJnryyScziPug0nGJAQb9YfCE/kLOluC04LTgtOC04LTgtKsiwWnBacFpwWnBacFpV0GCz4LPgs+Cz4LPLj+fnZpguu+++3Tnzp2cScNB2UeRDqaUslJ54RCete31epVArweXg6qDv5fpjlcPLpaxrVYrDQaD3F4O3iuKIjuJZ/c8M+zgTkaW+lxQLqDDvSxhA7Q9S8n9ToyU5QHBeycYQLYojg+XwzmdINCjk7OkbAvpeCkkQMRzzWYz28iB0vuDblgiidMSRN4G6geQcWRAhfsYBPAsgIYtsQni/uB9rPsgdUGOlE8/6vbAJ9C128XbSzbbAdDt677sAEgbXU9ePu2uA6330etBt7Sl2WzmZcn4VX2gsekzrwvBxvXPPFa9/W4vv+Z6AhPqy8d9wOVLapvNcgnzYDDQzs5OJiXa5TMb6K+3jfIYJDoxA5QMHIkj2tVsNiukBLnhp6vVKgP90dGRBoOBlsvlXaDrfghG4RcMbMbjsfb29iqxgR96fKOreryBDyFnS3BacFpwWnCafxacFpx2mSU4LTgtOC04zT8LTgtOu6wSfBZ8FnwWfOafBZ9dTj47NcF08+bNinNQMZnnTYdPAeB0yJe59vv97EAeXCjVCQKjAE44mSsT4HOnp36WX0rKmUJmGOzt7eW63JndYTEG/9fBgWChXgcMdzAyjJsCCWPWgR59uE6cOOpZW3Tv7fd9PAEAnML3LXVdE3DohfJTSnmpqgO9Z6ZxavbPpXz0g9Sz8JvaSzsdEP1vfbYHPuqE7XaE5CA42kc78COe98DC1wBQ15Hb0V/eVkjBQXnToMUHLNTrZH8amfj/brder5c/Y8YEPuTtcHDxurBlnQzwWV4MZryfbgOuzWaznD1ngIG+GbiAKz7bwmcSHBwcqCjKg/YODw/zgAPQpb0MRPv9vgaDQQXoe72eOp2Oer1enmFBPOFf8/m8MsPCcQps83YfHR3lJbEcqIctIQ8GO3w+Ho/zAZCTyaSCQT6480ElOsE+kio4E3K6nMZp0pZS6kpqq9FoqiiWWq2WWixmWi5XOjpqaDYr1G5Li0VTq1VvPVuuUFEslNJUjcZKKS2U0kzN/z97/xlt2XGdh6LfSjvvffLp3OhGIxAAEUmCgBgVSIqZ0iCVqEBdPUm2nmXZY9hX1vP1Gx7ylSw/WZJ1rWsPWTIlXoqiAjlEUqLFTAIEE0AEIhOhAzr3STuHFd+P2l+tudbZ53QD6D7dDVaNsUf32XutCrPm/L6qmrOqnHTQGceMDLIRxwX4fhlJ4iJJPAyHBYQht2fXEMchoihGFIUIggSjkY3hMIbnRRiNElQqNly3jDB0AdgoFi3E8RBBMBq/4wIgV9mwLA6AiDPcIm5DmTRxlxgWI44TRFGIKAoxGg1h2xaSpACgDNsujfMj3hHneXa5DcCDZXmwbeKxjSgaaU5zHFfbu+Nw8GnD9yOEoapXHEewbQ4ULfT7NpTpMBJH8USh4CIMbbguYFkxRqMYlkVscvQgX0X5AXGs5GNZVcGhaQSk47gCY1U/hKEreLaAMHTg+w5830KSjJAk6TEHhtMMpxlOM5y2VclwmuE0w2mG0wynGU57KaRKZQeGwyJ8v4Ug6MO2Hdi2B9ctwXU9WFYBhs8Mnxk+M3xm+OzS5rNNHUwLCwtoNBrag8azCmmUlmVlHA38jo3kNjoKlVtMkyTRl32xIb1eT3vr1tbWMopA7yEVhA2n55cKSuVwXRcnT57UQmk0GtixYwdc10WtVsPU1BSKxaLeJkZFZCRCfrImwUMCg++nF8BRuWu1mn4/SRIMBoOMd5TKny9Hdi4BiV5MypPb5vgcLylLgXmQqac0TkaA0GBoFHTKycgK21ZRH6VSSXt++/1+xriLxaLeFlksFrUxSCOWIMQ6Sq+qBATpAacDTIInQSAPsHnAkV5oGiv7iO+xHqwLgaBer2vdkqCW7zeWyzykDvq+j9FopCf4k0BQgjLL4VZY/i29/VKeso5xHKNarWpdYz/yDE2eK2rbNprNpraP+fn5TP5sJ7eXM488YbLfqZ/sQ4LVaDRCr9fLbBOlvViWlSGUPKBJMJV6EwQBVlZWsLy8rB3HBOIoirSeVioVDdiO
46BaraLRaKBarWJqakpvcyUeUYa+72NlZUVvDz5z5gza7TY6nQ6Gw2HG/tm/QRBobODvrVZLRy1NT09r2yC5Ub95pm4Yhmi1WrodlDf1jVEUMnKLRETiCEO1rZZnG5t09rQRpw0GAT784X+NbncagAXLWh8UMClJ+5Bp0rtJAqQwYun/S2zhc5xQyCQJH7D0Mwp7konvbJbOpX3yOTlwy9YzrQtTvk3Z/KQc9Btjmae/qXKt3N8bp/V9YWXK2Uiu55Ky72bz/YEf+CPs2PGY4TTDaYbTDKdteTKctnkdN3vOcJrhNMNphtMMp1066fd//19m1q6Ysvhr+GzSc4bPDJ8ZPjN8dqnw2aYOJhbEHUDdbldvrZLOhH6/n/HCSXLgd77vo91uZzpRnnlIryuVkR1KBZNGQEWhR43fMQ/mQ6fM2toaSqUSqtUqisUi5ufndTvo4csroeM4+lJBKrTsBJbFnT1yV1DeYKXjC0Cm7lLZpfcwitLLCOWuJe64Yh1ppPywbBqaBMpJDi5eiEilp0OLCiydXwRZWRZlzPLiOM6cZSq91ZMceNIhJuuc96jTwyw985JU8w5O9hHbLvPj/1kO/5U76/i9JA/5rGxXnqip15Zlaa8zyybISNlalqUBlvoot0RSByh/WX86HAFknHcAtF5LGyKpEqiomwQWOo/ZVrZNyoR6KPuVAEhMkGUzSacty6Qs5Jm01H1pD6xTv9/XfVyv11GtVjM2z+d5VjDPI5a2wTYSbJeXlzUYt1otDaRSD/lvvi2UH4mfMmB9JKlI56u0B/aLbAN3QMqoD+qExAySOgcyJm2eNuK0bneIOC5jx45DuPLKRxDH0TqcYaITnrrLgS37UH3PS2fZVw5Go+G4D5mfmnSoPk11gClJMLaF1G4k/jQaDZTLZZTLZTiOMx5k+mi1mhrLiRmAPNPZQpLE6wYsaSRfLDgxhor+S2UgMdG2bTzyyOuwurorIyPHCfC6130arpuN9JK4mg7yVBn5JAfIyl4jJMl6TnMcF4WCl8ELDsIVlkawLE7+VJ8wYjAMA1GeDUbzyQlhkqRHDgAW4lhhbK9Xwde//hYkiQPDaYbTDKcZTrsYyXDa+ec0gPMUYjDARSu5SGE4zXCa4TTDaYbTzl/y/bPv8vK8IW6//VOwrOx6I5PhM8Nnhs8Mn7H9hs8uDp9t6mDi9i565uh0kQoxGo30sWdUBJmSJD0LU3Y8lSXv/aUH1razR7nJxkqj4W9Aet4nvbd8fjAYoFwua9Khcti2jU6nk3EwsW7yM6lzAWTKYXukQrDz5Id5sgPZaXyeik+iohOI5UqPu0x5pcj3CT2bzF/Ww/M8DRpSuYHseZLMk3kAyCigJBX+Lj3/so1SnnkwkU5K/s460CEmy5NALxPLIkhKubJO/D+Nj3VmPaTTUxp/ngzYBvYT/5bRAtz1JfOkwcpzPoMgPSuUDj/WSYIQ68K+ZL9JGUu9TgcBYcbeqDuu6+rymLfsa37P6AEZkcF6UW5sA2VLveDZybKvKHuJFZPIEFDnnxIjqtWqjqSg7NnfPMZTDspYFuvt+z5arRZarRaGQzWwpLNc1k3ijtTLPNYA0EDPunO3JEFZ9p8kPoktknyZN3WB78k+p12adPa0Eac5ThGWZWNx8ThuvvnrmnvynEYdpZ5JPJF4L/uSn16v96I4zXHSo1PDMMTi4iIajUZmR26/38fJkyfR7Xb1gIt1cxwH9XodgNJrXmyaz5v2Lc/+ZSIfKdsqotvdhqeffsU6Odt2jJ07D2F6uoNSyUehEMP304tHz4XTKNuNOI3RQ9yWnh+w+r6vo/klJsoBZp7TpM1txmlhGGJtbQZf//qbDacZTjOcZjjtoiXDaeeP057vPE3qvuE0w2mG0wynGU678Mm2I+zc+QwsKwCPi6vXV+C6geEzw2eGzwyfGT67BPjsrHcw0WNoWZb2ypVKJQyHQ1iW8v5yRxMVS6Y8UEvjokezUCjo+5koWNu29ZmFhUJBKwGPgaNgKHwAmZ000pvp+z5OnjyJVquFTqeD+fl5LaxyuYxer6fLlYKnZ5n5s0waKCMZqISUh8xDejIl4cnOJEixQ0k4/E3uCPJ9X9ddOsQkIEug4fv1el2TDt/hRxp8vu+YSFrSQPk+wVZ+z/ZIufHIPX4v2y/v84qiCO12e51RsM0SZAlaNHYm1on1knIH1KScujuJIEguecPj7xKcCF6yHwgo1HfqArd6yh1lsl2sq+u6GmAYjSAXBli23NEmASN/sSVlIkFKAjfzp5wARVIkAdu2M6C6urqqAZ3bVGm3eT2WNiLrnbdjvieBjO+RIOWHAwrmUavV9G/VajXTdqmrUpaO46BcLmt7pz5QLhsRLPuRfcvE/8uBHfuRuzJpl3LbMd9leymjfHSLtE1JrnncNWly2ojTCoUKHEeda1upVAynnQOndbtz+OhH/z9IkvXbpIOggL/8y3+BO+74Jl7+8kcwP38YwEuL09IBZtpvhtMMpxlOM5y2lclwmpmnMRlOM5xmOM1w2ks9jUYV/M3f/Ebmu/e85z9iYeEwAMNnhs8Mnxk+M3x2sflsUwdTrVbTHvIwDPXOomKxiMFggEqlop8ZDAbaU5Yk6n4inovIihIYWVHZ+YwaoBLyN+lVpiJJ0JReUr7HjpBKRiENBgOsrKxkwIiCYt1psLKOLFN2xEadzjrl22LbafQDow4kMMtyeXYjjYzKUi6XtQJxFxm9jK7rajCnobP9BHHp4GE9ZAQE6ydlx3oRZJIk0eddShBmf0lCpyLLtlAOPBeS+kBjpuGy72lwYRii0+noNkhw49mf8rtaraajD+RZk7K+9Xo9Q9TUR+oE+1KSO7e+sq30kPMczDiOx0eUdNHr9bQsZFQJ+4M62Ol0tDylHCW4U9dIhrJ/WDfmzR171GvqiuyTOI71MZGyj/k7B2IkhSiKMuAl60ecYB7sb8pTylfWQw5+aJPS5gHoCBjXddFoNPTfhUIBtVoN5XJZn30qd+mxXVLeJGBuG2WdacO1Wi3TJrkNl23g4In1poypN3JwIAdBEqNov/zedV1t20mS6IgG2goJnwNY4utGkUUmTU4bcZrrqn/r9Tr27NnzPc1p/X4Jn/rU/4ZXveoL2Lv3mQ05zfcLAHjWeD6p76anO1hcXIVtO3DdlxanMRUKRdRqNcNphtMMpxlO2/JkOM3M0wynGU4znGY47aWUpqdHeO97n4FlAbat+vLP/3wvul0Pk+Yc99zzfuzf/zBe+cp/NHxm+CyjG4bPDJ8ZPtt6PtvUwURPPBtOgXHbo+uqO4h830en00G/38dgMNAdS5CXQC0rxY6WysDvJQDI92gsNC5G7lHY7Ay5vdPzPA1M7HwJRnyObWbnsEzmJz3trBcVkQbgum7GS8qOJDDJTpftyoOrjD5gXfg9FYz1Z9v5LN9j3vLvPHBIoqNisQ7yw3z4nvzI+rM+Mg9ZLwDa001PMEFTllMqlXT+VGyWxfrm2yWVnmVIpxafYxnsd0nG1EV+mKeMtpB3c8VxrLdN1mo1zM/PI45j9Ho9DTpSthI
E2K8yOoL9I48zkX0g5UhylO2gbVQqFS0nlse2y8GBbHfeTmWEgeM4mcvngiDQWz4lEedth/lu9H+2UeqzfI4y4+CFx1uSzHgearVa1eccS92X+k/5sd7yeE/W2/M8XY88QFOvpR3k5RkEgR4IkPDZf5KE5Pv5QafUCSZJ2oPBAMPhUA8OZJ1M2jxtxGmlUgW2baNSqWJhYeF7ltNWV+dx4sReHD9+FbZvP4lSycGBAycEVjg4ePBKTE+vwHE2jl6xrAj79z+N2dkzKBRGiKKXHqcp3gAKBU9f+mk4zXCa4TTDaVuZDKeZeZrhNMNphtMMp72UkudF2LevBbUL10WhUIbnbbyoubx8BQoFHwsLx9Hr9TE7exqzsyuGzwyfGT4zfJb5v+GzreGzTR1Mq6uruiLcWlcoFLTXtFar6S1YrpueEcozSQmAbBTBSHaeFFrea8sG0thsW0Xj1Wo1VCoVTE9Pa4UbjUbodDqw7fRICJ49Wa1W0el0tFK2223tkQvDEJVKBYDysvMYiShS22LZfkYm8FxDEgO/o8HZto1yuaw7k576PBmxPCoekD3LkQpBmUhlZJ3K5TIABUxULr4jAZ/9wvfzSiEJQz4nQTCKIhQKBV0XpklkwLMsaZDS+JhnsVhc1yZOlAHoS8rY7zQGdQFiOjAg+Ur5BkEA13V1PSTYUkayHtL4ZJv4f0kQkrhoqNPT05ienkaj0cDCwgIsy0Kr1dJyo5eddaQMSRjMV9qBjCSgXOUigARggnueSGQ/VSoV/Q690nEca7JlkvYq30+SRNuDzCMP+GyLrLskMSlX6hp1Sw5U2EfEm3q9rj/Eo5mZGczPz+st7XKRhVEpUl8liA6HQ/T7fR3pQRnTrvNAL/MHlO3K6CE5MJWExbNbiY+0BSlvx3HQ7Xb14IX6nO9zyn00Gun6S1mZdPa0GacBKrJo9+7dLzlOC4IQYRij1xvBshLYNsb4Y6NQKMJxQriuhSeeuBHf+MbbAAD33fdGnDhxDa677kMoFm1YFhCGHj71qR/DHXd8Dddf/+iGcva8AO9851+hVArh+/Z4YOsgSSyEYQx1EezlzWl8lwNQw2mG0wynGU7b6vS9ymlmnmY4zXCa4TTDaS/NlCQY674Lx3Hhuty5pOYvfEb9q/r1xImrceLE1QCA2277O0xNfRpRdGnxme8H411Bhs8Mnxk+o+2xHobPXjp8tqmDidsWbdvW52DyvFHpvazX63BdF1NTU5ientYXV4WhOluw2+3qDwVIAGCiMQRBgE6ng3K5rLcz8qxC21bbD2dmZnSnS4Wi97hcLmNubg7lchnFYhGlUklH7NG7J7dUDodDDAYDrK2tYW1tTSvw6uoq1tbW0G630Wq19HY0GhYNSnq64zjWO7xYH4KTBFO5tY0y4fuUMQ0ijUhU+VC27GDLsnRfMeJBAhu35LEcSTZsA+vH+pBgqZh5UGCd8iDpOI4mICDdZuh5nj6fks9RzvSOsr1BEKDf7+u/LcvSQMItffSsJolyWlUqFUxNTek82fbBYIAkSbQ+ye3SQHqmpCRA5i3lIz8kIBLT0tISBoMBut0uAHVkiW3bmJmZQblc1v3V7XbR6XT0Nkn2M+2E+bEurCt1RRIU68V20T4pL3lkCtvIfmXfymNSpA7yX8qF9cmTBPtEkhi3X9JebVtdJEg9km2TixvcZl0oFFCpVLR+cYBA0J+entY2NT09revKKAd++A51T9aRRMz3SECyXrZtYzAYZAh0dXVV2xLJjFjIyBBJtvkBFWUhQZ99wgs7abcyT541ywUhkiKjRWi7Jp09bcRpvs9t4opUX2qc9rnP7cJf/dXLAACve92ncfXV30ar1cIXvvBLWF29ErYd40d/9PcQRdlLG0+dWsR//s+/Ir6xMBiU8LWvvRb33vtqPbHLJ98v4oMf/BcgNUiOYHr1q+/HG97wTQCXJ6f5fgWAhVqthunpacNphtMMpxlO2/L0vcppZp5mOM1wmuE0w2kvzWTbFkqlorbhMFSLsq973Rre9KZluK6DIAhw7JiHP/mTK5E/Nu+RR96CY8duwfve94ewrGhL+cz3i3jmmRJuummAYjHls8OHE5w8qZxko9EIntfD7OwZw2eGzwyfGT57yfHZpg4mKTjpjZaeLQIRO4TKwYLl2Z3S2yijBeiNY8OpEPxQaNKrPRqNMuVwckXvsexYCahAut2V9SDwkkRoIEtLSzhz5gyWl5e1YrO9rBvrT3lEUXrJmJz0sTwaGBVEGhfzkp5kAFo5pUdSyoZ5UHkoI9ZHAhcNlvXLez4JYrL/pTHxPZ7JyfKYWC+2A0g9yY6jzjdlH0hPtZSj9NjS+KQiS/CR4Md+cRxHg5skA6mn0rsdx/E6o5wknzy48f/ysr1Wq7VOztL4kyTRwM68JQCx7lI2lCPzorzlv9KWAOhBk4wOkUBP/ZJ6LG1OliX7lO+zHA7apA5LzOBgR+5kkwDPv1kXIB1EcKekJIKpqSkducNBFtvIvs23idjF7ygb3/c1jkg9Yvs4OMrbSF5f+ZFYJe2ZfUniog2zb/O4mtc3aROMMJDbvCfVzaTJaSNOY6LcX0qc9r/+1x48+ugshsNFAMChQ69Arzc/Phd8H3q9WVhWjIcffiPOnNmbkVcUuWi1ptbJcTQqYzQqr/s+TRY6nelN++Lppw+M7cTK9MPttz+BRqN3yXOa74+gJopqomI4zXCa4TTDaVudvhc57VKapzWb83j22Vtg2854oSvBnXc+CMdJMrK5HOZphtMMpxlOM5x2qSRpn4CSXakUYWbG15xgWQne8IaTeOCBeXQ6Bf1uEFTQbG7HAw/8ECwrBiDvHQJsO8KNN94Nx0nvZjlffGbbEebnI9h2ls/m5y0UCtYYK6uw7RrK5eIlxWcvpXVHw2eGzwyfXTw+O2cHUz7JBhCc2QB6tV3XRaVS0Q3ntjY+nxeWVCr+LpWKDea5ptxmxrK4XZSKx+2q5XJ5ooeT9ZLeQ3mGpIwWGA6HGA6Huu5UTOYjjYadITu4VCrBcRytWCp6wcuAsCQLggj/zisR/y+Vit9RSfk+PzRWKU+p2JQP/833Ey9sywMsn6E8aFjcwmjbdsazbVmWNn5ZLpUagHZEsY2yziwr33dyMMB28l3Wlb9LI5b6ngdRSQSSiKiDfJa/kcAIZJRZHKfe++FwiHa7ndF91g1Axk6AyZfVyXLzv5HkCOaS8CW5UD4kIClXAviklCcDAjH1Nt8nBGbKhDYl+4qkzPy4RbVcLuvInEKhoC9QZJ3lNmD2H/VR1of6yCgV3kPALaeUvZQNB2zsO2knUtYSbPM2Ke2C7SdIs35sQ75fWQ9JBMQCiTsmnXvaiNPYXbSVy53TLMuD4xQAVPDJT87iuefSqL1nnrkJzzxzU6b9SWLjgQfekvnO8wJYVgLfL8gnUSyOEIYuoigdPth2jGIxwHBYwEa7mvLp0KErcOjQFeu+v/LKNRQKZ8a4mPbVYMABj4M4dhGGLtSk0YJtq4nEVnIaJz79fh+dTsdwmuE0w2mG07Y8fa9w2gudp4VhAUHgoVYLEM
fReZmnRRHQ7aoo4WPHduPuu9+m5V6pDHDjjU/DtoNxWygnFblerwew7fRiaYlj/X4JSWKP50wuPG8E1w0MpxlOM5xmOO17LFnI70oC2J8RksSBbVuYmQnx1rcexfHjVcSxeseygNHIhu+X8Y1vvGNi7p43woEDj6BcbsLzRuedz2q1MlqtLJ+VSjYqFVvf05MkHkajxqZ81uspbPI8y6w7Gj4zfAbDZ5cLn23qYCoWi1pBer3eOocSlaTf72cEwO95Dum2bdtgWRaWlpb0sQf0oPEc07W1Nb19r91uIwgCDIdDBEGgvYn0LlJQo9Eoc6QCkBoJFYVKY9vqjNJGo6EBJknSC3FpyHKbKQ1lbm4Oi4uLejthGIYYDAYZIJPbSwlqcRyj2+1iOByu86TOzMyMt3Cq81fZFno5mb+MAHAcB9VqVYMkPdiUNbf0UkmphDJakfkT+AuFgq47AY2GS3CnIktA4CSOf7NfuN1QGhvLZd9bltp+V61WdX1p6FL5Aeg2Mz/2FfWQhmFZFrrdrq4rDUmCPWVMuUrPLD3e9G5blpUBTMpMRnkQsLg9tt/vY2VlRU+AG41GxmO9srKidToMQ62f0vhZT8qD7WO90gXk9LswVPegsc5RFGm7YPvZXoKTHJTIaBAZteC66RZmypT9myeERqOBer2O+fl5VKtVAND92Gq1NH6wjRIgbdvG3NwcZmdnMT09je3bt+uBEe2IWMEjXGRfsBz2I+1BRtXw/eFwqHGI0QfsBxmBQixi+2kbeUJl3nyfA03qRq1W0/lzCzlBvdPpYDAY6MGAJDD2A2UkIz+4tdi2bb2dn/cTmLR52ojToogRT4G+CP1y5rTTpxfx5JO78LGPvQLiZILnle6881uo1Xr4X/8rdTw5ToR3vevv8fjj1+Oxx27Q3+/YsYof/dFv4sMffiOazdoLK3CcPvjBH4VlbR4ZI/umUPDxcz/3H+B5/S3lNPZPr9dDq9UynGY4zXCa4bQtT98rnPZC52mf/ext+OpXb8Bv/MYHEUWD8zJPW1qaxp//+a8BSO//YOr3S/jd3/2liX3luhF+8zf/CrXacN08bTTy8Zd/+U+wsrKgn3/ta/8ON9/8NcNphtMMpxlO+55KScKjxniXiYUkicc2PIBtjzJ89oEPfBeWpXCxXC7hH/5hAZ/73NyG+QdBAX/2Z/8Kb37zZ3Dbbd+6ZPnsiSeKSJIRtm1bMeuOhs8Mnxk+u2z4bFMHU6fT0R1D5ZSdI72fNDrpXZZeL+kF8zxPn48YhuryKIIlPez08lFhpCeQhs4LYAkijuPoThoMBhmj5Rmpw+FQn9XJOvE5AKhWq7rT5BmZ1Wo1Y+zsEHY8PciUAzuFHS+9iJ7noVwu6zKoFBK0Uk9jgrvvfg+Gw+pEzzlggRFyruuBDkwFZowiiJBGgiSIIoIh4LoeXvOau7CwcCYDImyD/FeSOQ0175mWxkwQlZEHbCPzY570WkvjYWLeADJ9yt8IYiQfWX/ZH3xeetj5rGWl0QXSw03iY3tYFvMDFOlRbnzX9320221NApygM0KC7WU7R6OR1gHmLesm85aJ+cl2y3eknbJ/5CCAbeTz3O7JxD5KEnWeLMmcoET95gCk0WhkiDsIAm3z7FvWS9av0WigVquhXC7ry/NSGwh1ZCoJg3WSETOyTpKkJdCzP1gngnA+YkXqjwR6RslI3QXSgZiUJb+TBC4jEnimLJ/jIEaWK/ue/cr6kNCKxaK+DNSkzdNGnKYW4xIEgY9erwfg8ua00aiNubmTeOtbD+Pzn/9h9HqTnT533PFdzMx0EUXAXXfdiNGogEIhwOtf/xC2bVuD6wb4wR/8xlgnAcuKUauFuO66Y9i1qwceC9RohGg0ynj724+g37cQhgGCgFEy3E4eI0mo28DJk/tx7NjV6+oURc/vIuTh0MYXv/jjuOWW+3Hllc8gSRKNHxea0wDowTrzM5xmOM1wmuG0rUovdU7r9ar4/Od/EJbl6PmN67qwLB6jIy//thFFIeI4QZLECMMIJ07Moter4q/+6q1Ikkg/H0UxrrjiObz61Q+e0zwtjov4679+JQYDYDgsQO3gnbRb1xrvrl2fosjBRz/6Oniewsj8PK3ZnEUUpRG8jz9+BzqdBbzlLV8wnAbDaYbTDKd9ryTyWRSFOHWqjnvv3Y9+38MTT8yg2SzCsoBbbz2KPXuaWi9Uv4QIwxg33nga5XIXn/rUXkw+VcFCFHl46KHb8NxzexGGwZg3FHconbDH62vpjpk4jvDyl38c9fopABd+3bFW6yJJlH1t1bqjxHXbtjUuTLqTJY5jXfdSqaTrHkURyuVypq5yDdnwmeEzw2cvbT7b1MG0srKiAYoVo+AmNYSeLYJNkiTaCGgoFAoVhh1Eb20QBPqy19FopKPR5MTIsqyMMTBfCopGC6SGLf8/GAy0UQRBeqEcoLZSSg8kDZ3ePmlUFLjsbNZDTqYoQ0Y1sO0nTsyg34+1Evf7g7Ey+JlJ4pEjr0a/39i0I19M2rfvGOr1Avbty25v5cRSet2lDPIfykDKRBIXjYvfp04yaLKiIUkQkvnzX0mGHIhwopsHeklUcRxnSE4aEYlT6gOTBEZJhgQb2R7WhxEL/LAcykl6i6njzFeSmWWljsV8yrdRypp5qMm4lQEPSaYS6CRgSMABkDnKhFEX+YEMbYXAKgGJ7WT7JMAxKiaOYx3tQdkOBgMMBgN96ZyUoexLOYAAkInCBdIzUKlXsr/koIYfykjKTeoo+0fa+KTBEvVDyoRJypB9JJPUd0kkbH/+bFWTNk8bcVoQKOcHB2iXA6etre2E49RQLhewbdsZ2HaMOPawtLRtnGeCubllOE6UkUG53EaxqC433bv3CBYW2ogi4NFHt2EwKKJU8nHgwGGoc8uBq69+TmB/Att2sLjYxs6dysGUTmKKuPrqZR3ZoxYih0LGIeI4QhhyC7aFTqcBywKUKJTjqdebyyyynS0liYOnn74V8/M9NBrK3g8ciNBopDh7IThNchfxy3Ca4TTDaYbTtjK9lDhN/l/ZxwKWlhZw771XY7Iz59zTQw+9bN13QVDFFVec0YttSiYudu8eolpN9MLHYFDD8eNlPPzwDfD97LTZ80aoVluaH5PERru9MLG+SWLhwQevPOc6Ly3tRZLUMDNzP1w36xQ0nGY4TeZtOM1w2kspBYGNY8cqiKIQzz3XwCOPqDtkz5yp4MwZFTlfr68hSUZa5rZtwXFs7NnjY3Gxi0pliAcemIPoSliWhV7PxdqasqGTJ3fh5Mldz6tuU1PfxuysA4BHzK6hVFq7oOuO/f75WXcE0vtmwjDUNk+7maT/EovoUEoS5RSR91IxD9d1x8cE1nS7KpWKfp91MHxm+Mzw2UuXzzZ1MH3nO9/B1NQU6vU6pqam9KJRt9vNGEl+axY7lkKQAmEjud2K701NTen8uNUtjmO9tU8CGzsvDEOsrq7qDiXAUjlY32azqf/PMxDZGQQYKpUEFm5bpaHID7+jsnALHz2TcqJFQyB4RFGEfn+A3/mdN+HkyXNxHL24idXZ0qc//
TasrbXx4z/+FDqdjo5S6HQ6GkA7nQ56vZ5WfJII+5p9HEWR9pDSAAFoWZdKJU0G8nd6Q23bRrvdRq/Xy2zVpTEyuoPypExpxHIbKcuQifohCY1RF3EcZwBA9hkBimTNvGTEA+shCUVGdkxNTel3KFfp5ab+8R1J1pJY8km2SQKofF9uH2Y95WCEek3AkADP33gOKSMHaBsAdPQOdYbvep6n5UtPOsuUA4wgCLC2tobV1VUcP35cb0GVfcq68N1SqZQB5G63q8GWCyXUFbm9ljoEYOI5pIz66Ha7mcEN9aFUKumBn2Wpiyer1aqWMfWfW2slOHMg57ouGo2GHlgAahs3yUxGu8iIAbZH4mp+UGLSxmkjTut0+oiiCJ1OB8ePH78MOK2Fz372n2J1dSccx8fb3vZLKBb76PW24R//8T8gyxlZ/rj22vuwb9+jsCwbYQicPKn0/rWv/XttI52OJRxHBb0QaVkqSpyDulSvI4xGQ4RhJAZIsbC/IoLAH0fyqMXIXbsewa5dj2QG93Ec4+tf/xm0Wjued99+4xuvwTe+8RoAwB//8SHcfrs68uJCcRrtdmpqCvPz84bTDKcZTjOctuXppcNp6+dp3/nO6/HMM+/AhZoDPfXUVXjqqQPrvv+1X/sCbrnluHac3X33dfjIR+6cWI/5+RO4885/GGME4PtlfOELv4g43nR6fc6pUqniTW96Mywr0YtIhtMMp7Ecw2mG016KqdUq44MffNWmz3z1q1fhq1+9KvOd4yT49V+/H7VaiHI5wq/8yqMZR4TruvjmNxfwsY/tfcF1+/rXfyHz9w//8Dfx5jd/8zJYd+xrvKbucoGfdyAOhyookI4TILvDh/jFtvm+r+tLO3EcB7Ozs5idnc3chUP5z87OAoDhM8Nnhs/w0uWzTUfAdCSx49ipxWJRV1gqJj3PUoHo6aKy0QhoILIz6FWv1Wraq1ar1XRZEpj5t9wOSGUbDAbo9XpaWbZv3663uRH0aeij0Sij6Gxj3ojYVhqBVIJyuYxyuYxisag7mcrhui6+/e078cgjt0H1i6X/XV6u4UI7j84tWfjOd6r4uZ+7fiyXBJVKgN/93ceQJErx6vU6ms1m5txUGj29xJQV+5kTUwDaIGhwBG3ppaU3tdFoaONnFIj09koll31J0pSRCCxben6lYRAQAOh+ph5KMpKDgIzkBPkz7zSaJvV0B0Ggz8iVRs/88xEZcnAhgV4Cj5SNlAuBME8U+UEW6yI94FJusi20vXyUDfPgYINALvOQMsv/zTbL6Bi2k/+XhO26rrZbeewLdY115d8EZxkJwK3aURTpASHzp86FodoeS3lYlpXBAdk+licHClInpJz5PuXIcig71ouY6zgOpqenM2dAS32v1+uoVquo1+sw6expI04rFNLopvxg+VLgtDNnYvzBH7wSYajOJo+iCK2WipKOIg933/3vYdsx1M4fC5N4xXF8vOpVH0O9vjauY7pVP0kSDAYDqMtzgWKxgGJRnbkdBBwI0c4oGx4ZIe2bUfDZSz6V3hKbUyxmmyWW3HbbZxFFnDzEUMdUqPLkILxWq6NUKsJxKvjQh66H76fRNL/927tQqWyHZSX4L//lMBYWVHTS1NSUPuLpxXJakkwBUAPg6elpw2kwnMbnDKcZTtuqdLlyGudpH/nIzWg2Hfz0T38Zw+EQDz+8D5/5zO2I4xi7dn0dr3717+Bb3/p1AC9sgXbnzgexuPg4vvOdn0CSTIq4XM+VH/3oq/HJT/oAVPs6nVLmuWq1je/7vi8gSSI4zkBzmmUBpVKAN7zhYwIHoed8gINvfOPdCILShHokeNe7HsT8fJDhtFOnPLz//Vfh//g/TuKmm4Z6HsVoZcNphtMMpxlOe2mms62Prf89ioAPfvB6vPa1p3H77ctjjOCRU8ouX/7yVSwu9vGnf3p1Zt7wQuv19a/fiEcfvVLgBOc+PPXBx2te81twnP6WrTtKh4G0D9oBMY+6TD3nR2KmdIABKTYzgIVYJscYvu9ndkxJG2w2m2g0GnoB2/CZ4TPDZy89PjtriBUr0uv1dBQzlZcCkIotJyn02PEcTmkM0iOd957K723bzjizJJBxRwwVQZ6DSk89PbbD4RCdTgeVSkWDCIFBljXJ2KSysUPpfaSy05j5TKs1ixMn9sFxHDz11BV47rmFyQI+a0qwY8dBOE6Q+VbJ04ZlZY0lrT8dWen5ieyD1AvvoFQq4tChGfR6RTzxRKoO5XKEu+7ahWuvPYXZ2SCznZCKJgFFJoI7y94oyQVI9h/7QOqX9MRqqYzby+MFeQ4qn5WkzPzyhifzZVlACoQkMOmhZ735DtubL5f5SBAggLC9EuTz+UgZBEGwjlBkPWRZqW6kdgmkZ64ScGRZBHraGsvN2yWPN2RfBUGgbVIS7iT5StKQ7aYNSrDNy5qyk4AvyVe2l/8n6MoBActj9EAYhnogQ9uRkSBS3pPawe8ZtZKPeJB4RdIhgFN32X4ZfSFJvlAooFKp6AggtiWOVZTw3Nwc5ubmMD8/P9nITFqXJnGa7yudHI3UcUKXEqd9+9s+7r03xvHjOxDHk/DURqu1f9M21+tNLCycwOLiKhwnBuBm7JD6q9qiHFRZnVYLZYA6zk4ddZdG7AAYY4S6G4N/y4XKJInHEy4Ldm6t0LJSDJuebo/LSRBF4fh3OyM/z3NRLlfHg88yrriiiFOntqHTmQIAHDlSHL+X4ItfnMP0tI8w5OWc6n6o4XCI/fufQqnUeUGclmJ9OnA1nGY4zXCa4bStTpcbp41GI6ytDfG3f+vj4MG96HZdPPLITQgCH4cO7cTqquKzev0YbDtAdkEvwc6dR+B56bEnSs0k16QYtLBwGHNzZ7Bnz1NQDqYUr3u9BlZX1++WPXNm45Ml5uaWsLh4AtPTpzSnxbHktBjT02eg7htERqZxbGHnzifh+97EeVq1egjFYrSO0x5/vIIvfamBo0fVJdQM1JCcVij0cd11h17UPM1wmuE0w2mG0y5mOnDgm7AsC75fxnPP3QjAQqWyhu3bn8Jzz92CanUJMzPPwbYtnDlzFbrd/NqahVOnqnjqqWkUChjzIZ0FasF/YcHHvn1DvPKVbTzzTAWtloebb+6Ca2r5lGKWDdd18OCDLjqdFEPa7Rra7cn33AKAbQc4evQ1sO2Bxv2rrjqKqaneeV13ZB7EQ2KU5HG584dlU7+lTUmMlGu9+Z0tXGfl79LBlCQJOp0OyuUy6vW6tjHaCvPl7mbOQVn+i113NHxm+Mzw2cXjs00dTJ7naUVrt9taEFIwLJCVlFs2eeZmo9FAvV7XQMFOTpJER8+lHudEXyArve9UVnrFWcb09LR+lkpEDyG3XPb7/fGRER0sLy9j27Ztme2Y0jg6nY4uT26FGw6HGAwGGW+/65bgeSW4rocoAqIoQRwrb+PBg1fgC1/4iU2km8C243VkliTp76peCW6++W6USu2JYET5qH5RRxRJJaUi8rgi21YXm3EL4cLCAj760Ztx6JCHOE5BajBw8Nu/fRX+yT9p441vXAIQa0CQhkr9kAZJ
p3/JjnuZ2cnJjZHizm83kD/BRsdSLx/Pnzho80PxRsdNIC+Oj3Gjf9fj/ZBmBnMqPbknXFjOZGXddpm+lisbiVaIl51U+3p9K2mSV9NXaJe8aguKOTQWyDjpqHXneNx06nkya0kBy6ZVlmw+EwTQZ4OKROjBiz4icTMPKj291vnR4OhzYajRp4ppPr6XTa2LIacrcEpwWnBacFpwWnBafdFwlOC04LTgtOC04LTrsPEnwWfBZ8FnwWfHb8fHZngcmLB281llbxfDVOyUKJAmcpQNMWBtaKMcbX66nMatIAqNyjFU4lrbo+nE/Y9tIA6ff7jcoy46VPHaf/XZ2s4Mx3OkaCQEXBLcuyBqEQMAAD9+q2XeykwEkCKbmpPxV4NBlJKIIUkEUfjQEFF8aAP6iUamVWSVfjRH+22cQnL/erPnotPtDk0XjTVSf4mfG0jasthom59Xqd7sFO/h7VDX3xKS9i3+vgSYZ2dQsu9qB9Xd3T5mPVTWOIMa1WK5vP58lW6/Xarq6uEsASX8Sqjh9Q5GF9+ILv8Tt+BDN2u8OZs+hF/4yFXMYumsee3BTHAFiAnjGDLQA9bZAzxAeTFnJrNBqlSSIPJtXJsPpb+1Jb4Yt+v2/D4bCBs8RBXdeJCIbDoYX81iU4LTgtOC04LTgtOO2+SHBacFpwWnBacFpw2n2Q4LPgs+Cz4LPgs+PjszsLTCivCYGSKKWG1+u4VsFV26AdTShPAhrUmpAafAzYJ5r2rdV8r7dWBBVk9TOEqmWv10uEoyTTBvSaiICZbnFkG6xep2Px9tfvNWgJZqqft9kEgkHPXq/XADiq00VRNEAYYMeenDuJz9ALu3KNEg4Ajz9UPxJmu902zr4lRjzpm1mDfLrdbuOhd6wSUSLQWPFASVwALKwyIKkUZAEkBU/ap39e2A9fqWArfKMrdTw4q//wA3lI24PBoKELfed5nrYZdzqdtJUWXypRqy93u11azVDX+4r5crm0q6srm81mDXCcz+e2Wq2srusU39iQXFoulzafz9MWTPW92eFcUAVpQJAVHsQN8aTX+mo6tgI41Q8aU0rGntjoY7fbJf+QJ9wDTg4GAzs5ObEHDx40cEQnF7q1VIkFUsGv2AK88DEDiQ+HQxsOh+ns45C7JTgtOI1+gtOC04LTgtOOXYLTgtPoJzgtOC04LTjtmCX4LPiMfoLPgs+Cz46Xz+4sMGkgAkwYrQ1IGJhXHtlut8nYDITrNbEJuN1u1zizks9IeAUggov7qY5XVZXAlK2My+Wy4XTdFsj5nZxBiR04m9XMGsFflqXleZ50wsH0y1mV6E9QDYdD63QO2zDNLI2BgKBqi920+k8SlOV+G6FuOcX2vK+qKp25OBgMbDgcNgIJe9d13djeSMV3uVzabDZrVPkJbHTBL4AtgIdO6K3bSDVWsJ0nNNrV2NDVHzquk5OT1kT1RIluJCs+ACix7Wq1sqIo0gQBfy4WiwbAKGiwQoK2VW/1t1/BooRFPCqoY7t+v5/6Y5LCfcRRlmUJANjuOB6Pk++2260tFotErGofrapXVdU44/P58+c2m83s+vraFouFmVmaFPR6vZQ7jBVAJf+wGwDF5A4wns/njUmMzxklaI1tjRWNHfCAHPQYw++KG4wdnbU9xR106HQ6NplM7PT01IqisKIoUiyYWfKr5qgCusYleaeTgt1uv0Wez3q9np2dnTW2sIZ8cAlOC04LTgtOC04LTrsvEpwWnBacFpwWnBacdh8k+Cz4LPgs+Cz47Pj57M4CEwoR6N7QCJ35Tkk6vV6dS9v+Pv2dKh5CABL8ClIYAuDR/n0VWMeiBqZ6amYNQvNj4R5NVgUrrVB7ewIaej+OBLQQCEttooHD2LUtHRP3suVtMpmkqrLai2s9+WIzbKFkwneaBGprksjHSLfbbbTDS5O+LcYAQa24k0gKgtqmVovRAfDRxG3zp7ePVr81bqks73a7RNC+yq2TCn56UWLAP3o/JEds63cQN9cVRdE4k5PvmATw8Eiq4PgCouK1WCwScUEOEAQ2I6eULMAMbE0co4MCnuYm+KBjVzLSuMYP6msfU2pHbKYY5PvSGGmbyGrf9M/Y6UNXyfR6vaQLNm2LO2ygsY4uxHiv17OiKGw8HttoNLLJZJImQoozIbdLcFpwWnBacFpwWlOC045XgtOC04LTgtOC05oSnHacEnwWfBZ8FnwWfNaUY+SzOwtMtwGcGplB6udqhDawZ2C+Lw/4fK4OBPRpm+Tw1WZtC6NqQLc5EVLR6zQIuUcTGD24HgcpmfkqKMmmzua8S+xAgHCWotpUq/f6mZ4FyefIaDSy6XRqDx48SBXftjH6aj7JByhou3ynQapj0CTiPk1wTw4aS3znSXA8HjeA3oOd2pkk0K3KGjcaX22kpLGkdjJrnhULcbPSQG2iY/AEotfoShS2kiKMVcertkYn9AX0FdzZMlyWpV1dXdl8Pk8+9ROQ1Wplq9XKZrNZeogeYK6TJcZEPpAb9MXvfkUHn2k8cQ2TDU/89EWFXXMWG+jWT801BXK+R9R+bfijsas5oxMSYg3SVKBXUsOuOsH0BKkEoYQPyI9GIxuPx+mlpB3ycglOC04LTgtOC04LTrsvEpwWnBacFpwWnBacdh8k+Cz4LPgs+Cz47Pj57M4C0+PHj20+n6ftoiiz2+1S52pMFMMwHnQ04AEPTXLaHAwGjYq/r/RxHdtF6WM2myXDZFnW2IIIWahhCcqq2m+f2263aRtrp7M/M1KTmSTqdrtpGys2UdBSUiShGQfbIbfbrc3n8wQQu93OZrOZme1B5Pz83EajkZ2dnd0ADmQymSS76RbUbrd7A2SKorDz83N77bXXbDQapaS+vr62q6srW61WycdauSTAiqJobBnkfqrMBK1PavU9IAZoa+LQNnbT82HLskwrLFiFgM08qan9PRDynvggDnVLta5OWa1WDR0Z13A4bJAI2zlJdk1SQJB4NNtvvVwul43qr5Lcbrez6+vr1Hee52mlhB8jKx2II81BgHS9XiewKcsygTerA66urmy9XicyXK/XtlqtbLFYNHyntiG2yVGdqPV6vWQbjVtsrXGDXviXFS8aY0qwGlcIMecBViddkLgSkE7YiF/Fg36/38g32vBjgrS4ZzqdprFeX18nQuWsWF4ei/Q7zkDlQXrj8TitDOn3+8kf2MQTYEi7BKcFpwWnBacFp1kal94bnHZ8EpwWnBacFpwWnGZpXHpvcNpxSfBZ8FnwWfBZ8Jmlcem9x8RndxaYPv3pT9u3vvUte/LkiV1cXDSMjGG06ofBFGBUeQ/YbUmpDtMqGv2pcXyS0ybE4b/Tnz5ocJiODYerzrwAdnU6dmBrYL/fbwR1p9NpnLdJUGlFnjFRldZxEmwanAjB5oGMsbHt8NmzZ40HpVGFJLiKokhjpm8AToOchAUw1GbYEN8p0PkKqcYH4xoMBikGqmpfgUWwFy/vrzZg90mEbUgOBSnu8clDv7vdrrF9uNPpNCY8fjKiYA7I0p6f/NAGW0GR3W6XEl9jRCdCuqWWvnjpaoCyLBOoM
7G5urpKMepzED297fRMV+yv8UL7fpLCeyUe8lWxQO2t8Y9vVRRH9HcmKaenp8lH3h46rizLGttiyQNPZBrnmhO73S7lEj7Al0r43q6MVeOJrcaTycSm06lNp1MbjUbpwYlMmHRbccjLJTgtOC04rdlvcFpwWnDa8UpwWnBacFqz3+C04LTgtOOU4LPgs+CzZr/BZ8Fnx8hndxaYvvd7vzdVrwASHKMgqeDjpQ1YNak8WDNwDKukocGk33nRRNfrte82sPT6eCLS9qmSksj0qasYeAiZgiTCeLieKj56ETTYgmvVFm1ESRsK1lRkZ7NZg0joE7DgfE5tC3BgCyPVX+738aATACV9/9J+uIYVDFRLSXQlRHTSGMGe6qPbYg4foZ/3C2PR9wAE+nCuKqCn41Z9FCg0bohxrcx7m6u+ntzUXmVZNgBXYxp9l8tlWk0AAOO/9XqdVrR4QvT5ojalT622MwHgPFgIUdvVcWieKuABwsSX2kN97HXSGMiyLFXgJ5PJDXt4EsF2ZVkm3yE+13xsMS5yBLIwa57drPHh8cXM0sqdwWBgZ2dnaTvq2dmZTSYTG4/HjRU16AvRhLxcgtOC04LTgtOwo9o0OC047RglOC04LTgtOA07qk2D04LTjk2Cz4LPgs+Cz7Cj2jT47Lj47M4C0x/7Y3/Mrq+v7eLiwn7913/dfvVXf9Xeeecde//995MjO52OnZ+fJ8NQSQM82oJZq4sYUkFMP9NKvkpd12lr3m0Eowk7GAwaZIFeCkgEKOKrodoPZ5cSnAqyZVnaaDRqOJuVAZqMVFBxHDoRhAoqBAFb4sz2WxGzbL9yYTqd2muvvWbj8djyPE+kXJb7yulisbD5fG4XFxcpGAlq9FaQpVIMwV9fX9uzZ88aDwvTCrYGPEGb53k6d7XT6TS22LGKQO1ZVVWq+PJQNx72ho60obFCLOAjJVv8wRmqnU4nAW5ZHqrqmvjYDftyD9/h99VqlWzIZ2aHbc39fj+da8q9WkkHBPA1NlQSIB70rEt0opJMv0pqukoC8AWMiHN8BeHjBwT/aq5g9/F4nD7HT/pTx6Q2VJLWPuq6tsVikQiG1SlsD9UYVX0gGtpkwnJycmJnZ2dpPE+ePLHlcmmr1SqtJKjr/eoZxSwIE1sx/sFgkM54VQzpdrsNmzI5A78UJ/AdPzVf8jy3hw8f2nQ6tdPTU/vIRz5i4/HYiqJIW3jZ9k7MsPWV/A95uQSnBacFpwWnBacFp90XCU4LTgtOC04LTgtOuw8SfBZ8FnwWfBZ8dvx8dmeBqa5rG41GlmWZffSjH7WnT58mI89mM1utVmmblG5BJBl0Sx/OZtAEAWCoQIkBATd9iJwabDQaJQd5AvEPn6Jd9KQdTTB01QQhGRVYVN+6PjzUDb2qar81dD6fW7/fT0FPHzgZW5KUtA9oaTCR9CQMNiQwAdXJZJKCEqKYzWapyrlYLBJA0Q7bUwE47UtJigTBHiSXbqHEDvgDHwN8fosjviFmsBOJQ3KprbVd9Yf6WAmIhON6tucCTAqQ6OMnD5CLbkHWa/FrlmVJZ+LAx5bqpJMi4o148nnDi1jp9Xopx2iX99jZ55xW6vG/TqZ03Eqc+I54zPM8rfJYrVY2n88bY1XwVTsp0SmpQYr4nv4BfB2b2lpxZDKZNCYS8/ncrq6ubLFY2JMnTxLZ6dZ3Yt7jj+YaP/3kRPFI45PrtT2whsmp+qbT6VhRFDaZTBJBKcCrvXQVBhNKYj7k5RKcFpwWnGbJD8FpwWnBacctwWnBacFplvwQnBacFpx2vBJ8FnwWfGbJD8FnwWfHymd3frtYLFIl7ezszD72sY/ZcDi009NTm81mdnl5ac+fP0/b4DTJdeAKbBqsGlxtwcE1CrC0gaPU4JpM6gAMr8CCI30S+UDVqjlOVxDzffE7Ky200sm9JDF6aKKZWbIjwKLEp8Hlk3ixWCSgvri4SCTMFtPbEpLx0aaOQ9sHZLEl9qESrePk3rbVHm3+R2/uJSl3u10jwRRUNaHqum4Eu0/CzWaTrudhcv7sWF4KlrfFIXYg5vh9u92fL8rnWjlXG7TFvlb6/XWeLMgFiNHbli2NjEftqfHjX5oXtKPkrCS62+23ffLabrcpTxQDeN8GlOpX/bzNVupXxkTuANoKhuDPcrlsbLPXXPR9qV4K9LrCCJ10nOSP11mvIW/4jtU2vV7PHj16ZI8ePbLpdJoma/hOCUTjSfNA7RpyuwSnBacFp7XHYXBacFpw2vFJcFpwWnBaexwGpwWnBacdlwSfBZ8Fn7XHYfBZ8Nkx8dmdBab333/fhsNh2vr1Qz/0Q1aWZarAvv322/bVr37VfuM3fsPef/99m8/nDUehmE8SM0ug5Q2C0f09CooIIKCGbXvvgcYnNoClwK4O8qQA8KlOWn03O5w/qpVQbEMbVJP1fiUxKo4kLVVFSIQq8mazsaurK/vKV76SVg/M5/PUFqsZVqtVOg9Vg40tghpY6E/1GCD19gQ4kbquU9Veid6vhPCVeO9vXU2hvoMQ+A6g63Q6CWS0Gk9isx1RyamqqmRXXWUCmDFuBQetqmt1nSrxbDaz6+vrVMVeLpcNgofEISXGpuTDwwXbwJKtr4zDxxS2GAwGlud5spWuZMHW+NznpY5NfTMYDNL3bH9mjDrJ0MkYPvPEiy15aQ5qfPpJkPpjtVo1YosJh5dOZ1+px266vRc/+7Y1H8gpxRpvN10Zo23pgyLLskzxkmWZTSaTtHrgs5/9bAL4uq7Tdt/1et3YoqqTG8hWYyXkbglOC04LTgtOC04LTrsvEpwWnBacFpwWnBacdh8k+Cz4LPgs+Cz47Pj57M4C03w+T9WuPM/T2acEOyC+2+1sNBrZxcWFPX36NFWuAUOSSA2FAxEPHgQN1UmcTWWewWkw6uf0w4tEXK1WNhwOU8By9if66XbBqqoaeum5qEpgJBOJRFJDREo6XAfIaGAroeBA2seGCpRKjOjnt3dWVZUqqWxfzfM86UUwkYyqgwKUB12CWsdMf3od1xLctKk/FUQ8WapOZpYAnL4J/iw7VJKVkLEv/QMs2ICfGjfan+pOnEDOjJPYIV7Z2gsJK8ASO/iEfok37K320+q/B3/1F+PX2Fdy1fFxDW0QX9Pp1PI8T/7wNiRm5vN52vYMcKo/IEtEJw+MjzHo+NpESUH1ASN4EX/YWLcra7xofKrO3O8njD62lYjQTVcvEAd+pQq+NNtPNqfTqT148MAePnxoDx8+TMSiq39UX48jTCzI65CXS3BacFpwWnBacFpw2n2R4LTgtOC04LTgtOC0+yDBZ8FnwWfBZ8Fnx89nL93BRKfT6bRxFqKZ2enpafr91VdftcvLS3vnnXfs+vraZrOZPXv2zGazmVVVlc5L1eCj+qaDUOMDQmaH4PRJgJPbAl3b4zsCiz4JyH6/b5PJJBkRAzPWsizTNs+yLBtb/xaLRQP4eM/vbaIBmmX76nVRFIm4AHIfyFptVcEufM/9jB19uZZg5cxRPtPkhPjQV+2mIKNg7xNQgQkdFZzV91pp9+Cmqy0AeyUY
Py5NQm9vTRiInd91XBqvvV4v2cmfzwqwbDb7hxJy1qpub1TgZVwK7tqm2sYTqa6c0LZ8DrHiR22hMekJgZhHD1aAqH0V8K6vr9OD9fCZgqfep6RJm5642kTzln5VFIt8jitAc6/XC3tqPOhPBX1tw9ucuNXJHTFR17UNh0PL89zOz8/tlVdeseFwaEVRpPfD4TBNImmDGCdniH9ye7vdJrJVG4XcLcFpwWnBacFpPpaD04LTjlWC04LTgtOC03wsB6cFpx2jBJ8FnwWfBZ/5WA4+Oz4+u7PA9O6779p8PrfpdGqPHj2y8/Pz9GC2brebtlo9fPjQlsulzWYz++Y3v2nPnz+3J0+e2Ne+9jV76623UkCw3YuKJECvIKlO06oh1xCACpAYXQNBg07bwWEE53a7tcFgYIPBwM7Ozmw4HKZ7dfXDcrm0i4uLxnbC5XJpVVXZbDZrBAeVZnTzQU+SYguuxel5nt84uxGnAzY6Vg+ikCTXadJnWdYAVKqQ3EciojNBxDbZNv09GNC2Xy2CjtqG/kQv9Y+CLw9jW61WDX8qWJHkrErwL3RScNZ7FER1hQD6KHFip8VikSYlV1dXjZUvCpaMhcqzxqLXo22yAADTlvpKK9boqNuBtaqugOhjdLlcpi2g6qtOp9Mgptls1iCz2yZrPhdvA1E/Xv+iLSVrJmmAn1bT/bjagFBjFT31/FuwRu2mWKKTntVqlVYq6YqYbrdro9HIhsOhffzjH7dPfepTdnp6asPh0CaTidX1/rzgZ8+epdz2sYGfmHjx0Mxnz54lolL7hdwuwWnBacFpwWnBacFp90WC04LTgtOC04LTgtPugwSfBZ8FnwWfBZ8dP5/dWWB6+vRpY0XAcrm0k5OTdGYfW9p4MFS3u9/6t16v7fnz5/bgwQPL89zeffdde/r0aTIUQUP1TKuB3vEK7BiVgeV5ntrTgWpgYTCSAOObHaqtrCTQYGIsgDZjpvK+Wq3S6+LiIgV+Ve2rsbrqQoFT9WM8WoXEcQC9f2Ez7leSVIAl4QkWrUBr/9hD7UKbbYlKGxr0flwKrGoHfuc6fwapjpHA7na7VhRFqr4CLN1ut3WLI/20+buu60Rs+BGfkeQAMVtS8ZHaAv3Zjnh5edkgSyZDfgKDLvgJH+m2ZAVWfKDx40lEwVf9y3UKlm1ATMwh+EInKoiSHfHkibRNtCqucaV+8kCMLf34+U4nCVznbefH6z9Tn6C7xoq3qY5ZyZgt/MTQaDRKuDiZTOzx48f2+PFj+9znPmfj8Tj55urqqhHz5LaOg5glTtfrdcJizp72JBtyuwSnBad5ewanBacFpwWnHasEpwWneXsGpwWnBacFpx2jBJ8Fn3l7Bp8FnwWfHR+fvfQZTPP53LIss2fPntlbb71leZ7bZDKxoijs0aNH9vjxY/vkJz+ZKvDd7n6b1mw2s9dff91ee+01e/r0qT19+tS+9rWvpTbff/99WywWCVCp4NGGGlLBviybD/RSg7Q5Uj/jGs6gROgnyzJbrVbJ8VT19bxVzogsiiLpuFwu7dmzZzafz225XCanECi+6m12eOgXIMzKBB7+poGmW9dUXwKPdofDYXowH8FPEGVZlkDOJ72uxsCuWsWkUss1mtQ+QZVgCF6uHwwGNh6Pk90AKciN8elZrZ1Ox1arVSJlbKOkoUCAfiQmL8ZDfySY6s/ZlcTXcDhMY+RavtOq72q1aujAA/bMDuBoZg3wVb2wF7orsaiNPZiqj+iL1RoedNVWeu1ut7ux6gJfsO0SnbyNGY/qo7lGDKofuNc/zI+t1Zoj2o7qRpusHlBg5BoV/KaiK2SYqBIzuqJF7aIrYdRHOp5+v2+PHj2y8Xhs4/HYzs7O7PT01E5OTlI80fbV1VUDGzTfV6tVeqjpfD5PejFJ0dgJ+eASnBacFpwWnBacFpx2XyQ4LTgtOC04LTgtOO0+SPBZ8FnwWfBZ8Nnx89mdBSYFGM40JQGGw6HNZjPb7Xb24MEDm0wmNhwO7fT01Hq9np2cnNh4PLbz83O7vr6258+f28nJiT19+tSePHliy+XSrq6uUhUT8CGocAxBp8BHtU8riL4N7vW/q0H5DDBiPAQ3RqXirVtrB4NBo7o9Go3Sw8fYtnbbQ7MAec6RxAacF6uBV9d1AhXVDbuos0k8EkjHDYhhS2zD9yQuW/8AKIJb7Uxwa4Lp50oCkGqW7Su+jM0ThfpDE4nAxw9MKJRwNOn0fnywXC4blX5eej06aHLrKhBNaiq6CtAIBMd92NHbSolHY1v9oTopoWly6zj0c29PD06aOzpetQUgyvV39evBSicLiiMaNzo+PxnyYN/mKyU1jQW9h/dt4I8/deyQh18pwQoQ78csy9JKJM5TfvXVV20ymSSgL4rC8jxPky7GO5/PUz4yQeaa6+vrtP0fnCReNBZ9PITcLcFpwWnBacFpXB+cFpx27BKcFpwWnBacxvXBacFpxyzBZ8FnwWfBZ1wffHa8fHZngUmTXhMeQFsul/bkyRN7++237bXXXrPHjx/b933f99nZ2Zn1+3179dVX7WMf+1hS/nOf+5w9efLEvvnNb9ov/dIv2Ze+9CV7/vy5XV9fp9UCKN7pdNJWRQIL45Zl2TCMXofB9ezHtm2DGhxKZtvttlE93Ww2KQj0jNOzs7M0LjNLD9Lqdrs2GAxsu93abDZLDywEzKmIP3/+3J49e2abzcY2m41l2b5SSVWX6j6OVVBQsOaasixtuVw2xso4sQ028VVbbK7bQ/v9fmNFhD/fkXMouR7/YHvIjHNLsT0kRAVXz5sF0GezWVo9ABD0+33L8zydXUv/Cijoh00413O5XDa2uKp48vdgqDGkMQXg13Vto9EoVbZns1kCN9rRuNMKsE9SrewrOEMW6ie+U9ApiiJdQ7ypL3UlRafTSefv0h460JeuKsGfgCC2wzbedrTBWOmPFUPr9frGCgydxCiQ6ucab4oDbBdVeyoo64oD9CDPlGiYpOkYuU5XGRDngPsrr7xip6en9vDhQ5tMJik+e71ewpbLy8uGfsQlEznOWWYFAbG7XC5v4JaPYR/XIe0SnBacFpwWnBacFpx2XyQ4LTgtOC04LTgtOO0+SPBZ8FnwWfBZ8Nnx89mdBSatpmrVDhDCYe+9914KktFoZBcXFzYcDu3k5MQ++tGP2snJifV6vXQu4Ouvv26TycQ+8YlP2FtvvWVf/epX7Wtf+1pKbJKWQbTpVdd1SjZ1MMGlWyQBFYJEjeX7on+AgofUmdkN5+HsXq+XqoVFUSRCzLIsbVnTCjSOW6/XlmVZqkqbWUpErUZrwkAIJKImgtqLa5XglEQ1SQn+oijSVlCtmgKSSg74od/vpzErACrQcD1Jvtvtkl2xEz7bbrcpuPEtY8R+TAq0Asy41F7z+TwlDSsatD3Vzdun1+sl/+i49D7sxIuE9bGEYBdASvsGbLAr/ep3qqMmO/oxRq5XANf7e71eitXJZNIgACroGkPaj8aBTpYQT2DqX3Qgl7Qfn/tK2lyrBEb/2qefKPqxq9+9D70eYAPXKOb0+3178OC
BDQYDK4rCXnvtNTs/P7fJZGInJycJa/AJ8QG54WfynnEtFos0OSHOISife+iqMRzycglOC04LTgtO0340DoLTgtOOTYLTgtOC04LTtB+Ng+C04LRjkuCz4LPgs+Az7UfjIPjsePjszgITIK9O9D9Rcj6fW7fbta9+9auW57mNRiM7OzuzbrdrZ2dnNhqNrK73leSzszP7zGc+YycnJ/b48WM7Pz+3Xq+XKmmz2awBJjjKBzvJpYDQprvep2PRe/jdG06BQ8e+XC7TCoBer2ebzcYGg0FyptnhYWwELu10Op10LijfbTab1Lae1aq6KJBRddZk88CF+GTkGgBNAxs9SX6uA5x8AgMmqgP2Zmsp7ehqBIgOXQhqSADA8cRMNVeJhTZ0tQWkwEP1NAbwM/by/laduE7HzXWsAqFfEtq/R3wceh+p7zTBFSQVzNR3Pnbrum6sfOl0Dlubu939+cO8dAwa8351Bv5XvbGf2l3F56v6zJNs2z1qc80BtZ/GH9drP0pMuipBY0b7VqGdfr9v4/E4Ydvjx4/TA0c563Q0GqWH6eFDMG273aazkiEWtScTwPV6neKfHG+ze5utQ14uwWnBaVwXnBacFpwWnHbsEpwWnMZ1wWnBacFpwWnHLMFnwWdcF3wWfBZ8drx89tIj8nBIXdeNc/wwAgFGBexb3/qWdbv781LPzs5sNpvZo0eP7Pz83IbDoY1GIxuNRvbZz37WPvWpT9nTp0/T9tarqyu7urqyr3/962lbXFmWjfckgJmlqiJO1oqjAhuJh84awIPBoJHoeo4oDvMkY7avAmpg6NY13UqodtKE6Ha7qerY6/XSA7UIDvoHHNW5VH3RodvtprNZ9VoFtX6/n5Kcz3lh28Vi0bAhfdEf56lyPwlVFEUap4Jnt9u10WjUaEcTcLFYNHzsiZzrOIvW7OaDElVIIgUI7dfbTxNfyZj7uUZ9CGC1bSPkOlZEsH3Sk7UCEPeqn9mu6CcqxDrtDAaDBnjqAyABJ4hKt1kSp/iKmFC9WM2i+mvua3yZHbbZ6lZvtRc+9is4NK4ZF/eofdRP6kNdCaME4gkd29EOfWo8+ckQ9w8GgwTuDx48sFdffdVee+21tN21LEsrisKKokgTWiaCz58/T+B9dXXV2IKtMcl2fWyl5KgkqnbDdrpaJeRuCU4LTgtOC04LTgtOuy8SnBacFpwWnBacFpx2HyT4LPgs+Cz4LPjs+PnszgITYELFVI2lSipYEHg44Z133rHFYmHPnz+309NTG4/HNp1OzewAqpPJxD75yU/a9fW1XV9fW1EUtlgsUlV+NpslHbT67ANAwUs/U+epkzXJCAgN0jzP03tAUoOebYdmlqqF6Md1CqpeOPOT8xd98vgE0MBtq6ajI3piAx+8vEcgQhJV7UM72Ilr6W+9Xtv19XWKC31QYJZlNhqNGmDAdk78S78KIj5pSUh+ep9jK00K1UEJCBuzUgFi9jbs9/s2Go1SMqOX9oUuTD78C2mLAU8O+gKE/aQFQGMCMBqNUhvYmrFoHGk/gB7n3DKJwq8AEWCjdlSQZPsy9obYNptNYwu3xrGSuNoBv3tdNTZ9fnOv2lZXcWh+K1YpiFZV1SAz9f9gMLCqqtJ21IcPH9qjR49sOp3acDi0PM/TSg18lmVZOueYh+R99atftfV63SB+jQnifz6fpxxQ2xNfijXqSyWEkJdLcFpwWnBacFpwWnDafZHgtOC04LTgtOC04LT7IMFnwWfBZ8FnwWfHz2d3FphIRAJJq8Qou91urd/vpwdUqeGXy6VdXl7abrc/528+n9toNLLhcJgAnTYGg4FNJpMU5FRpLy8vrSiKBIiXl5eNwWoAaCBBQAgA4wGSdtTRVVWlCiGAAoAxbm9YTQLVwQOQ9qcVRU9I3IcOCuAkfJ7nyeEkvj7sToNbx68g4MfBGBmLB1NNVL4D6Ghbz4xVckV0bGobrgGIPVApGdCGJ3fu8XorUSq4cP4r466q/RmirMxgMuL1U1LnM7WfXttG8rTpwb+u6+Rr2vAEpi+A2wOZAh5g4u3P6hCNVQBFCUaJUXNNJyF6j+aHr4r7nG0DKJ8L+MXHS5ZljRUYqrv+1DNqaVNjRvNKV1pMJhMbj8c2mUzs1VdftfF4nHBAJygQHOf4Xlxc2GKxSBNcxQEfy0rmHlu5Tlf/qN2V6DhnOORuCU4LTgtOC04LTgtOuy8SnBacFpwWnBacFpx2HyT4LPgs+Cz4LPjs+PnszgIT1UKzmysI1PgEjFZUsyxLqwDW67X1+32bz+epAvf06dN0tiAVuaIo0hmhq9XK5vO5DQYDGwwGiShwGgnRBiAebNEVR5JIOF1BgWAkmRm3VmjVOQSZfk8wqk4abNyHTt1uNzleE1X7QVcIRQOWgMd+tMfKAA1EdGC8u90uAR3ttyWE+t0Dv9pM2/Ggxxg1aT0J6nV6n/arfWtsKuhBhkrerBBQkKUqfBvQ6zmigB3+Jh/8pEJ19J/7RFahHY0rbIqdNFarqkoPRwRwaIPfFcz9BI3vVDfVixjVvGojF3TxYISfuZZJi5/wtPWtn2tf+IDJl/pY80ZtamaNs4aVIAB1nSDwAMlXX33Vzs/P05nN9KOxUNd1AvTZbGaXl5d2cXHROM9UidsTkJKNYga660SvLW6U8EJeLsFpwWnBacFpwWnBafdFgtOC04LTgtOC04LT7oMEnwWfBZ8FnwWfHT+f3fktZy0yMO1UgWi5XCaFzQ5nqO52O5vNZinpnz9/nkAgz3N7+PChnZ2d2cOHD9M5nr1ez4bDoa3XaxsOhyk5F4uFDYdDq+vaVqtVMjR6YST004TQ4PHgwwPH+JyAqOs6JQFB6wOQNtCDwIUYcBT9elIyOxALfSsIZllmeZ43gNgHMb9vt1vr9XqpEqrncS6XS1ssFinpsCvkqjoxTh0rCeyTj89IGvyqEwJsyHv8Bci0rabQFQ/aj5k19KVd9YO3D3owdj9ZWS6XaVu1r8ZmWZYq8ErCdV2n7YeelLRqrxMRBVofQ96eSp4IYIQOu92uca6mTn40xhVsGRPj2e12tlwu0+c6mVPg07Y0F/j8Lj/oJIeYzrIsnZnqicXnLJigOnlb+QmR6qft+AmAjlEnOCcnJ/bw4UP77Gc/aycnJzYej+3s7Cy1vd1u7cmTJ7ZYLGy1WtmzZ8/s2bNnCeivr68bttOVNQAyEwq1U9uDIbG7YhrtejuEvFyC04LT+Dw4LTgtOC047dglOC04jc+D04LTgtOC045Zgs+Cz/g8+Cz4LPjsePnspcsp1IiAA9VLRJVQZwNcalTaYssXg+Khd1mW2Xg8TgHV7/ft5OTERqORnZyc2HA4tKurq2RUwIzEUJDVFxXDNqMThGpkqrZ8zxmhGJdgQfw4+YzgIqh3u10COezG9lyCiBUMGljqB95rNVN1xq46FuxN4vDeJ6fep4nE+BANOtpkPBqo6oPVanUDFPnOJ7HGH98rCCmpootvg/EBdGVZpgcUKvjked7Y+qsExjggUOILkOU6AJ4zRj3IKiEyLu1L8wcybCN1vZa4VEEn4s
ZXp9HDzNL2ZiUw7veA4v2En7GhTrT0fjCDuK+qKhE8scO4tbqPLpx/zLUas7SrcaZxo1jDBKTb3T8IFHtmWZYmPlmW2fn5ub3yyiv2+PFj+/jHP57uy7LMNpuNrVYre/78uf3Gb/yGXV9f22KxsGfPniUbMC5i0+zwIM7BYNDYTsvkEDuzMoK28JWe18t3jLmNBEPuluC04LTgtOC04LTgtPsiwWnBacFpwWnBacFp90GCz4LPgs+Cz4LPjpvP7iwwabIoSJIQGBSHEnw4ifc4QI1b1/sVAbRLtZzBEDQkF7pMJpMUNLPZLLWr1VICB6MQDBjOV/AYB0mj4ML3ClAa7J7gtE0FPE189Kct9GpbAdFGWD6Yads/iFDBRMFdv/cgq+KBxn8HOQEs6gv9qUCvAK3tMw7s6sUDIjoTG3qNB3w/0QAgdZLQ7/dTkqqfFUT6/b71+30bDAZpJYGCJMmrtuVzjSXVt01HJXe1PzanPfWJJ0AlC0C57VrAyBNTWxy0TQqIZ/zmx+T97EEO8OUeBfM2QNTc1bExPm+HXm+/hbsoipT7kBsTI77LsiyB/OPHj+3s7Czprw/+fPr0qb399tsJ6GezWcpfJsLexndNSJR8Fbe63W562B9xriunwAOPGSG3S3BacJrq4b8LTgtOC04LTjsmCU4LTlM9/HfBacFpwWnBacciwWfBZ6qH/y74LPgs+Ow4+OzOAhONoDhnBJ6cnKRAw1jqSE10zlwkGBhct9u1sixT8o9Go6TwbrdLDgdMCC7OD0SX1WqVvlsul61Ao4DPi4Q1sxRoZVk2gq4t0DUYbwMctYMnPT2PVNumjX6/b3meJ531rEraxd70r23QpgKnWfNcWwXMsizTKg4FWn3PdVRBdQIAMfMdPvek7sFSVx3Qtx+HJ1TuVdJlvCSG9wHtsA21DcTxhwK9AiO6bTYb6/f7ttlsbLFYpNgmRhmLB7u2cXky0xjXarG/R5Nak15jk/YAGNWP9hkn+UROcr8HbeyhZKZttoGX2s/MUr4R1zrRUHsoqbNagPvaiIjPFTOIlaIo7OzsLIF5t7vfTq16c35wlmV2enqaHqqHfbbbrV1eXtq7775rT58+tXfeecfefvvtdDazAjlnO2NnbKkTWCUtfkJgqheTauyG7RVX6FsngCG3S3DaXoLTgtOC04LTgtOOX4LT9hKcFpwWnBacFpx23BJ8tpfgs+Cz4LPgs2Pmszu/pcpPNW0wGKQkNWue+2hmyYhsDSuKIiXDdru16+vrxlYttllVVWXL5TINlCqqOpvAqet9VW+9XjcSVavC6EsgArRZtn8AIOeG9vv99B7dtS9NPpzlA4trFJAJHAKKB29REfUrGUg2bDkcDpNDfeVQwYv7uQ4wx6YKavit2+2mLcTYc7VaNUBEiULBStvjer1H7c1PvZcx8ru+RxcFCES3GeJ3HSc6KIGoTfCNJpyuAlAbK+Do9mkSCxLTh7cRH1oVJ07athMDbv46BeI28IZw2+zl/aPn52rM3UbGCP7yqwPoC7vSLn2pTogCvNrdzJI+SjL4a7lcJl+Rm75NdKNtJotgz8nJiRVFYXme22g0auhHjhM/xEy/37fJZGJmZldXV/b06VO7vr62y8tL+8pXvmJf//rXbbFYpG3mOjGjXcUrzpLWHGA7KltX8Qefaaz5XIZIyF36V4IOuVuC04LTgtOC04LTgtPuiwSnBacFpwWnBacFp90HCT4LPgs+Cz4LPjt+PruzwKSgAqBo4isQ4SyqXgSXVso0wFgtYHZ40BSDJsgIIpyJTrz8dsO2YNBEUFDWhNNKnIKYBoe2qeTh+2E8ajfVmaCEJDwgch8vFZ/M/jv8hQ5cS59t33EfdtB+lQy0T9+3JwdEbejvUxLVhFd/qT/8ygPt1wMn9xNvkANjVDJCz9vGpQnkJyJK1v56vsPHOs42u+ETtRftqH3UJ23xSYyhq7dZlmWJVGhL40bbU/DSz/w4fJxovN01WeCnz1HA1+zmCh1t00829KUxQ656Gymu0d98Prf5fG5lWdpsNrPZbGZXV1f2zjvv2LNnzxLBs31U7UN8eXJUMmOyDEFAbmCQxrTez7jb+gr54BKcFpym9gxOC04LTgtOO2YJTgtOU3sGpwWnBacFpx2rBJ8Fn6k9g8+Cz4LPjpPP7iwwsQ3PzNIDsqhEq2M2m00C5TzPk5E3m01SXqt+VOFpY7vd2mq1SgGYlJPzBfV6+mVFwWazubEVU8HK/67bKj0A1HXdOJdUDatVPq4lGAlIgIDvWUmhfREIOlZNBh2n/q4/9R5NYiUPrsGXbYmmQMn9+A97cZ8CogakAoGCJ2OkDZ+0njjaCE6T+LagVrDkXl1dgg/Uh9jBV6qVnNoq2B6M6Vfv1WuVFBQgsRuikyofG9he9eceTyLav8arns/pbaeibapv+M6Dj5KF9qtASj8+3nW8alv/0hy+jQjNLNm6qqq0UgFsIv6ZgBKLYBFAu1wubb1e23K5tKurq3QG6uXlZYMg2uKYNnQlE/YA5AF6Vjzga803MMRPaPClTgx1JVfIyyU4LTgtOO3Qh/alnwenBaeZBacdgwSnBacFpx360L708+C04DSz4LQPuwSfBZ8Fnx360L708+Cz4DOzDzef3Vlgev78eQNsUZLBslLA7LBlcr1eJ/Cfz+cJPFghoKBvtgeg5XLZ2P5mdjiHlYqeBhbVvsvLy7QN1AeTgp5PCB5KxxjYkuu3ybHVjQex0QfBgQOzLLuhgwYk9qMtrlcgqar9dt3tdmuz2SwFpgaNJjP2ZmwaUGZmo9Eo6cOD4bh2NBo17KvBSruedFntoQBJHHgAB1TMLCWaBq0HLU826K59+y2q/GQi0el00nZn7LNYLBp2bCN7fQCg6tXpdG6AtPaNbflc/a/f0b+KByb0Q0d/va9OMz4dG9ujPSAAekqCTDy0beziyUR958m/zRbe171eL+W/ThT8pEe3cZNb6h8+43PVA9ut1+ukz/vvv5/ONM3zvBF7qhNnKaO3bivXyWNVVcnG+Il863a7jQf2IVmWNR5SWhRFysHVamWr1SqNQf2l+FLXddpej16KPS+bBIU0JTgtOA0JTgtOC04LTjt2CU4LTkOC04LTgtOC045Zgs+Cz5Dgs+Cz4LPj5bM7C0yAth8AAaEO5jMFHgbA9yipwaUDQGkCwQMJfWJAHnymgKQBoMHH57y0LwVIHZOee+mNCpipwblXq/Y+SDVpSEwzS+NXslJdFFT43I+TzzXRsJcHV/yi17W1qd/pagjt0+urNtG48TGj4/EErSDSpqvaCr+QLIDger1uVIrVD8SegpbaSIG1za7oxHjVRqpvm431p47fX6Mxqtf5iYS/FrsA5r5/CMbbU32j/SkQ0rbGrp+IeJ1viyfu17zw91RV1cABtbWO2+xwvir9KOZ4W2FHPcO4LYa8XhofOgHWlUTERJZlaXLnJ2x1fVixpDqp3YmjzWaTyFlXEbTlRcjdEpwWnBacFpwWnNa8NzjteCU4LTgtOC04LTiteW9w2nFK8FnwWfBZ8FnwWfPeY+Szl
x6R5xOgDRQ08DQQq6pqVLBxmiYnIK+VTgVU/Qzj8nCvoiiSo9C1TScfOG2B7MkCwFYb0L9Wr71tNAkIOK0Ea8C0OZjrvT29eFC47Xvfng84nwT+frWd//02AtIqtYI9JNPWl4KTTzBd7aAko37TxKYKzMoHtkhT0dXxUb3141S/tIGQJqyCDzq0AdFt4kFQ7eLbRl8FWQ+IbTqq3dWefiVJ2/WqS5v//D2a53yvP/09fvLgJxAKqhor2F5zUT/XePI6cN1tZKi66oQJ/NGJgz6AVHVm6z54oSBtdliZoT7U1QT85Kxn708f/yEvl+C04DS1nf89OC04re2e4LTgtA+rBKcFp6nt/O/BacFpbfcEpwWnfRgl+Cz4TG3nfw8+Cz5ruyf47MPHZ3cWmDAuW7GGw2FKFAAII2pA67Y9qmB8hnHYBqbJqUGgwM5PdOr3+zYcDi3Pc+t2u7ZardJ5qJAGqyCovqkDtFLMeHa7na1Wq8Y1Kjq+uq6THv1+387PzxsO9UTQlvQQCY5k62S/37cHDx7Y5eWlLZdLWywWZmbp/ERtQ0GcQNIAZRwQohIG49PE6PV6jQeAdTqdxoMM+/1+4x5InPHgHw1m1UO3t2Jz4qjb7aY40C2xVGO1iqxgjp/Rj+DH9/1+30ajkZ2enlqe52nb4mazsfV6bdfX1+mcX00agF79aGYJONjiaGZJP29XBVEdvwdhREldAZ4Y8SsAsBm6Yze1gY9hcgkfcB0+I5/bJhdss26r6Pt+lIzwuSdSP1nie7UbuMN3ijcK8gr0VVWl7ejYSPNOc8ivkuJM5rIsG9tmO52ODYdDG41GNp1Ords9nJ2s4wXceZEzxInmw2azscViYbPZzBaLRSMGFei9zYghVsncNlkLuSnBaQcJTgtOMwtOC04LTjtmCU47SHBacJpZcFpwWnDasUrw2UGCz4LPzILPgs+Ok8/uLDCZHYB4u91aURRm1qwQZ1mWEhZFNJDUsEoAJLcHdA+GDJJ7GDi/0x/nG/KdH7wCruqFAX2wqX56ndqEn4PBwHa7XboeoL9tXLxXXfW6fr+fbEVyK3iqndXu6KZjIzk0cTWZ1D9KGnq/Jq3er30RC5yXy3i8v9SOfO4TV+9jTKqrJ2700b6IU9rivF1decLL20z943+qvvTjq/H8VPF2VDCmPeJGwawNtFU/7LPdbhPZ0L6KxiRbOrGR18nbgp9qZ80XHZcHI22bSrqCJ+817rQNnTRVVZUmZt62Gs9tonigRIW9deLKGHV84AtjpQ2fG/SF3fxEU+N2NpulM1H13FU/WVHbMEkmr3UyHvLBJDgtOC04LTgtOC047b5IcFpwWnBacFpwWnDafZDgs+Cz4LPgs+Cz4+azOwtMgIgPAK0Wk3jecW2B0pYIfM65gFmWpYfbQTBUtc0OW1LNDiDIPd1uN93He5KA9xgPg7bprOKDT52BXajg0o6ufmB7pJKfAr8PcmQwGDSCD6EPvU8BSMelunrA4nO9Rn+2kTbJoXGh42HFiY5N36vdSFx90S/go0BKnyQX/erDH/U7tQGVf8BQyYJVHGoL2vPAzGd+xYbqrvZTcFE/qR1uE/Vpm263AbG/VnVvm0j5vvy9t+mvdtbxt7V9W17Rh4IZedOWb216ab+QALnofcF1nGPsAVUJG/BUfEBXzkZW2+kYyFdiVsFeV4PwEExwTkVJpaqqhg70qxPnu2Ip5CDBacFpwWnBaW3tB6cFpx2jBKcFpwWnBae1tR+cFpx2bBJ8FnwWfBZ81tZ+8Nlx8dkHegaTN5ivEpsdtrXq1i9NYk8GHih6vV7a+jkcDm2z2aQqG9sRGbgmIkBaFIUVRZGMrlvO1uu1LZfLtD1R9SZAFEB9kurYNbB4zzZZBTXGlOd5qq5TFSUgIDScqe/Zmjoej1P7EArtqJMJBl014PVUm/OZtqFkiH55nt+whQIg1+mYdMsv55AST7oVEl1JBOIKAEen9Xqd+lYyxfcEvQI3OjJR0KT1dlO7m1njIWh8v91uWycDJLImnZ652mZfftfkVSAlt7gXaRuD6qEgQj5qbBADVMnNDltvlWz1Wk8CfN5G0P4evtNVMupD+gfkFEM01rBtWzxzX7/ft6IoUl/b7dZWq1WyIz4FZ/DHYrFokD2+A+x9zpdlabPZLNkQHTQndGKGPYhj3bqv29tXq1XD/hon6I/d1fc6CQ95uQSnHSQ47SDBac24CE4LTgtOOw4JTjtIcNpBgtOacRGcFpwWnPbhl+CzgwSfHST4rBkXwWfBZx92PnvpEXlUzpbLZToLlbMf6Zwz/rge46K0rjzAiZpcdV2n6p4SAcbmHE+cjgMGg0EyslbzARwFPqp/qre2gdEgpLreb0cD9DTBNajRUUFTgW6z2SSwJyD56QGU/nnf7XZTMGw2m2RD3WKJ/SAc9NM29XdNOOx/FyHwu57paHY4/5OxKCDqCg8lT50gcL3vE7Dmc0DfB7NeqwmBL3a7XSJ1JV8FSdVB29Ck95Mb7y99j6356YlVbU3Cql/a2tF7sCFtaRvaZ9tEhUmb5oaCFKL5Rx74NvicXGZiAkjqdk8lAo0Fs8MKEf/5brdr6L9er5MenAesYwbYVR9vXx0XOMLWZZ0YEC+a04iucCDH8Z3msydBbEFM8vlisUjtbbfbxmQQUmHcxCl5qDkV8luT4LTgtOC04DRtIzgtOO2YJTgtOC04LThN2whOC047Vgk+Cz4LPgs+0zaCz46Pz+4sMFFhpyJJtYvKrFb/B4NBo1JOEmFIfWgbwYyTcQSGXiwWtl6vU+VNt5ryk8TTQASk+VwNnGVZCgTtU6uter9PLAVhnKjBy5j8ixUGADe2w2Z+BYYPfgW4brd7Y3uafq/g2naNJygP5jo2DTBPKJ1OJ1XwaV/vhSCxmb4HBLA392tiEMye9NVnfjWL2tmPC3LUz9U+2pd+j94aY3qffsd79aHvR9tpA2O1sf9cQcrb3YvqZNYkd28jncwQx8QQucG12p+CHvdgZ79iQseludJGAtq3vkeYmBGD4I5fOaHt8hm66WRpvV6n2PDEraDbFhvqH+xsZg0beJ+pzTQ+8JF+rjmqMeQnLapXyN0SnBacFpwWnBacFpx2XyQ4LTgtOC04LTgtOO0+SPBZ8FnwWfBZ8Nnx89mdBSaq3jiVraOdzmEb43A4tLOzs/SeszB3u13aLrper9OWU6p2OIXEZVDb7dYWi0U6uxLA16qkAq0aEV0BRA8gPiBwjjofg0MM6Kh9qxPbEgLRFRU65izbr2Co6zqtFICEsAHig0Ydq0HQlkyMnQowCaNgr8Gi7XgQU/DXQNOg5Hf68aDO1kH1H+3rGFVPb1P8AhDQlq4+oE1NPE1mJe82AuQ7T6r6uo0EbgNf35+vVGubalf9zrfjY0JJS8lB88tfyyQDO+rKC41r9EIPBfpO53Dep+YpQn55f+MLrY7rllLdVsq4uLbb7dpwOLTxeJxwajab2WazubF1nP40z9me6n3s7a2+aCN2RFfxMCFmfOpzYrIsy0Yc+n5U0AXdaSvP8+Sv+MPlg0lwWnCa
/xybBacFpwWnBacdmwSnBaf5z7FZcFpwWnBacNoxSfBZ8Jn/HJsFnwWfBZ8dD5/dWWBSkEe5TudQRS7L/dbE5XKZKuFFUViWZZbnufX7/Qa462qCwWBgw+Gw4XgqzbPZLAG9Vv8wiH9hDF4Kchp0GKTbbT6IT+/T67zB+U6rfthFv2dMqgurFBgD9zBm2gPoveMUmAkYTUQAXYnIj0uvTQHwwm9qM+2TewBKJWQPkFr1VQBW/dRWuuIBPSEEJS4FFU2gLMvS1litXqsfEAUXBXcFTOzAvd6GSmpqT40F1bFN1F6a6N7ebfe3fadV9rbvfZ54smLrpI5DCVSF33W86le9R+OU64gbbx/wgoliURQ2GAxsMBhYURQN8B+Pxw0f6NbVTqdjy+UygS1ADm5pHLStbPL578mf7fB1Xae8IW50u6ueSauTZfKeHPU2ULL1q2xUL51k+XwOuVuC04LTgtOC05DgtOC0Y5fgtOC04LTgNCQ4LTjtmCX4LPgs+Cz4DAk+O14+e2mBSTvTYCAgFJifPXtmT548SVXAwWBgVVXZarWyzWaTwE6TgYHxPZ8TKNyjyahJ6gGZ69QgtyUdAIYRcYo6l++9sVUX/Zz3PoCwoYIqJMjqDA0IAontcr5yjt3UH+v1OgGfPqCOcWBL2tIkRHQM6h9NDOKCBOQcW0+wgDb60b+ep8s9y+Uy6dmWcOhJzOlKEeKA1Rlteni/YAd/Fq4SjpKFxpXqpImt4K0xqi9szr0+TnTyoEmswEr7HkjRnfu63W4jn1RvRFcZqD60p9drbKj99XuNV84HpV2/goPPer2enZ6e2snJiQ2Hw8a5wUVR2GQySfrrxIRJ5mq1suVyabPZzHa7w/mmutXd271tUtc2KeT63W53Y4WLvx5bKvlq+/pQP50sgXNKFmp39aPaFnz9oGAfEpxmFpwWnHYzroLTgtOC045TgtOC04LTbsZVcFpwWnDa8UnwWfBZ8NnNuAo+Cz47Nj67s8CkhtGOSAKUI6HVcTiJ6hmBq0mr1XyqdCQzAaOApkFfVYcH72nyKaj7oKZfPzZAzz+ETMftE8yDvQckrvVEqeSj5EU1FYIE9HXVAAGkdvOJyO9asfcApkHP520+V6D3dlMg0KTxYKF6AKyDwcBGo1GyA9ewMqBtVYD3A/cRU9hI7a/fefLHnnmeJ72JU/rCz20AoADmyV77VZ01jhVEdSwe/LVf/9NPmvzEgnbwt/95m57av8aXVq81p7E7tsW+5DD36cPkGBvXAvRFUTQmO6ww8HHO/QD89fV1WoHCxES3rWrM61ja/Kg20Fj2ua6+UNLV+3TlBHipOKr3KNBrXxrXPn6ZLN02mQ1pl+C04LTgtKaNg9OC04LTjleC04LTgtOaNg5OC04LTjtOCT4LPgs+a9o4+Cz47Jj47M4CE0ACCGkQoXBZlmm7mILMdru1+XyewB7H0YZWzhTMAVyqa4vFIlUGqboTkP1+34bDYWPbGDrQtjoQY+pWMgKLoFKAWq1WjeDVgPDAocmqlWucrdt1zfagNx6PLc/zBPDT6dSm06mNx+O0KoAVARr4tMMKBPohgADULMvSeJWoPYizWkITHttxL9sEsTWB54mCgAewh8NhshFbEZW00P/k5CRVfq+vr225XDa2Gmri+NUWXjqdw4PY8JHXE53abIdOrJAgptEBMuC6yWSS2sDeHgi4n23JVVWlVQx1XTce+sYDLD0ge7BnBY4SqicCtY/aBb9zzrC3X5tdlVAUgDjHV3Xkeo3dLMtSXGpfxAOriZS4u92uXVxc3JjI1XVtm83GLi8vbblcphc66hZ3PWdYX0qSXmfVD9ntdo081BU5nAFdlqXNZrMUy5qj+Iz+i6K4YVclBWzDqgFvF3BL8ynkbglOC04LTgtOQ4LTgtOOXYLTgtOC04LTkOC04LRjluCz4LPgs+AzJPjsePnszgLTbrdLRlIDUA3EeGXZfHCU2SHhVXn/ItFIRO1Dq7lUyzS5tC/aU/FJrJ+ro9lWy4sgISFxJDrgfByG3lodJpCVzJQcIEol0izLbLVaJTtqoHK/H6+SC7ZijBAIvlGw8cGMAKT0QzBr/9zLaoder5fAm7EQ3Lq9FVsr+KgfADl0J7DpQ2NCSV3Bdb1eN+zv7aUrInxc+usRYq3X6yVbZ1mWElFjUWOAe9t8pT7wtuA+xqi+4XOvs499s+YZqdjeE6z2oe1rzGMzbMVPJVnVRXNZY5Lxs3WZyQVxen193dgqTtsQrq7qgNg2m43N5/PGmcm6ysaTt9pewdSDv66AQAfVgxUo+p5x+9Uo6i8fM5rfujJD+1Q/6Uvtja9CXi7BacFpwWnBacFpwWn3RYLTgtOC04LTgtOC0+6DBJ8FnwWfBZ8Fnx0/n91ZYNIAIjDpxHeuztYA1ft8cHqF9fs2cvABiuPp5zYA43ttR0nGjxVnmx0I67ag1mBgDDjd21L7MzsQwWazsU5n/6AwDQi1t7chv3sw8N8TiD4AfcDpPfwcj8c3xk1QjUaj9GC06XTasPP19bUtFgvrdDq22WzSfTp2ry86ZNl+xQErRHipD0lcM2uc/TqfzxvxyHhJICXrtlhTnfSzNjurXSAp+lKw96CnxOyvJVnbklaT/jZwV/t6QPfkyHd+Iqfx7sGGPnyOezvRBj/VH7ryQYGbCRFVc40Z+izLMoE656Aul8tkR/WD+tJjjdfZTz6rqroB8lq1V9tgOyaH6KQrYOhP76Efj223/WzDVMat2BlytwSnBacFpwWnBacFp90XCU4LTgtOC04LTgtOuw8SfBZ8FnwWfBZ8dvx8dmeBCSfogHTgvsqvgK3G5n2/328YGUMoaPId7el2Ra7XlQx6Hiiggn6qq27X1ADq9XopQLQSz3Y0JRa1iZk1bEDbgLc6Vp3pt81Rbef9fD5PINrr9W48AFDb9hVSvkMPbNHv9206nSbdNIj1vk6nk1YesDV1NBqlKjrn1fZ6PXvw4EHaWvvgwYPU9nK5tGfPntlsNrPRaGTz+TzZRmNCk7yqqnReZa/Xs9FoZI8fP25sa1WC0nHP53Nbr9e2Xq9tNpul9tCHLb34Gx9rv1p95js+9+d38tK4p2qtbbSBKvGvOvJdlh3O4tV4U3sxdo0f4sPHiY9Z4p/fiXNdLeJJVMmXvnWcujJD+9VVJjpeHrqp92w2G1sulzaZTFIfg8Eg2Z3t4/h4uVymLaCXl5cpJv0kSseggKrk1TbR88CpIK9nJZNjkA9bUxeLhc3n84Qzuo3Xky/nRysZq291HIjmAbnoiSDkdglOC04LTgtOC04LTrsvEpwWnBacFpwWnBacdh8k+Cz4LPgs+Cz47Pj57KU7mFAwy7IU0AAwRiFpGbxWI7XypkGqFVUNPh0oAaVArzoRQN5JOEYNQHtVVTUSQr/bbDYNEtGE4TN1OgkGEFZV1QBb2tTqoiYZ2/0UFADLsixTRZV+AQcFEAVL7KPX0U9RFDf0Qw/OKyWRea9nl242m/S9bk8ty9Kurq5uADYP1MM2u90unREJeWMP9Uuv17OqqqwoirTl1RMlCVF
Vlc3n85T49K1BD3D4xNY48/pgA+yj1xK7xLzeoz5Rn2qb+MavFun3++k+rtX7va/pT3OMs1sVlHUsOrnQ39UePv/RT20HQOv19Mk5ruikuKCA3OkczmXN89xOT0/t9PTUxuOxTafTRozP5/P0UL3FYpFW3+AzHSftK6hiOx9HrAzw+GF2OM9Z8UyJmUkl96ELbbeRC1vh1e/81Piif9pmsqPX6cSvzXch7RKcFpwWnBacFpwWnHZfJDgtOC04LTgtOC047T5I8FnwWfBZ8Fnw2fHz2Z0Fpjanm1njgWcoQcDvdrv0HYbkxcOl2pTSih9OVADTqiXGGQwGja1vnhwUoH1lXUFWyUIDAyFoVDTwVU/uw7E4qNvtpgfooQtCQqg+ONBsv0JivV43Ks2qI/1gW6r+6EjyKUhpwCgxe5IjgPVBaGVZpuTz8QFwAr74p6qq9GA3rb7yHUFblqX1ej178uRJIl1PWn6c2L0syxtjwE6awPzU1Qb0rUlEcuuKF+5R0NVYybLmlmuNCY0xnQD5az0h+XhTUKA/JhU6diU8BU4lH80t1U9B05OWXq8TF9rzMU674AB+zPO8sd2Zh08Sd6wCuby8tMViYcvlshFDPo+w3W2E2GZLjylKFD4vGB/3efLnM7WX+lrtrhMtHwdm1jiT2BOy3gOm+HgJebkEpzUlOC04LTgtOC047XglOK0pwWnBacFpwWnBaccpwWdNCT4LPgs+Cz47Fj67s8CkgUJH6miSDqACnMqyTBW6oigsy7JUMaRNb3gSTStyGMYHlSY9yanX67V6Pc4bDAYJ6CEmvV4dyL0+yXCcD25NVN3mSN9UvNuCVPvAntoeyUHgmFla1UDVHv9wriqE0waAGmwEZ1vAlWVp6/U62aGqqvSQMwCbdqmW7na7tH1P7aoBr9t0leS63W4iET7Df4xR7aN66TghWd6rrbGTAr2CFterTxXoqWCjYxuocr3a2cevtqvXalsKFP5F7OrqAITxK0Gih4KhX/3igU4nS4y/TVc/Xh0rOhCvRVGkM29Ho1F66B7jYQXO1dWVXVxc2Gazsc1m09gWjF/Ur/Tt80ttoTFOG+pDjTXVX22ksUvfGgueNMAatTvXqJ01djxZaSwzHt0aHfJyCU4LTgtOC04LTgtOuy8SnBacFpwWnBacFpx2HyT4LPgs+Cz4LPjs+PnspTuYtAGt3jF4AL0t6AkizoC8rf0sO1RsMZ4frAdivZ8XSdkG8mpU72gFG0hGne4Dmn5IGDNLqx8YA/2y3ZTrdGsrQauV6m632zgzVhMam6vt8zy38XhsZ2dnNp1OE8jOZrOUlHpGrJIWejFGX/31wIavsTWfK3ENBoPUHqtH1D/4uq5rW61WDX8roPgYwVe6ckATm/bVd/gav7fFla6SUEAuy7LxoEBWYvixeOBQsCOmVNBZP+c9/SrpMHZAR8eMrRQctE36QUcFKQ+Qmjfo0baChus88Sggqd1VnzzPbTqdphUEo9Eo5UNVVWlliZ57en19nbZua7Ve4wVRH/tJjW5dV1voJEN9orGgq3Q0J9BH/aATO11JpDijevm+uJbP6Qtwp01WFSkZhXwwCU4LTgtOC07z1wanBacdqwSnBacFpwWn+WuD04LTjlGCz4LPgs+Cz/y1wWfHxWd3FpiqqkoVaRTFoBiVJOChWOoIBZyqqlIVUe9Xx1LRvb6+ttVqlUDEVzTRa7FYNIyV53kyolZ6cXSvtz97Ed34qQCjJNHr9RIYa/8APA8w470PRMBMg0STgHbQU/sfDAbpoWMQBr/7xN5sNnZ1dZV8w89er5fOOaUv/Ei/tAWw+a2fvEdvfKakRjsEno5XX6xogFA18fCPVklph+oyOioQKkiRYIxVk5oxehLRmFUiom0FASUw/M09Ghtcpwmo8aL+573GAZMDP6Hx5OaFmM6yLJ1Vq5MovV8nDlplx58K5m3+9JMmrgHwaL/b7dp4PE6ET95zlqoSEaA2n89T7q/X6xQfrAJSfNHqvBIs+Qs2kRcaw7x0C7gSLA8NVBtpDmu+4lfwUP3qVwTRvidbtSM2Rv+iKNLPyWSSzs5Vwg15uQSnBacFpwWntfkzOC047RglOC04LTgtOK3Nn8FpwWnHJsFnwWfBZ8Fnbf4MPjsuPntpgUm3kOIYtiOSgDjQzBogwU+MOBqNWhOZai5bH1erVePMQwUIAhqdNFC4ThMDI3CdAp4mt+ru70dPdZACpAc9DSJWJtCGDx76VQdj017v8FA7gF63atLfbrez1WqVAlu3EENu+Kptu5yunvDjoQ9d3QBoaYAxJvrmPgVWHbPaUYEPgtIY1AoxAIDuOhZ08KCkY9KxKjirrhqbCvJaUe50Omliop9pu219IHqN2lXbUfvyU2Pai9raxyl+B4i1f80trtd27gIRPwGjHyVISBzdyTEdK+S53W5tsVg08ENtiGg8qH3UNviM1S16L3nvc1JtCAEoKSuJEH/6uQK92eFMU8UV7xuNU7UrtptMJjYajdL2XnAU/PP3htwuwWnBacFpwWnBacFp90WC04LTgtOC04LTgtPugwSfBZ8FnwWfBZ8dP5/dWWDSh5Ap0DMItlyqYt1uN31GQrLlTx/ORtLgTM4IpZrtB6vkQGIryCuQc69PCB8YGN8HoDqJfne7XaPiiv7cT58AnE9UFQJbq5N6D5Jl+4owAE9wafAr8LKlT7eL0o4SznA4THrzMDzaUoAHVBXwFDh1LLpSw2wPOuv1OsWNggt9MWEgWTTplaA9cCshogN9EyuADmPwQK+x4gHex1OWZY1VHWb7aj9nvepWYJXbQE37xkdKqjr58OLjWScgfnUAuaGxYtbcwltV1Y3zdXWC58lGJ3F+EkXbek9VHR6yCKjqwx99LmkueN0VzPgdjNHxeaBXW3oyJXbIR1YqMJY2f/oxt/lH85JVJ57sNZa9fzgfdjQa2YMHDyzP85TXOgldr9d3EnFIU4LTgtOC04LTgtOa/gxOO14JTgtOC04LTgtOa/ozOO04Jfgs+Cz4LPgs+Kzpz2PkszsLTAR2VVWN7VcYEYAGXLSKrQakCg5osN2w0+nYer222Wxmz549Swm52Wwa1XYCi/51YDhZr1GH6rUEBuAD8KMnxKUJ2u/3k5Oo6jP+uq5TYGtFWxPBJz+VVk0EJRqcd3FxYfP53FarlRVFkV7D4bBxP/YAWBkj3wEACugkWVVViRw0iTUBaNsTgraPbTXx2GbIlkS9jzbpAyClPQU9bKSA7QObMVIRBkzRty0+vF+4BgLSRFb/oLMSm67OYLJC3x4sPOiqLdFZiccDigc7QAddiAet6OsKGz8ufKbA73VUGwM0ECbkqStJiIGy3K8CyvPchsNheuCmrsjAr8vlMoHicrlM9yvhqE0QVij4iQd5qUTAvXmeN3ysBKkrGLAL12Af2lSg1kmQz2nuJ5/aCJptqJAg58SytR+bgzmdzn4b63g8voGJIbdLcFpwWnBacJraODgtOO2YJTgtOC04LThNbRycFpx2rBJ8FnwWfBZ8pjYOPjtOPruzwIRD2gJOARVHa5
JpIkEAunqAMzdXq5UtFot09qkHGw0+TQZNPPRRAyoY+s9JFj7XQGi7ttPZb0vU7Zr6PQGpenkwRjwBKMB7INRtexrsfkwaPAq2WknmQWb4lX590BJ4kB7t+f4V5H3gYg9WEvhr+F3jRW2D/ZC2ZNfrSTK1OfcpOamviR0fq3qdVsUhT9oHKLrdbmOMJLrZYRun79vHp9rzNpDV921J7T9TYvN9+7G2EafXve37Nlvo+36/b8Ph0MbjcQIufI1PfazwQn/eo6+3k49dtZeOmVgDpxSvfBzrpE0xRyedPq705SduXrRP7MRWVLASgMdebeSgmNw2cQm5KcFpwWnBacFpwWnBafdFgtOC04LTgtOC04LT7oMEnwWfBZ8FnwWfHT+f3Vlg8o20BaMahaTzhkQhtlqSOLPZzFarla1Wq1R99ca5Lag9kLaBoH9pe/zkoVUElY6ZxMUWCqC+4qgB6UnmrqRR/RSYuZ4EILAgHtUN/fiO6zzQq72UcNSPrATRlSKMT4meRFc96IvkZQunihK2xpEmtyY0Y0F3XSFAX3zHqgXuV4K4jVgBJo1xnVjQP0RJsqt/iQvGTIxoxdxPKLSf2+zCvXclscaCgo9W1nXipACt/ud3XcHjbc/1Cj5mZkVRpBxnlQoPiRuNRjaZTNL2aFYOsAKJ31ldxO8eV/SlvqTaD2D6+Dc7PHhSt6TeBqIK5D6udaLqSUV9gW/VRvhI+2X1S57nNp1OG+CueaWrUcBRSBP/tOFkyE0JTgtOC04LTgtOC067LxKcFpwWnBacFpwWnHYfJPgs+Cz4LPgs+Oz4+ewD7WBicKqwGr7bPZw9SdXZkwJgQ/JRPWR7mlbF1Lg6CA1WXhoICnw4FcMsFosEmGyT5TvGiU6MlW1w9MG4VNeqqm4ADPfzvVaa6U+Tk5ee5anBCxiVZZnOky3L0ubzeQq6brdrk8kk9c913W43bW8lgRlPVe3PqcRHVbXfkrxarRLx6Bg8+OE73Y4KUfhEAQQ0Mdh6533MFj1spnFoZinG+J771A8+0YbD4Q1CUTt4EOReBYDVapWuy/O8YTdeKrTtCUyTlftVb41jyB5w8n1oPLIVnPs8mSL6OeTuJxhc57eoq156D3Hb7/dtMpmkbdW6eqCqqjSxm81maTvzer1OK2b8pAefqN8UB9Bdx8zn/M6KBg+iOmnipStrFLizLEvxVZZlo1/8Qp4oOfJ9p9NJNkGP4XCYtqgOh8M0ZjBF8YlxYWuu0/wPebkEpwWn6RiC04LTVK/gtOC0Y5PgtOA0HUNwWnCa6hWcFpx2TBJ8FnymYwg+C/SV7CwAAKC9SURBVD5TvYLPjofP7iwwYQwdOAb1L4y23W4biqlB+EyDxAO8BrAXTUoGR3uaaNouVUQFOA0inODBxv9ER08oPiA0WdrISsenLyUvHbv2zdh5r+2SDFxLvxo0av82fbTKif/V3zpeHb/aQ9+jO2NEfwVPJcjbfA4xtr28Lh7otV0/DrMm6Km/IGfvrzbBF37M2o9+pza5jVj0Mw/ubfp4P3iQ8vnriVB18zZqe9+WR7p6gEkLk7GqqtLWa8CeScJms2mQlM8/JUDiAcDT8frtnUwkmJSwukDtrPGOjvgTkqKvNixUe3g9zawx2eS8WvCIB+gxHnzCpKzNx/57dFRbhNwuwWnBaepvHa+OPzgtOC04LTjtGCQ4LThN/a3j1fEHpwWnBacFp33YJfgs+Ez9rePV8QefBZ8Fn324+ey3VGBSENEBqjMwLsrrdwrMWhlVEFXHtYkO0jtJzyP1CUMwqm5KMuoYhOvbwMInste3DTxUfIKhowKiVqj5XfVVvTqdTkp0Txa73S5tIbxNB09SOla93o9Lbejvx+Z6v6+aqw5tdm2LMV0FcJuNtS2dDPgxtQF4G2EwYVDC1GtVH9+O+lknFgq2qpv3sbblJwFt5MWY1Uf63W12aOvPj8n3AwjysE0FL8bHVuf1ep0eILlcLhtbU3X1kbevzwPG1+12b6xWaSMrthBjEx2X4pTZAfPquk7g2Zbb3p4+FtQPxE6/328AvT5I77aY1vY9AepkNlbGfTAJTgtOC04LTvMSnBacdqwSnBacFpwWnOYlOC047Rgl+Cz4LPgs+MxL8Nnx8dmdBSatqjEQBSaUxLF8rsDE2ZB1XdtoNEqKsU3PDxJDaaB6wuAzEo/vzCydqVhV+6qgbllToMdI9N/t7s8lVJAkENX5GFjPa8QmHlg0CNURVKlVD+zG9j21E7oNBoO05VIBgfFrwmhA1HXdOGtSgYXf0VEfKqfA5MEC+w+HQ5tOp8lGbENmgqA2YOxqNz7Hvj648akKbdMuttetxT5J1Qfap4KHv15BRrcpQ5p8v16vG9tf1acan35iwnZiHW+n07HxeJx8pVuENb7QRVd76PiUfD0B6u+9Xq9xjq8CB5MntQtg3ukcthQXRdF4UFxVVQnMy7JMqwUWi4XN5/O0xVlzHv8BhkxcWHWg+hKTxBq66RZmXVXBuNsmZ4oD6h+zQ9xhez+RUmJRkkL/brdrRVGkcfHQQXALPMLP4IHGtJIl77kf7G2bSIa0S3BacFpwWnCa2iU4LTjtmCU4LTgtOC04Te0SnBacdqwSfBZ8FnwWfKZ2CT47Tj67s8Ckya0gS9BiPA16rifpzQ5BTCDgKA902o8CKNerMfmc9nXbo4IEenjwSwaQrbTq7DbD0T8O8jbw+tOvkqACmQ90PlOb0Sa6LZfLdJ1u70MXks/7RfVWXwHs3K8+9GCpYOTJV18K9Go7rve2RVffLn3qPZ5w1D5+K6G2531Kmx4ANGn9WPHpdrtNNtaEVV38ONps4EmQtjTZPTn58WnbbUCvNlM7+rhV8tM21R4AVp7nZmYp1vRsTh6kt1wubbFYpM92u116qKZOmvwkSXFDJ1hqG/Wh6ucni4ovPrbbJi/63W2xgT31Hh/nrBxgYobdiqJoYJnmjI85HzdtcVSWZZoUelwLaZfgtKYEpwWnBacFpwWnHa8EpzUlOC04LTgtOC047Tgl+KwpwWfBZ8FnwWfHyGd3Fph0oL4hn0BKAm3f13WdVhVkWZYAAeOTRHyuRvMB1+Zcvd87jz71M/2p4K7t+cDwRkcXH2BtYKbX854AaSNKDzDYRCv9GiS0ocTpwcyPi3Z94PnrGaMCPaBS14fVCICXVtXVZm0B60GxDZDaxqHix6Vx2+ZvL3d97kGTMWoM+kmIJ0Ydrx+T9oOo3domCNqGXufPw7wtf71ttQ99AViMidUsRVGkuCBuqLrXdd0475RqP9tFdXKifXKvxrquLvETPrMDgfHZbXGD3XwcMQa1k5KlJ1P9vM3/2JJtuzzkj3NQIUTNE/TjTFP1heZUm/+YdHj/htwuwWnBacFpwWnBaXZD3+C045TgtOC04LTgtOA0u6FvcNrxSfBZ8FnwWfBZ8Jnd0PfY+Oylz2DCwLz43BvztqDWJFksFuk7XQXAQ7cACIKh0+mkLa2qgyYAAE8g0YZuDdWqtgcOBbrNZtNwgo5FwRPDa6CzTY722F7qnYNNVH/d9krwaYLVd
Z0qtGaH7Xoa+LTZ6XQa20F1DPjEE6z6SP2z3W4b9+hWvTzPGwR+G1iqtIGTT3Ku0aRQwfa8SBYlybbKtK+sQ17qG72G77hWq87Ek/pHwZH7NYYYs29fbcNnXi8lVbaQEhP0yRZPrlMft/lXfZHneQO49HO2Wna7XRsOh5bnueV5btvt1pbLpa3Xa9tsNrZarVKMsHJAt4yTGxpD+lLfE0/oAUjqig21NWPWlRlZliXfqx+wjfcH7/M8T335+NRYxGZZdti6jOi23eFwmNrSCVBV7bcqg3Gj0ahxNipxwLZezV/egwnYOuTlEpx2GEtwWnBacFpwGhKcdpwSnHYYS3BacFpwWnAaEpx2fBJ8dhhL8FnwWfBZ8BlybHx2Z4FJjaYdeSNhCP1cwRQnq5K810ojBtjtdg0wJEA6nU5yQq/XS1vm+HwwGCTDqHE8KJJIBDW/UxFXYlBQ0bHq2DRI1WY4mM+VFJU4fPuAhNezTRfVsdfrtQa3+oRxojPEYGYJMJTAPIjSjlaO20CezxU41G5Knh4k2+xt1jyDUr/XBNd71S86Bj5THzB+/73GoY6TuCUGtA2Nb283/d3r26Y773XCRUwB/Oov7mdVgY9N+mEiMRwOU7Xb5zpnnHItZwVzDef2Ajh+wkIcK2AD1uhFnKst1A5qA7Wv+qVtUml2yGf9jkmE3qc/dUKpttBrFMB1QqE+hFAAc/pWf+j5p+Bb24TU5yJn7zKpZOIV8sEkOC04LTgtOC04LTjtvkhwWnBacFpwWnBacNp9kOCz4LPgs+Cz4LPj5rOXFpi0U5x2V4VUpSzLRnUfg2NArQyqk9oM0+Z8HNaWzPo5bfLS4MuyLBnM30e/fqxtYONBi+qo6q3XoK9WWFV/tXEb0Xo9fRAROL5f1VHbUb/wmRKC2l31U6L0PvIA438HwNqu0QRXOzIWBX1NHD9On/zoq7FHPPi+ubYNiG7LBw/u3m6ebO7S1dtSfao2wE581jZmBTGtdLP1VCdQnlTQRSdjVbV/oJ6uJGAFgOqk+asg70HNxx99ai75uGC8bXmu39F2G9aoHXUiqeNXu/DS84bJX53Y6GSvzR783u12rSzLGysXuF+3H9MuKzO4T2Ml5INJcFpzrMFpwWnBacFpwWnHK8FpzbEGpwWnBacFpwWnHacEnzXHGnwWfBZ8Fnx2THz20gKTB5Gqap6f2ZYAmgjL5bLhCF/VV6Oo4X3ieZDSKqUmgoK4HwOfE7De2TiTfhTM2q5Dfx9IajMFJB0nuu52u7S1T/vQgNA+PVBqInlCVHu1fYau/O6DksowY/RE20bEbYSs+qoOShLqYwBNgZZ7SDKNIwVtjRfa4zrtT2NDYxsQIk7UJ75q7xNXE1/715/aj96nevrXbStbiCUPdN7PVVWlB77leW5nZ2c2HA7TyoDBYJBWkQwGgwR2VbU/4xQgWywW6fdOp2PL5dKWy6VtNpv00+zwAEslPfxEHGBb/d7nEff0er3GaiPfNrmvwKo5p/Hhpa7rhr4K6GyR5UF5ig/4D9/opMgTq8aT6qF5x3d8j6/Ywqr3gH3q79vGF3JTgtOC04LTgtOC04LT7osEpwWnBacFpwWnBafdBwk+Cz4LPgs+Cz47bj77QDuYzPaBDiDxgCetONKZKkKAqPBZWR4eFFVV++1XChp5nqe2dRA4HqNrFZ7gUaLA4d7ACqYAi94H0GlCacVRXwSfgriSogaOXqPBXRRF0k8Tm37bPseebaSiAY+NNZCwA4HO2NfrdWpvtVolGyq5dTr7B68xPiVcjQdNRvTnc3RSHX011oM3umpF2/ua2AOoVBRgFUw0HnysoJNORNTXnkSxgSccBXv60HGiu9lhhQzCWNpISmNIAUtt3ev1rCgKGwwGlud5Ann8pPas68NDMdl+ig3W67Wt12tbLpfp/GIeoKekCBHQvifa20iJMXEP5INNzSytVvATDyXxqrp5FrL3lZ/oQOK6HXc0GiW86ff7KbY8URdFYZ3OYevucrlMY4A4syy7Qdij0aihh36vPtd4JD912y9bYkN+axKcFpwWnNaMH70fCU4LTgtOOw4JTgtOC05rxo/ejwSnBacFp334Jfgs+Cz4rBk/ej8SfBZ89mHkszsLTApYdV03HqKlRvMO1ETCQG1BqmDHddyrKwI0yfR+DKaJo99pMiIEvuoJ4PA5v3swVqDTVQZKCAocvGdbIC91HDr5ROB3/U7tq7ZqIzE/Pr3e24MAI2i4XoEXX0Ii2JwAJpl935pkahPvS9pVv2nyKgDQjn7nSR2i5hoFBp/wPm6whQf+tvhqAytd0aI20Ht0q6Pv18c+AO5XTChoKFhpLPM5D5GjLb/ihjaYcDGhA+iZjK3X68YD3nRsxACfoR95pDHu/aptAGLeZ2aHVQrko8aWJ2id7Gh8qs17vV5jUqmrVPCRmTUmlOADojmC7cAFckrzBX/oeaqLxaKR+8Tvcrm8MVkuiiLdx2TMXxPSLsFpwWnBacFpwWnBafdFgtOC04LTgtOC04LT7oMEnwWfBZ8FnwWfHT+ffaAj8jCc3y6mCeGBRwHPgwtCIuHgLMsSwOig1dEa4PzuAbENTGhDA8Tfr4FTVVUjUBSkdJw4RNsmQPR3Akj7VjtiYw8cfvxe79vGp3oqaHubaoIQMG3+NLM0Lg+S3kZ3kZDaUOPL7ACCAJqCtAJ9Wyx4WyhZcb0HZtXL2xjfqK7ap97jf1e7e73Qg5e3k9qO371ubeNV+yoZQxSQNH1CRrSl/QHyxDYgx4P1qOajq8acB1wfK9jK+x9gRl+/3VUnYx5bEN0KSpwD2nqfxjf9cZ3mqOaM95PGobbHe/ymKyzAAbahggmqr/qB1VYaLxAQEyxPnCEvl+C04LTgtOC04LTgtPsiwWnBacFpwWnBacFp90GCz4LPgs+Cz4LPjpvP7iwwjcfj5HjOQNTE4jsNFhRVR7QFpYq/B6ersTVZqdB5oFEQwlkYTAOxDWx9klRVlc4g1MTwY6nruhFg6M25kkVRtAIMKwpwpp6JqslNe56I9Ce2IEF8ELIl2JNgG5jomNmGCFgSUN1ut7FCgN89mejKCU3W23Qg0fi9DSzbSPo2YlJfo78/j1IJHGEsus0Yvbzd/Qs/0oYnX3yhvuV3PtMKtpK0n3BwPe83m026hu2TGp9t5IKfdcz0WZZl4yxU3aJO9V311BhSX+rEg3YWi0UDqIiXPM/TuaN+Ukc/jJEtoH5yiB60N5lM0rirqmrENb8rXugqCY2VwWCQclrzmnNgd7udrVarhDfE/2g0svF4bI8ePbI8z5POV1dXiTyxpY6ZMWAPVofQz2w2a8RryMslOC04LTgtOC04bS/BaccvwWnBacFpwWnBaXsJTjtuCT4LPgs+Cz4LPtvLMfPZnQWm9Xqd3mvStAGEJhrXtyWCBjUD1iTjGq1yck9dH7Z/0Ydu40Mn1Ut1VuBQsORV13UjOAl6AlTb1TZVT9qmeol++p0mf1mW1uv1GknKeYcKmh4c9fO2KqjqhA7eR3eNgbGipyYC1U3vNyVV1cPrrDbE7rSttmnT
x+uuhKCAr3HlSV8JBwD0MQLosapB2/J6KcFpnCjAmzXPslWA1ljtdA5bpHVcbfZmTL4f2uB7fx1xR9sAPb7VsSiAKunpSgX1TVs8AsjkL/qxWgGSb5vIqI8AOp20+bzjPlZPsE1cbaZ2V5+Q/zoOjQndoq/kD9DTlhLdeDy28/NzOzk5sVdeeSXZlS2mamv1j/qXs2kh7OVyaavVyq6vrxt+DHm5BKcFpwWnBacFpwWn3RcJTgtOC04LTgtOC067DxJ8FnwWfBZ8Fnx2/Hz20gKTDyAFG3765PMG9IIhPfjq91rF8w69rU0MQ1Dfdp0HT3+tB1SfnIxPx6LBhx4eVNr08G0Q8D552mzcNjatKHqSVPvcZk/tV0HwNuBRW3k/3vY59/gxqp9JIg/mGhdtAN82qdDx+Hi97XudPKhd1Y6apGpb2tU2dLxtfruNgH2bvg0Ahb7wk199oPYCaIlPXT0AmChp+Zzx7bXlb1uMas6pXQBYBWaNBYivLYeo+OuqDYAee61Wq8Y9PoaUqNW+GpOMHzvpCht+9nq9tNoAv/CQw26329AVO2M/P+FUvVjRouPZbDa2XC5bJ28ht0twWnBacFpwWnBacNp9keC04LTgtOC04LTgtPsgwWfBZ8FnwWfBZ8fPZ3cWmBaLRaMqiIO00ulJAENhELNmRdWDg567qMbVgFEHan84QZOB4NQ+Paj5oFVw5nsPeAQifWLkqjpUQWlHA0fB21eV1fHYihUItKGBwLg1ELU9xq5gr2P0yYwo0fmk0vcKuEpsnohUJ0+UamvVQ8fFCgy9Ft3bkhS9/KoHjTcFI0/yvFcf8DkJzZZsBUkVBQptW8EXm3g7aez5yYLmE/miYKSVcirfZoczOLUCTxs8UM8TpLez3qM6q608wHQ6nYZfGYvPMd1WrT81T5SE1MYaj5zLWtf1jTNNAcXr6+ukg1+RoRMy2sMHbCtVXzH21WrV8DOAzrZUYgg/LJdLWy6XDXup7XVSgf/0pb5RvT35hdwtwWnBacFpwWnBacFp90WC04LTgtOC04LTgtPugwSfBZ8FnwWfBZ8dP5/dWWDS8zoZFAGBghjeB0tbAnuANTsEp24LJWm5Rx9mp0moDkcgI29If70HSQUIrteHYWnCcD2go0SoegAUbHHTIGPMHqgIJLWhkh328gRHYrSRqYKt2ofveK/+0u98MJHMkB4/EXT3ZKF6mFnD56PR6AbAq17erhon/E5yazIgWp3Wz/U62law6Ha7jQfMcX2/379hHwUm8oa48HnQNjbNJQUf7YNc4DONcZ1g6EQIsKAP2qYNdGQyoBM69QW6MTFryy36wXYAsPpJYxT9tB8Fte122yAmtV1VNbfWcj4rGKF20ZhnnExYGJuO3084zQ553+/3bTgcJnJSouIhelyvcdPv9xt+XSwWjcmiPtgQG/uJmdcXTACbQu6W4LTgNO9Dfg9OC04LTgtOOzYJTgtO8z7k9+C04LTgtOC0Y5Lgs+Az70N+Dz4LPgs+Ox4+eynb6SCR26qod4GDApMHPQJLAUODh2vbAF6dr/1qQGjge1AiyDCYioK/Ol3bbAN4TW5NKAJFAUWDzgOyBpwHcLWLt5e3vbeRjkEr896Ger8nQq7zPvF294ntSR4b+dUpnrRYxcG12MkDjh+nH5cCoI5V9fJxot9pW23+0lUgHlTVFt5O9Mu422xsdjP3sAEAgTCGtrjRPr3PABZevh2u85M91RVbcY0Cuca+2n632yXAVF02m01jq6lOdNTO9MdPn5P6ud7n7a9Y48dWFIXleW7D4dAmk0mKSfULkzvVUX2ugNzr9Wy9Xtt6vbblcmnr9brhx7ZJiSdmjYGQDybBacFpwWnBacFpwWn3RYLTgtOC04LTgtOC0+6DBJ8FnwWfBZ8Fnx03n91ZYFJnakM+CdVg+h3K8lNBBWD1ya9tYAzvAA16D2IYQ5OrrusbjvO60Y4ar22MgDLftwWEBrsPKiqdGtRm+wox1UNtwwNsG3FpILcBOuCI3XW8fruoH7MHQE8snlzURvThkxu99bqqqtK2ULPmdlHe9/v9tLVZt++1xYCO/zZ/Mqa22MInep36V4mZuEHHLNuvMtH4pJ1+v39jEuDjx9uYcz2JnzY/AxBtQHob6HsiJk42m00CHeK0Ldb85AS/9fv9BujSl9qvrfKtua5Av16v05gU1NtWaniwpy+PQ2q7tnxT/+DX0Whko9HIptOpnZycJH3Rkbjcbrep/aqqGrGc53nyZ6fTsdlsZlVV2cXFha1WqxTXrEYgttSXunIJH7RNAEJuSnBacFpwWnBacFpw2n2R4LTgtOC04LTgtOC0+yDBZ8FnwWfBZ8Fnx89nv6X9uj5oNSkUhPiO5FCDedDSdrQNtnfqWY+eVHw/t+lIgPAdAOv7NWueBarbDgFnglDHptXiTuewYkDHqvpzj1bPIQQSRbe/6bhJaHTQQF+tVjcAz/fdBogItuI++iFAB4NBGpP2yzX+HhWShfHwmQc9+iZBdcujgoYHJxLbA4/GBX1qcuv5ttxLFV2B3uursadA4QGMrZvezm3+UDBSIU74zsdrm138mBXAtBoPCOI7JhxaGdc2+Uz9wnZMvVbt0la598QBqPsYNLMG0ah/syxL21PJBb7fbDZJV09oup31Njvhy16vZ/1+36bTqY3HYxsMBtbv9xO4a3+73c42m02KRXKk1+vZcDi08/NzOz09tV6vZ1VV2Ww2s81mY2VZ2nq9TkDvibrNr7yIX78KKuSDSXBacFpwWnBacFpw2n2R4LTgtOC04LTgtOC0+yDBZ8FnwWfBZ8Fnx8dnLy0waVDSmBqIlzqG9yitzlfhvrYqmCYy5z9iNE1KTTr6VcMQwN6Jeq1WRT34MCYF1Sw7VMFJDq8HQafg05YM2i4P91LAaWtbA1Hb12DQMXa73QQIHgQhHO6h0m1mDbuTVOinAObb1CRXf3It+g8GgwYREeAkr45HfTUYDNJYSDBWRChQeZ8xXo1lv63Qx5bGugcpxqrxo33gUx+X9KX9+bFqTqGvgpNOGDQ3NL48kTAmHYs/1xR7+BjzxK1+xR/8riCu+aP6+Jj1ea2xNBgMGmNS22u+avveFp6cPfkRKwrufEbs1/X+IZvz+TxNAmlTfTkYDJK/ptOpjUaj9DC+5XKZgP1b3/qWPX361C4uLhptMmlDH+xFX0VRWL/ft36/b6enp+kBfyEfTILTgtOC04LTgtOC0+6LBKcFpwWnBacFpwWn3QcJPgs+Cz4LPgs+O24+u7PA5IFJDaXXqNMVeLQN367ed1s7Hqi1wuiB/oOMo00HkoVkvS1wcCbjJ2F8spo1gc8DvR8jAalVc9UHQFXA9YTr7ak66viVzPi+LMvGA9LagB7hOx2P9yMJ3WZrfXGtJ69ut5tWLLSBvE929M6yrGG3uj485E3JU4HGkzyExzWQhNdbdW+zhcaJJqpeqysA1L/aP98Tl34Co9e2gZq3PaLjhHA0XxXg2kBe+8ROOg4FUSU3+ladfKyqzkr+ftwaM7SheuH7Nv2V/DXvAHlWC+j9Wbb
fkspEQlcZKTbqqgcmTrS12WxsNpulVQPvvvuuXV5e2tXVVcMuKn5ybLYnsaIobDwe28OHD+3k5MROTk5uXBdyU4LTgtOC04LTgtOC0+6LBKcFpwWnBacFpwWn3QcJPgs+Cz4LPgs+O34+e2mBSR2KaAUNQ3lHAVLqML2XQWBkNVxb3wpyAJRep9KmiwIBn/tERyffrl6r12igq756rU9eHY++bxO9V3UkKNWWOk4N9Lqu09mM2Ho0GjV8xlZDQBYA0DMdCUTfHy/aVt3r+nDWqn9l2eHBcvhTx0k11pOqbtVVXfjJuAaDQSJR7vHEoklsZun8SS+MUUGMyre3E2PXWFU9/eQB8bpBvvStY/bjVz0VGFRfr4eCo65GAKzwpZ+gqN5t+mhO68RMQRFs0BzxgK+f6dZtb0uzA9F7Xflc9fDj6Ha7VhRFmuT4FRu0iQ/MDme2Elfb7Tbdo3FR17U9ffrUrq6urNPp2HK5tGfPntlqtbLNZmPX19fpDFWPIZ6glIzOzs7s9PTUHj58aK+88oqNx2MbjUY34iHkpgSn2Y1r9ZrgtOC04LTgtOC045HgNLtxrV4TnBacFpwWnBacdhwSfGY3rtVrgs+Cz4LPgs+Ogc/uLDDpg8F00Cr8TkDqw9sYMMpjJBK7qipbr9fpmoZirqrH/doeYKKOwykkGQ7A8D6ofSLcVvnV9nmvQc14NPF9PziMYPf9bDablHzYcr1e3wAZ2saW/X7f8jy3PM8b/kJXAlhBA9sQtGaHraqaiDomtoBqgmmMKFAxBu8j2lyv1w1SVXKiHwUL+tfzST1B0ob6BzuvVqukj67eKIriBqHq1ldicDgcNoBwNBql+66vr5Mf8ZnGm9pRfa56Yyc/ScCWXKNxo7nA520TDN8vYwaYiA0/kSDftE8ldgVldFRQbVs1oOTdhiWMk+3KxJjXU8kKfdsewMjkRe2htm7LAWJAcUntBSZ5XRjzfD639Xpty+XSLi8vb0xSEOKPyRV27vV6NplMbDgc2mQysUePHtnJyYlNJpP0sD/OJkZ3JcGQ2yU4LTgtOC04LTgtOO2+SHBacFpwWnBacFpw2n2Q4LPgs+Cz4LPgs+Pns5c+gwnj6E/9nEFgXBJRA9MHBPdq4LSJBy0FD/89v2ugewDSvrUNFQ1q/ekB25NEm64e+DRxNTGrqrpBRvpT9bwNJPShcV5v9OQzH8DajiamJzpNIj+m20T7Rdr08LorOJF4AL3XV39XIfH96gN85qvMHiTqum4AiQI99jYzK4oi3c846FvH2OY3Hb+uptDrfa6of7Rt+ud3nfhoO4xLbQDAaVz6vKVv+tGc0HjnOiVaJQMzS6Cteql9dQupVvjbJl9thO8Jsi3O28bGfd6++EiJAbzxkzy2ovIi/nTbOQREu2rTwWBgo9HITk5O7MGDB/bqq68moC+KIp2FqnnnbRBytwSnBaeprsFpwWnBacFpxyzBacFpqmtwWnBacFpw2rFK8FnwmeoafBZ8Fnx2XHx2Z4HJVyB9tcqDvFbRNYkwvAKJDhiDaWADTgqMJC1t4XxPKupg9LzNGOrYsiwbY9brCTx0UvEJrAFOuz7gdBxlWVpRFElXAqKu6wbAcK0CL33qdklvX01K+qYfX0n1vvTA30YE3tYegNr0Yiw+Pvgdu6GnvtT+ntD0c+7XcdC2xid+ZwWDJj0x4oFex6Xkjw0AbvrxoK+TG6+bgrnKXTbW3PKTArXZbTnIeLEVMauiRMP3jE9jgu983inAarypPlmWNSrkSkQaJ14XXcGkfdKH6uABkoeCqi1VRyVH+idWyrJsgPl2u7XlcpkmJX6Spf0qJqot8zxPW1Ffe+01Oz8/t6IoGg+hvAvTQm6X4LTgtOC04DTVOTgtOO2YJTgtOC04LThNdQ5OC047Vgk+Cz4LPgs+U52Dz46Tz15aYFIFASBEE52KX57nNwxKghD8bUGsYM5D3riH8wUhG+8IxDtGg0gDRw2E/m3BQoDpdjna1zbaAFlBlYd0KRHyPUC72WzS+KvqsBVQQdoHryZWVVUNO7UlM7q2JY4mIe2v1+sbgKJB7+3OeHRrIOesoqcPbr4jMTqd/RZSvzqFuIHc8aUnOwCV/lQnlfV6bZvNJvWl8YfPaXO73dpsNmvEgtpF+9Wtxb1ez/I8b0wmFKT9dmCfE4jmggKzzyWNE/oEjNRneh/xp7GhpKTtDwaDG4RV17WtVqtGv/qdtoH9er1eqoSjH3Gi+ajv2bruY5z7zA5nlLbl4W2TH2KIvnmQHj7VF9uyAXd0xKbEGvlDe8Rvp3M4oxcdsMV0OrXz83ObTqd2dnZmb7zxhk2nUxuPxzafzxuTER3HdDpNW2dDXi7BacFpwWnBacFpwWn3RYLTgtOC04LTgtOC0+6DBJ8FnwWfBZ8Fnx0/n32gI/I0iDCIDwL9Xq9Tg/Id9ypY8TkJQiJ7MFew87+rXhrMSkZ6jyYN19wmOu42u2iVUds0O1SisYGOW5MBZ1Lh13Z11UTbeAksPgdEsG1bcBO02r+O965zR1UUbM2sUYXW8frrPCmhm6/oa0K19avjV4LQezQ+dUxcx2fqM9VRJyAQoPcFiYhtlVS1nbZX2+Snzeb6mbarcaTXKmn5dtQeOvFBH105pDnSBuZt+X8bcXkMUYLVCQdx6/OLcevEUW2jwO9XG6n+mnOMeblcpgfgef8wgSEWfHz5eFGC9WSNX8bjsZ2cnNhHPvIRe/DgQTrv9OTkJD1wEiKmH2zMRKSu6xuT8JCXS3BacNptEpwWnBacFpx2bBKcFpx2mwSnBacFpwWnHZMEnwWf3SbBZ8FnwWcfbj77wAUm7UQH5APeJ5Leq2CqDvZAr071fWnw3OZgrvMV4rYg5XcNBNVZg8/r33avb0Md7YNAgbktQDw5qV3a+tGfWkHmc4Lek4uCpPpCdfSk29Y/Y9Rr1ccQiQdRAlVXO7SRYZuv22JJ70eHtsr4bYCkbWj7fusuNuj1ejeIBhDSbaL6veqjumAf7btNNAZ0rG3+4cFw/j4Fd0RBT/2oAONzU6/zOaj96D0+N83sBiizAkRjF+L07aj9sCFArysMGKP6gRVEVVXZbDazzWZjm82mYRsdg49PtYnGFC++Y9UAOo3HYzs/P7dHjx7Zxz72MTs9PbXhcNiwg56f6vvhe1bohPzWJTgtOE1tE5wWnBacFpx2zBKcFpymtglOC04LTgtOO1YJPgs+U9sEnwWfBZ8dB5+9tMDEgLSiReM62M1mY1mWpS1TGFENrEGjg2E7F47CQQDAbQAP8GrCqQHQT/XVarkakPvbtqOqvmoTBUFPOnof7en1/X7fttutrVYrWy6XSZ9+v5+C24O7vq/rulFhRK/kWFm5UNd12hrY7XZv2IbrsGcb+JE4HujwKcmjKyG63W6DSNrOscU26MJ3bcCh9lXy8KCuRM89ahvvH7WvghErBxBWCrCdEX+wPRsbqW74tk2/wWBwY+KhRK2Aq9d4e/hVJgoK3e5ha20byemqDS+sCOHV7/fTdk3vO22Pz9GBvKT6rlV49Hvw4EEjZgFFgM
/HNf0NBoMUd+pDjXOwCfuvVqsE5svlsoExADzjJT50O2hVVbZcLm8Qu/qB/tgOnee5PX782B48eGDj8djG47G9+eabNhqNUvxst1u7vLw0M0t2ZsKAfr1eL60wwFZshw/5YBKcFpyGzsFpwWnBacFpxy7BacFp6BycFpwWnBacdswSfBZ8hs7BZ8FnwWfHyWd3FpjoSION9wxOE4NO+V2vUQBW0DNrggj98j3X+kTnPk0CPsNJAAx9I21BrQmm46FP/14rjL59DX4FEu2b9vRMSA8GOl7tR/tT+2M3Xp5Y1V6dzuHBfaqTAkZZlinJNLFJLPWtApgmpuqjROcJV1dMkFAesL09lIw06XQ8mnT6mdqQGLnNf9hC9Vd9PRgr8atdNO78Z+qbtjG36asx6EXjV4HR64G91C7oouMwO0zAfOypXrrFsyzLlNcKgOAHbQ4GgwRexKXGoI6dSQTtaKx5G3Gm6WazaeQtD8Pje28rSKYoitQPY2eMjAFM07gApM3M8jy30Whk0+nUHj9+bK+99lrajvro0aNkYwhHyZ22lYjUbt1uN22tXa/XN2Ig5KYEpwWnBacFpyHBacFpxy7BacFpwWnBaUhwWnDaMUvwWfBZ8FnwGRJ8drx8dmeByYONB2c1EO9ve/lEUgBuIwwGrImrzvSJrm1rX3ptGzh6QPcg4dvW8fo+td229umDcQG0Gvh+vKrvbe9Vf8BCk7TNvmob34Yft9rN6+rFt+mTSMnB3+P9qX7jegVUBa82sGv7rE3Ptvj2fiMJ1Q6qg5KFJ2eEe9R+fuw6Jh2bB15PLm321LHcNhnRSQU6apWaa/npc9GTKOPy5A5p6oTB7PCwST8h5BoFem8/P3nT+6iu81P9xOdt8cv4WVWkMQt5KPBq/tIeqw76/b5NJhM7PT2109NTe/311+38/NyKorCiKNIDIKuqSiTCGHTVkvcxZLPZbOzq6sqWy6WtVqsbYwm5KcFpwWnaT3BacFpwWnDaMUtwWnCa9hOcFpwWnBacdqwSfBZ8pv0EnwWfBZ8dJ5/dWWBi6yCDpFMNBE0yHKPBg/JqFA1sXw2kDSp8Ppi1ysn1fKfEoIGMc7Isa61Sa9CpXrolzMwaW8baKr1cRzvaP7ZjeyljpCqPTuhPm7TVlqi6vdfssCrBbyNE/JbETqdjg8GgsT3Pk7vez5ZDrfgqWWnF1QOW6qcJTT86FirQfIfNGJvqlmVZI061P79awftKk9iDHuPT6q62oTZuO69SYxSde71e2uoK0TMmnWAQM7SJXck5nQSpn7VPbauu67RdVX3mJ1zoiE1Wq9Wtucy4uFZ1U5zAVoPBwMbjcXpgHe2pvxTAwRxAGeBlZQLXkkfr9TrZdLfb2Xq9bmxVVpsQS9het87yuU7EwA98x3Zc2h4OhylOyrK04XBo0+nUzs/P7fu///vt9ddft7OzMxsOh7ZYLJJv1+t18u10Ok14QLv0rf5hC+5ms7HZbGbvvfeeLRaLxnb3kNslOC04LTgtOC04LTjtvkhwWnBacFpwWnBacNp9kOCz4LPgs+Cz4LPj57OX7mACZEhgFYJKE68t4TCAJrZex2BoQ8HIi0863QLn9fagTOAQnGYHENE21Kh6rU8M2t/tdml8HozM7AaY89lms0ngqwlE0mqV1VdhPWAzPvWLJrO2zTWMAxBQ0T5o3wMY5Klkr7akD01+1VHPclV7co+2pYSrY9GY07NaPRjqxET7VJDScZPYiMaXisZC23e+Sq5EolsuPSkqwetY2/p5mfi+lSAYmwdrvveEjF3VZmojvZaJFed9EmcAlc8lzh71scTn2ES3JAPsAK3GmscnxqSxRJveJmaHyQfxgP3Zyko7OgEqisIeP35sk8nEJpOJvfbaazYej9OERH2w3W5T/2BIW7z6yStbU2ezmS2Xy0SGIS+X4LTgNG0/OO2mBKcdJDgtOO3DLsFpwWnafnDaTQlOO0hwWnDah1mCz4LPtP3gs5sSfHaQ4LMPL5/dWWDyyaFOV+OpEj7pPegq0Kv439tIpC3QCRYFHE8o3mia4EoUtwWGFwVO7vX96n1874nPzBoADiFUVdWowhJMbUGJzj7JPNB5glXdPTGQTP46vYYk5uFy+p3aQgFSq6QeHL099TolXE1y3+dtNm97YSMPNmo737aP0bviAh3a+tCYQXwfPgZVp9tE/e39rqsHPOGorbUt7wcf221xD7kxRmJEAc2TOPfoeagqRVGkzwFfVhAoCOp7b18/9rZVNW125Ds93xSy6Ha7icgmk4kNh0M7OTmxk5OTNG4e6FeWZeN8ZtXVxzS2xV4K5lW1f1ggQL9YLG6sbgi5XYLTgtP0Or0mOO2mBKcFpwWnfbglOC04Ta/Ta4LTbkpwWnBacNqHV4LPgs/0Or0m+OymBJ8Fn31Y+ezOAlO3272x3Q+n4GwFhrZA1eD3pKG/a1BhRG1DA1iDXYNGt7RptTDLsgSibA8kyLTS64NDg1KDWldFAJS3BRZ2xHEanBAN31Fd9aCnY9JgUNDg5QNJ2/GApvdqEDNu37aCn++TYFXb+JcngTaw1thRoOczHpLW5heNC+KUGNlsNo0E1rhuAyzaUID1K2k0hjVxuVbH6Mflbe5FCbItv7S9tkmQ3+KoOaj3t42ba/xKD373Ewg/ySJ/WQkD8KuN/AoLtrESUzopKIqiAdDqUxVPsmxL14kVdqJy7yc0vi1yFODW8Xe7Xcvz3F555RV7/PixnZ2d2YMHD8zM0vbZ6+trW6/XaauuTkA0Tz1Qk+ubzcaeP39ui8Uibcddr9e2Wq1sNps1tsiGvFyC04LTgtOC07w9g9OC045VgtOC04LTgtO8PYPTgtOOUYLPgs+Cz4LPvD2Dz46Pz156RB4O6fV6yTDz+TwlO8HPTx0E92NAX0nWfryxffAT+Gx3w1k4Utvne93iRsVbE6KqqrTdEp20YugJQ4GJrYycu6i6M2YPWLShQOD1HQwGDbBdLpcNYG2zJ58T2Go/TTw+16pyVVXJx0q2VHWViJSENDA9OVHt5R4FE60o6/Y/1dmTvibHarVK+hN/gI2OS8ejAIPt+I6tjnymcam+ybLMttttg/x0/IwFX6jNfRwo0SloeiLCHmonTWglDB8b+MhPrgBs9a3aGF+bWQJe7tOtm+jHe2J3PB7bZDJpxDf20lUFCsJ1XdtoNEoPoCMH8M1qtbLlcmnL5dKur68b21NV+L1tkqUPtpvNZg3/6n3olue5nZycpC22w+HQJpNJGsN4PE4kNhqNUlxfX1+nM2TNLJ2TCnbmed6YBLAKYLlcNmKDcc9mM7u4uLD1em3b7dayLEvvOUvV42nI7RKcFpwWnBacFpwWnHZfJDgtOC04LTgtOC047T5I8FnwWfBZ8Fnw2fHz2Z0FJg9QOIVg0O98Z6o0RvRtt733n+G0tgT01WQCSoGCBPAPyPLj00Rq0422uJZ2AWhNOhLOg6O2o22RNEoqBIImlNrfzBrgopVdtY3qpYCqyU/y0aeviut734b3q/eRjwPe+/vV5qq7/t4GWjpmbVerqwqaXh/9D
lHf6Fi8D3l/2wTGt4Wv/CTHg6fGEAK4tdmMfPSxq3GhKxo0XzVnePkYoC89y1PvZ5LBSycH3mbEGp/ptlZ08yTChEoBnvceo7BpVVU3cMrbuy03AfXxeGynp6cJ2AeDgY1GozS+PM+TLwFxP8nIssyKokirbDqdjm02m0YOAPTr9bqRW8vl0tbrtc3nc5vP52nil2VZGr/60q9yCWmX4LTgtOA0u/E+OC04LTjtOCU4LTgtOM1uvA9OC04LTjs+CT4LPgs+sxvvg8+Cz46Nz15aYNLg8ECvooPk9w/athdv/Lbv25Jeq5ea0Ojk22sjIW2XcSpged3oUyvauqUNJ6sTuU7ByNuL9vQ+r58COsF2F9goIOoYsixr9OeB0o9b+25rT/UkGdr69ATlCYA+NYHbRHXQe70dNDa8LVUHXiRRm/jYIVY0/hVcVVevpwdWBQolbg9Yt7WtOmLju8ahkwEPnNqmxgPjJN71pX70eaiA7snFn22qKwnW67VtNhvbbrcp1j2x6WROV9nQn/cL3yluTCYTm06nNp1O7ezsrJHXw+EwkZgSthKg2hF7YLO6rtOWaeKLFUnL5TKNu6oqm8/naQUFq2doH/2ZaKpdQ+6W4LTgNL3P+yA4LTgtOC047ZgkOC04Te/zPghOC04LTgtOOxYJPgs+0/u8D4LPgs+Cz46Dz+4sMPltcR509L0GrQKwDyANpDag1c89SPvk1e2X6Et/6ggNKn7npeCoSZFlh7MU0YV7drudbTabBJDj8fjG1jwdpwd5gpgKaZY1z2plbFmWNSqWtwGhBxUF2DaQZ1zcR2Wy2+0m/9V13ahucp+uOIDEVNjSrPGA7mwXZHxsSVRA12DnM7boYW/1n4Ih12sfug0Xm3v7aax4W2qMqb2pYjNmjRP86M+U7Xa7jW2xKjqONlL1Mc2Y/DZjPwYmIVqx13vacoz71M7YQkGM7a66pTXLssYWUvrTuFE/0Z6CJTYnzzgHtI0IdFKyWCwaduv3+2ls2v5gMLDhcGjD4dCKorDT01MbDAbW7/ft8ePHdnJyYtPp1IbDYepvvV43+vP+oR/NaTOz9Xqdxr1arez6+rqxxZ2xECuMZT6f22azsc1mY6vVqmHPoijSi1VML5tYh+wlOC04LTgtOC04LTjtvkhwWnBacFpwWnBacNp9kOCz4LPgs+Cz4LPj57OX7mBSkGkDx5eJD2jfPtdgPAJdA5HP9T4F5jSYFwFMAJrdPKdSg9UbR8fWBhh6323jUhC4y0YkL/oo0KstCIS2RFfbaABqsrb160FFz5TlOxL4tpUQtwGEJ2P9XpMbAOBaJTnAUe8lsUk2xPfl/aq20W2OOplQW6jf1MZeND6UTLvdbsNvnvQZo5+UZFnW8AftK4i3jVXj2U/GVAd9KTF536GH79uPs+2c4E6nk86LpT2/QqXNpreNWc9BZQUBfWlek7fEseaI2WFSga2Hw6GdnZ01Vg1ACqenpzYej9P5pjpp8SsGNM7Qn0mJ+pXVAjwkj4dqAvTcp/m/2WxSnDIhYixFUVie51YURcJAnZSH3C7BaXsJTgtO8xKcFpwWnHZ8Epy2l+C04DQvwWnBacFpxyXBZ3sJPgs+8xJ8Fnx2THz2gQpM6sy2a/R9W7K0JZG/pw3INRD0ew1aHEHQE3wabLcBpzrMg75Z8+F9fgue6qZ2AZDUfm3j0gBS0tJk9WDkAQjdCZY2v3gy0KT2ttV+O53DgwdpX/XXsXmdsTFj414P4NqeJpC3m1kz9rwtvC9oq83v/js/pttI0ut5G7Hp/R7UvA8Yb1v867Xe/7fJbXHZFtttOmMX+lUSQdc2e/jYU3Jl5QcPyfSkpSTu449qO5V31Q2g5zP6UtvneZ7OaeXhkb1ez05OTuzhw4c2nU5tPB5bnufJ5qyy8ZV9r6faj9zVa9VOrIjgfFMmGzo2gF77oW3GZXYgLT7jfl29EXK7BKcFpwWnNe0ZnHbTHsFpwWnHIsFpwWnBaU17BqfdtEdwWnDaMUjwWfBZ8FnTnsFnN+0RfPbh57M7C0wKnKpEm8P5TIGLIFalfcC2OVkrnRpItKtgrhU0dPP68rlvRwWd/EoDjKjj9sC+2WwaumpF04/X96s29AnC9YzZf0aV0ie1BmJbwnuQ90GrQN+2wkHH4SvZZpbOe2wjLq0sa0Vf/Yh/VU/tW1d1qA011jwBoj86sO1R40HPrcTPCthtdvTxyZj8ZENfbRMmFc0l7VNjms/a8ullk5GqqhqAqySuRImtfcy2tad9A2TkQZ7nlue5jUajG0Sl+uoWVXymwIg/eRFnyGg0Su8Hg4GNx2MbDAaW57mdnJxYnuc2HA7t1VdftYcPH9p4PLZer5cAl5/L5dIWi0Vj7NilLA/bnbEZ8ajkg231c4Ae28zn83TW6XK5bExeut1uIihdBaHAz8P4eGBfyMslOC04LTgtOC04LTjtvkhwWnBacFpwWnBacNp9kOCz4LPgs+Cz4LPj57M7C0zqRBTXANbPNWExiCbybffjKPrRoMKY3nCArg9CnAE4axJoEGKw2wBfExuCUAcqcHGtJrEHEB2vklpbomZZls4M9d+ZWaqMApi9Xi+NCR09+bF9UINF7eX97Ykc0NXP+Yz7vU3LsrxxLqr26/tWsGnTiesVoHSMfK++0e+9QJw6MSGetB0+Uzt40G0DQtXBx4naXgmxTTRGqcrrePT7ts/byFJzDf3bdMU+mtOIblXVVTY+R8wsgePm/9/eu/zYtp1n3e9atS5121cfH5/jhNjGOjg4dowhckAiMpc0aIEQIhI0aASJBhL/AUoHmmkguYtEixatCBGJS2gAQnEEcoIwSD7EgsQ2vuTsc87eu3bVun+N+p5Zv/nUmKv2aX1n1fe80tJateacY7zjvf1Gacwx13LZA5L7Sf2wsOs7febERm3oDgH9ON7R0VGvyJ+dndUnPvGJ7vmhjx49qtPT024io7Y3m+vnqapdPuOV9qnqP1uVEz3dFSA7ej4x5nT3ACcgrTzl5MUnELLDUPxEbkuYFqaFaWFamBam3RcJ08K0MC1MC9PCtPsg4Vl4Fp6FZ+HZYfNs7wITHauG6UAvePyO4kni7fl3XthpZL+Gic/EceH3LErUmUHtyesJpjbG4/4PwN1l8CHd3V6ut1/nRVE6MEha9lBA6TMDln1qzEPjor+HIDKZTLoVexbIVqH3ojRke09yBrn68L81VtqMMcVJCnUVsNSG36Xg9vViQHjpe4HDx+8TAOomf9G/9KderUkZ2/OY3WenIZ/Qxux3aPLHcxaLRTPv/LOKbusYY20ymXQ/mHd6elqf/OQnazab1WQyqclk0hX6Bw8edD+op++n02nzhwrZj+LVc01xwfyQ6Jja0F0RihPdraAf0dNx+oPjJuy8VrrvIndLmHYjYVqYFqaFabJLmHaYEqbdSJgWpoVpYZrsEqYdnoRnNxKehWfhWXgmuxwaz/YuMHlCMonZMZN8n8N5jSvH6zabTa+YtYq5jMDVNxncA9H70gooDaX2PNFpC+mgROGLY/AEGXKEB30r0dmvdNOY
[... remainder of base64-encoded PNG data omitted (embedded notebook output image for the demo cell) ...]", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "# Run example on BUID dataset. First, second, third, and fourth columns are ground-truth,\n", + "# our LMV-Med, MedSam fined tune,and zero-shot segmentation from SAM. \n", + "demo(\"buidnewprocess\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "01d85858", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebook/__init__.py b/notebook/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/notebook/image_demo/buidnewprocess/test/benign(292).png b/notebook/image_demo/buidnewprocess/test/benign(292).png new file mode 100644 index 0000000000000000000000000000000000000000..09e41d0746fcce56db82545f10474d2aa1d8350c --- /dev/null +++ b/notebook/image_demo/buidnewprocess/test/benign(292).png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:876caae6b939bbf1aba0804d805bda927d24066270e2b8228b2911a74acc71b0 +size 63519 diff --git a/notebook/image_demo/buidnewprocess/test_labels/benign(292).png b/notebook/image_demo/buidnewprocess/test_labels/benign(292).png new file mode 100644 index 0000000000000000000000000000000000000000..361b1ff61f43431c6ba0928602d8c6c4ac35102c --- /dev/null +++ b/notebook/image_demo/buidnewprocess/test_labels/benign(292).png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8f19a2be3fd5a35813e255f66fc5e95340305ecceda60c1655e8ad2df66dfe01 +size 541 diff --git a/notebook/image_demo/isiconlytrain/test/ISIC_0001191.jpg b/notebook/image_demo/isiconlytrain/test/ISIC_0001191.jpg new file mode 100644 index 0000000000000000000000000000000000000000..4257f2ac94f2c3f4a4b5d8a1ee6756a8466897ab Binary files /dev/null and b/notebook/image_demo/isiconlytrain/test/ISIC_0001191.jpg differ diff --git a/notebook/image_demo/isiconlytrain/test_labels/ISIC_0001191_segmentation.png b/notebook/image_demo/isiconlytrain/test_labels/ISIC_0001191_segmentation.png new file mode 100644 index 0000000000000000000000000000000000000000..6a0df5b36ac06a7a416235f8f2156853c6e129b4 --- /dev/null +++ b/notebook/image_demo/isiconlytrain/test_labels/ISIC_0001191_segmentation.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:438fdd005992814abe623c53b6f0fa98acdcbd3ff05366b79e8ec6c95940412e +size 1039 diff --git a/notebook/image_demo/kvasir/test/cju5xjn5mm78b09871spyqhhr.jpg b/notebook/image_demo/kvasir/test/cju5xjn5mm78b09871spyqhhr.jpg new file mode 100644 index 0000000000000000000000000000000000000000..06f5f34eeb50a5986ead0eb9e32427045639dfbd Binary files /dev/null and b/notebook/image_demo/kvasir/test/cju5xjn5mm78b09871spyqhhr.jpg differ diff --git a/notebook/image_demo/kvasir/test_labels/cju5xjn5mm78b09871spyqhhr.jpg b/notebook/image_demo/kvasir/test_labels/cju5xjn5mm78b09871spyqhhr.jpg new file mode 100644 index 0000000000000000000000000000000000000000..02bb3f16ec9dc0d32f09e0702a3f752febdd9879 Binary files /dev/null and 
b/notebook/image_demo/kvasir/test_labels/cju5xjn5mm78b09871spyqhhr.jpg differ diff --git a/object_detection/README.md b/object_detection/README.md new file mode 100644 index 0000000000000000000000000000000000000000..598016644497b0fffe5c00f07f437f1cc3587a66 --- /dev/null +++ b/object_detection/README.md @@ -0,0 +1,31 @@ +# Training Faster RCNN model using LVM-Med (R50) + +## 1. Activate conda environment +```bash +conda activate lvm_med +``` + +## 2. Convert dataset to Coco format +We illustrate LVM-Med ResNet-50 for VinDr dataset, which detects 14 different regions in X-ray images. +You can download the dataset from this link [`VinDr`](https://www.kaggle.com/datasets/awsaf49/vinbigdata-512-image-dataset) and put the folder vinbigdata into the folder object_detection. To build the dataset, after downloading the dataset, you can refer to the script ```convert_to_coco.py``` inside the folder object_detection and run it. +```bash +python convert_to_coco.py # Note, please check links inside the code in lines 146 and 158 to build the dataset correctly +``` + +## 3. Set train, valid, test folders +Edit [`base_config_track.py`](/Object_Detection/base_config_track.py) at: ++ Lines `11`, `12` for training set ++ Lines `60`, `61` for valid set ++ Lines `65`, `66` for test set ++ Lines `86` for folder store models. + +## 4. Train model and test +```bash +bash command.sh +``` + +## 5. Train from current epochs: +```bash +CUDA_VISIBLE_DEVICES=5 python finetune_with_path_modify_test_eval.py --experiment-name 'lvm-med-r50' --weight-path ../lvm_med_weights/lvmmed_resnet.torch --batch-size 16 --optim adam --clip 1 --lr 0.0001 --epochs 40 --labeled-dataset-percent 1.0 --resume +``` + diff --git a/object_detection/base_config_track.py b/object_detection/base_config_track.py new file mode 100644 index 0000000000000000000000000000000000000000..9c339c8446a40708da03451a27f6bc42b5697e01 --- /dev/null +++ b/object_detection/base_config_track.py @@ -0,0 +1,105 @@ +from mmdet.apis import set_random_seed +from mmcv import Config + +def get_config(base_directory='.'): + print ("Using base_config_track") + cfg = Config.fromfile(base_directory + '/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py') + #print(cfg.pretty_text) + + cfg.classes = ("Aortic_enlargement", "Atelectasis", "Calcification", "Cardiomegaly", "Consolidation", "ILD", "Infiltration", "Lung_Opacity", "Nodule/Mass", "Other_lesion", "Pleural_effusion", "Pleural_thickening", "Pneumothorax", "Pulmonary_fibrosis") + + cfg.data.train.img_prefix = base_directory + '/data/' + cfg.data.train.ann_file = base_directory + '/data/train_annotations.json' + cfg.data.train.classes = cfg.classes + cfg.data.train.type='CocoDatasetSubset' + + img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + + albu_train_transforms = [ + dict( + type='RandomSizedBBoxSafeCrop', + height=512, + width=512, + erosion_rate=0.2), + ] + + cfg.data.train.pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(512, 512), keep_ratio=True), + dict(type='Pad', size_divisor=32), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='Albu', + transforms=albu_train_transforms, + bbox_params=dict( + type='BboxParams', + format='pascal_voc', + label_fields=['gt_labels'], + min_visibility=0.0, + filter_lost_elements=True), + keymap={ + 'img': 'image', + 'gt_bboxes': 'bboxes' + }, + update_pad_shape=False, + skip_img_without_anno=True), + dict(type='Normalize', **img_norm_cfg), + 
dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), + ] + + + cfg.data.train = dict( + type='ClassBalancedDataset', + oversample_thr=0.4, + dataset=cfg.data.train + ) + + cfg.data.val.img_prefix = base_directory + '/data/' + cfg.data.val.ann_file = base_directory + '/data/valid_annotations.json' + cfg.data.val.classes = cfg.classes + cfg.data.val.type='CocoDataset' + + cfg.data.test.img_prefix = base_directory + '/data/' + cfg.data.test.ann_file = base_directory + '/data/test_annotations.json' + cfg.data.test.classes = cfg.classes + cfg.data.test.type='CocoDataset' + + cfg.model.roi_head.bbox_head.num_classes = 14 + + cfg.optimizer.lr = 0.02 / 8 + cfg.lr_config.warmup = None + cfg.log_config.interval = 10 + + # We can set the checkpoint saving interval to reduce the storage cost + cfg.checkpoint_config.interval = 1 + + # Set seed thus the results are more reproducible + cfg.seed = 1 + set_random_seed(1, deterministic=False) + cfg.gpu_ids = range(1) + + # we can use here mask_rcnn. + # cfg.load_from = 'https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth' + cfg.work_dir = "../trained_weights" + + + # One Epoch takes around 18 mins + cfg.total_epochs = 30 + cfg.runner.max_epochs = 30 + + cfg.data.samples_per_gpu = 6 + + cfg.log_config = dict( # config to register logger hook + interval=50, # Interval to print the log + hooks=[ + dict(type='TensorboardLoggerHook'), # The Tensorboard logger is also supported + dict(type='TextLoggerHook') + ]) # The logger used to record the training process. + + cfg.workflow = [('train', 1), ('val', 1)] + cfg.evaluation=dict(classwise=True, metric='bbox') + + return cfg diff --git a/object_detection/checkpoints/__init__.py b/object_detection/checkpoints/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/object_detection/command.sh b/object_detection/command.sh new file mode 100644 index 0000000000000000000000000000000000000000..53ef3f3b24508824cb6e7f86ac6409253be6405d --- /dev/null +++ b/object_detection/command.sh @@ -0,0 +1,4 @@ +## Training Faster RCNN model using LVM-Med-R50 +CUDA_VISIBLE_DEVICES=5 python finetune_with_path_modify_test_eval.py --experiment-name 'lvm-med-r50' --weight-path ../lvm_med_weights/lvmmed_resnet.torch --batch-size 16 --optim adam --clip 1 --lr 0.0001 --epochs 40 --labeled-dataset-percent 1.0 +# test model +python test_one_sequences.py -exp-name 'lvm-med-r50' diff --git a/object_detection/configs/_base_/datasets/cityscapes_detection.py b/object_detection/configs/_base_/datasets/cityscapes_detection.py new file mode 100644 index 0000000000000000000000000000000000000000..e341b59d6fa6265c2d17dc32aae2341871670a3d --- /dev/null +++ b/object_detection/configs/_base_/datasets/cityscapes_detection.py @@ -0,0 +1,56 @@ +# dataset settings +dataset_type = 'CityscapesDataset' +data_root = 'data/cityscapes/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', img_scale=[(2048, 800), (2048, 1024)], keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + 
dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=1, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=8, + dataset=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_train.json', + img_prefix=data_root + 'leftImg8bit/train/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_val.json', + img_prefix=data_root + 'leftImg8bit/val/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_test.json', + img_prefix=data_root + 'leftImg8bit/test/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='bbox') diff --git a/object_detection/configs/_base_/datasets/cityscapes_instance.py b/object_detection/configs/_base_/datasets/cityscapes_instance.py new file mode 100644 index 0000000000000000000000000000000000000000..4e3c34e2c85b4fc2ba854e1b409af70dc2c34e94 --- /dev/null +++ b/object_detection/configs/_base_/datasets/cityscapes_instance.py @@ -0,0 +1,56 @@ +# dataset settings +dataset_type = 'CityscapesDataset' +data_root = 'data/cityscapes/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', img_scale=[(2048, 800), (2048, 1024)], keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=1, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=8, + dataset=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_train.json', + img_prefix=data_root + 'leftImg8bit/train/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_val.json', + img_prefix=data_root + 'leftImg8bit/val/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_test.json', + img_prefix=data_root + 'leftImg8bit/test/', + pipeline=test_pipeline)) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/object_detection/configs/_base_/datasets/coco_detection.py b/object_detection/configs/_base_/datasets/coco_detection.py new file mode 100644 index 0000000000000000000000000000000000000000..149f590bb45fa65c29fd4c005e4a237d7dd2e117 --- /dev/null +++ b/object_detection/configs/_base_/datasets/coco_detection.py @@ -0,0 +1,49 @@ +# dataset settings +dataset_type = 'CocoDataset' +data_root = 
'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='bbox') diff --git a/object_detection/configs/_base_/datasets/coco_instance.py b/object_detection/configs/_base_/datasets/coco_instance.py new file mode 100644 index 0000000000000000000000000000000000000000..9901a858414465d19d8ec6ced316b460166176b4 --- /dev/null +++ b/object_detection/configs/_base_/datasets/coco_instance.py @@ -0,0 +1,49 @@ +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/object_detection/configs/_base_/datasets/coco_instance_semantic.py b/object_detection/configs/_base_/datasets/coco_instance_semantic.py new file mode 100644 index 
0000000000000000000000000000000000000000..6c8bf07b278f615e7ff5e67490d7a92068574b5b --- /dev/null +++ b/object_detection/configs/_base_/datasets/coco_instance_semantic.py @@ -0,0 +1,54 @@ +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', with_bbox=True, with_mask=True, with_seg=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 8), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + seg_prefix=data_root + 'stuffthingmaps/train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/object_detection/configs/_base_/datasets/coco_panoptic.py b/object_detection/configs/_base_/datasets/coco_panoptic.py new file mode 100644 index 0000000000000000000000000000000000000000..dbade7c0ac20141806b93f0ea7b5ca26d748246e --- /dev/null +++ b/object_detection/configs/_base_/datasets/coco_panoptic.py @@ -0,0 +1,59 @@ +# dataset settings +dataset_type = 'CocoPanopticDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadPanopticAnnotations', + with_bbox=True, + with_mask=True, + with_seg=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 4), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/panoptic_train2017.json', + img_prefix=data_root + 'train2017/', + seg_prefix=data_root + 
'annotations/panoptic_train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/panoptic_val2017.json', + img_prefix=data_root + 'val2017/', + seg_prefix=data_root + 'annotations/panoptic_val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/panoptic_val2017.json', + img_prefix=data_root + 'val2017/', + seg_prefix=data_root + 'annotations/panoptic_val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric=['PQ']) diff --git a/object_detection/configs/_base_/datasets/deepfashion.py b/object_detection/configs/_base_/datasets/deepfashion.py new file mode 100644 index 0000000000000000000000000000000000000000..308b4b2ac4d9e3516ba4a57e9d3b6af91e97f24b --- /dev/null +++ b/object_detection/configs/_base_/datasets/deepfashion.py @@ -0,0 +1,53 @@ +# dataset settings +dataset_type = 'DeepFashionDataset' +data_root = 'data/DeepFashion/In-shop/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(750, 1101), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(750, 1101), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + imgs_per_gpu=2, + workers_per_gpu=1, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/DeepFashion_segmentation_query.json', + img_prefix=data_root + 'Img/', + pipeline=train_pipeline, + data_root=data_root), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/DeepFashion_segmentation_query.json', + img_prefix=data_root + 'Img/', + pipeline=test_pipeline, + data_root=data_root), + test=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/DeepFashion_segmentation_gallery.json', + img_prefix=data_root + 'Img/', + pipeline=test_pipeline, + data_root=data_root)) +evaluation = dict(interval=5, metric=['bbox', 'segm']) diff --git a/object_detection/configs/_base_/datasets/lvis_v0.5_instance.py b/object_detection/configs/_base_/datasets/lvis_v0.5_instance.py new file mode 100644 index 0000000000000000000000000000000000000000..207e0053c24d73e05e78c764d05e65c102675320 --- /dev/null +++ b/object_detection/configs/_base_/datasets/lvis_v0.5_instance.py @@ -0,0 +1,24 @@ +# dataset settings +_base_ = 'coco_instance.py' +dataset_type = 'LVISV05Dataset' +data_root = 'data/lvis_v0.5/' +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + _delete_=True, + type='ClassBalancedDataset', + oversample_thr=1e-3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v0.5_train.json', + img_prefix=data_root + 'train2017/')), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v0.5_val.json', + img_prefix=data_root + 'val2017/'), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v0.5_val.json', + img_prefix=data_root + 'val2017/')) 
+evaluation = dict(metric=['bbox', 'segm']) diff --git a/object_detection/configs/_base_/datasets/lvis_v1_instance.py b/object_detection/configs/_base_/datasets/lvis_v1_instance.py new file mode 100644 index 0000000000000000000000000000000000000000..be791edd79495dce88d010eea63e33d398f242b0 --- /dev/null +++ b/object_detection/configs/_base_/datasets/lvis_v1_instance.py @@ -0,0 +1,24 @@ +# dataset settings +_base_ = 'coco_instance.py' +dataset_type = 'LVISV1Dataset' +data_root = 'data/lvis_v1/' +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + _delete_=True, + type='ClassBalancedDataset', + oversample_thr=1e-3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_train.json', + img_prefix=data_root)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_val.json', + img_prefix=data_root), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_val.json', + img_prefix=data_root)) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/object_detection/configs/_base_/datasets/voc0712.py b/object_detection/configs/_base_/datasets/voc0712.py new file mode 100644 index 0000000000000000000000000000000000000000..ae09acdd5c9580217815300abbad9f08b71b37ed --- /dev/null +++ b/object_detection/configs/_base_/datasets/voc0712.py @@ -0,0 +1,55 @@ +# dataset settings +dataset_type = 'VOCDataset' +data_root = 'data/VOCdevkit/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1000, 600), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1000, 600), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=[ + data_root + 'VOC2007/ImageSets/Main/trainval.txt', + data_root + 'VOC2012/ImageSets/Main/trainval.txt' + ], + img_prefix=[data_root + 'VOC2007/', data_root + 'VOC2012/'], + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'VOC2007/ImageSets/Main/test.txt', + img_prefix=data_root + 'VOC2007/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'VOC2007/ImageSets/Main/test.txt', + img_prefix=data_root + 'VOC2007/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='mAP') diff --git a/object_detection/configs/_base_/datasets/wider_face.py b/object_detection/configs/_base_/datasets/wider_face.py new file mode 100644 index 0000000000000000000000000000000000000000..d1d649be42bca2955fb56a784fe80bcc2fdce4e1 --- /dev/null +++ b/object_detection/configs/_base_/datasets/wider_face.py @@ -0,0 +1,63 @@ +# dataset settings +dataset_type = 'WIDERFaceDataset' +data_root = 'data/WIDERFace/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + 
dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(300, 300), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(300, 300), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=60, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=2, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'train.txt', + img_prefix=data_root + 'WIDER_train/', + min_size=17, + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'val.txt', + img_prefix=data_root + 'WIDER_val/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'val.txt', + img_prefix=data_root + 'WIDER_val/', + pipeline=test_pipeline)) diff --git a/object_detection/configs/_base_/default_runtime.py b/object_detection/configs/_base_/default_runtime.py new file mode 100644 index 0000000000000000000000000000000000000000..55097c5b242da66c9735c0b45cd84beefab487b1 --- /dev/null +++ b/object_detection/configs/_base_/default_runtime.py @@ -0,0 +1,16 @@ +checkpoint_config = dict(interval=1) +# yapf:disable +log_config = dict( + interval=50, + hooks=[ + dict(type='TextLoggerHook'), + # dict(type='TensorboardLoggerHook') + ]) +# yapf:enable +custom_hooks = [dict(type='NumClassCheckHook')] + +dist_params = dict(backend='nccl') +log_level = 'INFO' +load_from = None +resume_from = None +workflow = [('train', 1)] diff --git a/object_detection/configs/_base_/models/cascade_mask_rcnn_r50_fpn.py b/object_detection/configs/_base_/models/cascade_mask_rcnn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..2902ccae5a8ffaa6ae9c49212b68a71035c83e60 --- /dev/null +++ b/object_detection/configs/_base_/models/cascade_mask_rcnn_r50_fpn.py @@ -0,0 +1,196 @@ +# model settings +model = dict( + type='CascadeRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='CascadeRoIHead', + num_stages=3, + 
stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=dict( + type='FCNMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=[ + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.6, + neg_iou_thr=0.6, + min_pos_iou=0.6, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.7, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False) + ]), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), 
+ max_per_img=100, + mask_thr_binary=0.5))) diff --git a/object_detection/configs/_base_/models/cascade_rcnn_r50_fpn.py b/object_detection/configs/_base_/models/cascade_rcnn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..42f74ae748a32bdce10ab9003fd45f87721d02ff --- /dev/null +++ b/object_detection/configs/_base_/models/cascade_rcnn_r50_fpn.py @@ -0,0 +1,179 @@ +# model settings +model = dict( + type='CascadeRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='CascadeRoIHead', + num_stages=3, + stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ]), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=[ + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + 
num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.6, + neg_iou_thr=0.6, + min_pos_iou=0.6, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.7, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False) + ]), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100))) diff --git a/object_detection/configs/_base_/models/fast_rcnn_r50_fpn.py b/object_detection/configs/_base_/models/fast_rcnn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..9982fe0956d60022a2c702a824ffaff192e93e1e --- /dev/null +++ b/object_detection/configs/_base_/models/fast_rcnn_r50_fpn.py @@ -0,0 +1,62 @@ +# model settings +model = dict( + type='FastRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False)), + test_cfg=dict( + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100))) diff --git a/object_detection/configs/_base_/models/faster_rcnn_r50_caffe_c4.py b/object_detection/configs/_base_/models/faster_rcnn_r50_caffe_c4.py new file mode 100644 index 0000000000000000000000000000000000000000..51b5db469e83cc6b3cf2adae92b5d5741825ab35 --- /dev/null +++ b/object_detection/configs/_base_/models/faster_rcnn_r50_caffe_c4.py @@ -0,0 +1,114 @@ +# model settings +norm_cfg = dict(type='BN', requires_grad=False) +model = dict( + type='FasterRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=3, + strides=(1, 2, 2), + dilations=(1, 1, 1), + out_indices=(2, ), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + 
checkpoint='open-mmlab://detectron2/resnet50_caffe')), + rpn_head=dict( + type='RPNHead', + in_channels=1024, + feat_channels=1024, + anchor_generator=dict( + type='AnchorGenerator', + scales=[2, 4, 8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[16]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + shared_head=dict( + type='ResLayer', + depth=50, + stage=3, + stride=2, + dilation=1, + style='caffe', + norm_cfg=norm_cfg, + norm_eval=True), + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=1024, + featmap_strides=[16]), + bbox_head=dict( + type='BBoxHead', + with_avg_pool=True, + roi_feat_size=7, + in_channels=2048, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=12000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=6000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100))) diff --git a/object_detection/configs/_base_/models/faster_rcnn_r50_caffe_dc5.py b/object_detection/configs/_base_/models/faster_rcnn_r50_caffe_dc5.py new file mode 100644 index 0000000000000000000000000000000000000000..a377a6f09664b5eca189fa77dcb47c69842fdbf2 --- /dev/null +++ b/object_detection/configs/_base_/models/faster_rcnn_r50_caffe_dc5.py @@ -0,0 +1,105 @@ +# model settings +norm_cfg = dict(type='BN', requires_grad=False) +model = dict( + type='FasterRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + strides=(1, 2, 2, 1), + dilations=(1, 1, 1, 2), + out_indices=(3, ), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + rpn_head=dict( + type='RPNHead', + in_channels=2048, + feat_channels=2048, + anchor_generator=dict( + type='AnchorGenerator', + scales=[2, 4, 8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[16]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + 
type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=2048, + featmap_strides=[16]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=2048, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=12000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms=dict(type='nms', iou_threshold=0.7), + nms_pre=6000, + max_per_img=1000, + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100))) diff --git a/object_detection/configs/_base_/models/faster_rcnn_r50_fpn.py b/object_detection/configs/_base_/models/faster_rcnn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..1ef8e7b2579504e7614429609524ae38239701cc --- /dev/null +++ b/object_detection/configs/_base_/models/faster_rcnn_r50_fpn.py @@ -0,0 +1,108 @@ +# model settings +model = dict( + type='FasterRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + 
rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100) + # soft-nms is also supported for rcnn testing + # e.g., nms=dict(type='soft_nms', iou_threshold=0.5, min_score=0.05) + )) diff --git a/object_detection/configs/_base_/models/mask_rcnn_r50_caffe_c4.py b/object_detection/configs/_base_/models/mask_rcnn_r50_caffe_c4.py new file mode 100644 index 0000000000000000000000000000000000000000..122202e1a5d6b3367de9a8c632864cf168ca5b9d --- /dev/null +++ b/object_detection/configs/_base_/models/mask_rcnn_r50_caffe_c4.py @@ -0,0 +1,125 @@ +# model settings +norm_cfg = dict(type='BN', requires_grad=False) +model = dict( + type='MaskRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=3, + strides=(1, 2, 2), + dilations=(1, 1, 1), + out_indices=(2, ), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + rpn_head=dict( + type='RPNHead', + in_channels=1024, + feat_channels=1024, + anchor_generator=dict( + type='AnchorGenerator', + scales=[2, 4, 8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[16]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + shared_head=dict( + type='ResLayer', + depth=50, + stage=3, + stride=2, + dilation=1, + style='caffe', + norm_cfg=norm_cfg, + norm_eval=True), + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=1024, + featmap_strides=[16]), + bbox_head=dict( + type='BBoxHead', + with_avg_pool=True, + roi_feat_size=7, + in_channels=2048, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + mask_roi_extractor=None, + mask_head=dict( + type='FCNMaskHead', + num_convs=0, + in_channels=2048, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + 
num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=12000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=14, + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=6000, + nms=dict(type='nms', iou_threshold=0.7), + max_per_img=1000, + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5))) diff --git a/object_detection/configs/_base_/models/mask_rcnn_r50_fpn.py b/object_detection/configs/_base_/models/mask_rcnn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..d903e55e2d95135b1448e566d4d5ec8146597a6a --- /dev/null +++ b/object_detection/configs/_base_/models/mask_rcnn_r50_fpn.py @@ -0,0 +1,120 @@ +# model settings +model = dict( + type='MaskRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=dict( + type='FCNMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + 
rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5))) diff --git a/object_detection/configs/_base_/models/retinanet_r50_fpn.py b/object_detection/configs/_base_/models/retinanet_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..56e43fa7764cb0f48510415f21888ba0df0c6eb5 --- /dev/null +++ b/object_detection/configs/_base_/models/retinanet_r50_fpn.py @@ -0,0 +1,60 @@ +# model settings +model = dict( + type='RetinaNet', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_input', + num_outs=5), + bbox_head=dict( + type='RetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + # model training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100)) diff --git a/object_detection/configs/_base_/models/rpn_r50_caffe_c4.py b/object_detection/configs/_base_/models/rpn_r50_caffe_c4.py new file mode 100644 index 0000000000000000000000000000000000000000..8b32ca99258e5ddf249d11eadcd46630d88bd55e --- /dev/null +++ b/object_detection/configs/_base_/models/rpn_r50_caffe_c4.py @@ -0,0 +1,58 @@ +# model settings +model = dict( + type='RPN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=3, + strides=(1, 2, 2), + dilations=(1, 1, 1), + out_indices=(2, ), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + neck=None, + rpn_head=dict( + type='RPNHead', + in_channels=1024, + feat_channels=1024, + anchor_generator=dict( + type='AnchorGenerator', + scales=[2, 4, 8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[16]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + 
type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=12000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/object_detection/configs/_base_/models/rpn_r50_fpn.py b/object_detection/configs/_base_/models/rpn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..edaf4d4b06b64b88a4ddd64419fc026e64a6af1d --- /dev/null +++ b/object_detection/configs/_base_/models/rpn_r50_fpn.py @@ -0,0 +1,58 @@ +# model settings +model = dict( + type='RPN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/object_detection/configs/_base_/models/ssd300.py b/object_detection/configs/_base_/models/ssd300.py new file mode 100644 index 0000000000000000000000000000000000000000..f17df010069e300f9f0b6eb456f87e61b8582787 --- /dev/null +++ b/object_detection/configs/_base_/models/ssd300.py @@ -0,0 +1,56 @@ +# model settings +input_size = 300 +model = dict( + type='SingleStageDetector', + backbone=dict( + type='SSDVGG', + depth=16, + with_last_pool=False, + ceil_mode=True, + out_indices=(3, 4), + out_feature_indices=(22, 34), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://vgg16_caffe')), + neck=dict( + type='SSDNeck', + in_channels=(512, 1024), + out_channels=(512, 1024, 512, 256, 256, 256), + level_strides=(2, 2, 1, 1), + level_paddings=(1, 1, 0, 0), + l2_norm_scale=20), + bbox_head=dict( + type='SSDHead', + in_channels=(512, 1024, 512, 256, 256, 256), + num_classes=80, + anchor_generator=dict( + type='SSDAnchorGenerator', + scale_major=False, + input_size=input_size, + basesize_ratio_range=(0.15, 0.9), + strides=[8, 16, 32, 64, 100, 300], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2], [2]]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2])), + # model training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0., + ignore_iof_thr=-1, + gt_max_assign_all=False), + smoothl1_beta=1., + 
allowed_border=-1, + pos_weight=-1, + neg_pos_ratio=3, + debug=False), + test_cfg=dict( + nms_pre=1000, + nms=dict(type='nms', iou_threshold=0.45), + min_bbox_size=0, + score_thr=0.02, + max_per_img=200)) +cudnn_benchmark = True diff --git a/object_detection/configs/_base_/schedules/schedule_1x.py b/object_detection/configs/_base_/schedules/schedule_1x.py new file mode 100644 index 0000000000000000000000000000000000000000..3543058ba205e223e2a0c89ce4961903c737685e --- /dev/null +++ b/object_detection/configs/_base_/schedules/schedule_1x.py @@ -0,0 +1,19 @@ +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +# lr_config = dict( +# policy='step', +# warmup='linear', +# warmup_iters=500, +# warmup_ratio=0.001, +# step=[8, 11]) +# runner = dict(type='EpochBasedRunner', max_epochs=12) + +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.1, + step=[55, 60]) +runner = dict(type='EpochBasedRunner', max_epochs=60) diff --git a/object_detection/configs/_base_/schedules/schedule_20e.py b/object_detection/configs/_base_/schedules/schedule_20e.py new file mode 100644 index 0000000000000000000000000000000000000000..00e859022156dcbef6501c04d03f335639f2c1f6 --- /dev/null +++ b/object_detection/configs/_base_/schedules/schedule_20e.py @@ -0,0 +1,11 @@ +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/object_detection/configs/_base_/schedules/schedule_2x.py b/object_detection/configs/_base_/schedules/schedule_2x.py new file mode 100644 index 0000000000000000000000000000000000000000..69dc9ee8080649ce3646b5775b0ca2e9c863d0f5 --- /dev/null +++ b/object_detection/configs/_base_/schedules/schedule_2x.py @@ -0,0 +1,11 @@ +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/albu_example/README.md b/object_detection/configs/albu_example/README.md new file mode 100644 index 0000000000000000000000000000000000000000..7f0eff52b3cdbe7a2b0aca86e916016df3ffaa0f --- /dev/null +++ b/object_detection/configs/albu_example/README.md @@ -0,0 +1,35 @@ +# Albu Example + +## Abstract + + + +Data augmentation is a commonly used technique for increasing both the size and the diversity of labeled training sets by leveraging input transformations that preserve output labels. In computer vision domain, image augmentations have become a common implicit regularization technique to combat overfitting in deep convolutional neural networks and are ubiquitously used to improve performance. While most deep learning frameworks implement basic image transformations, the list is typically limited to some variations and combinations of flipping, rotating, scaling, and cropping. Moreover, the image processing speed varies in existing tools for image augmentation. 
We present Albumentations, a fast and flexible library for image augmentations that offers a wide variety of image transform operations and is also an easy-to-use wrapper around other augmentation libraries. We provide examples of image augmentations for different computer vision tasks and show that Albumentations is faster than other commonly used image augmentation tools on most of the commonly used image transformations. + + +
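As a quick, self-contained illustration of bbox-aware augmentation with the `albumentations` package (a minimal sketch; the transforms and parameter values are illustrative assumptions, not the exact pipeline used in the config further below):

```python
# Standalone sketch: bbox-aware augmentation with Albumentations (illustrative values).
import albumentations as A
import numpy as np

transform = A.Compose(
    [
        A.ShiftScaleRotate(shift_limit=0.0625, scale_limit=0.0, rotate_limit=0, p=0.5),
        A.RandomBrightnessContrast(brightness_limit=0.2, contrast_limit=0.2, p=0.2),
        A.HorizontalFlip(p=0.5),
    ],
    # Keep boxes consistent with the augmented image; drop boxes that become barely visible.
    bbox_params=A.BboxParams(format="pascal_voc", label_fields=["labels"], min_visibility=0.1),
)

image = np.zeros((480, 640, 3), dtype=np.uint8)   # dummy image
bboxes = [(50, 60, 200, 220)]                     # (x_min, y_min, x_max, y_max)
out = transform(image=image, bboxes=bboxes, labels=[1])
print(out["image"].shape, out["bboxes"])
```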
+ +
+ + + + +## Citation + + + +``` +@article{2018arXiv180906839B, + author = {A. Buslaev, A. Parinov, E. Khvedchenya, V.~I. Iglovikov and A.~A. Kalinin}, + title = "{Albumentations: fast and flexible image augmentations}", + journal = {ArXiv e-prints}, + eprint = {1809.06839}, + year = 2018 +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50 | pytorch | 1x | 4.4 | 16.6 | 38.0 | 34.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/albu_example/mask_rcnn_r50_fpn_albu_1x_coco/mask_rcnn_r50_fpn_albu_1x_coco_20200208-ab203bcd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/albu_example/mask_rcnn_r50_fpn_albu_1x_coco/mask_rcnn_r50_fpn_albu_1x_coco_20200208_225520.log.json) | diff --git a/object_detection/configs/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py b/object_detection/configs/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b3f879a6c573871ea17b2bf158173aadf14457b6 --- /dev/null +++ b/object_detection/configs/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py @@ -0,0 +1,73 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +albu_train_transforms = [ + dict( + type='ShiftScaleRotate', + shift_limit=0.0625, + scale_limit=0.0, + rotate_limit=0, + interpolation=1, + p=0.5), + dict( + type='RandomBrightnessContrast', + brightness_limit=[0.1, 0.3], + contrast_limit=[0.1, 0.3], + p=0.2), + dict( + type='OneOf', + transforms=[ + dict( + type='RGBShift', + r_shift_limit=10, + g_shift_limit=10, + b_shift_limit=10, + p=1.0), + dict( + type='HueSaturationValue', + hue_shift_limit=20, + sat_shift_limit=30, + val_shift_limit=20, + p=1.0) + ], + p=0.1), + dict(type='JpegCompression', quality_lower=85, quality_upper=95, p=0.2), + dict(type='ChannelShuffle', p=0.1), + dict( + type='OneOf', + transforms=[ + dict(type='Blur', blur_limit=3, p=1.0), + dict(type='MedianBlur', blur_limit=3, p=1.0) + ], + p=0.1), +] +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='Pad', size_divisor=32), + dict( + type='Albu', + transforms=albu_train_transforms, + bbox_params=dict( + type='BboxParams', + format='pascal_voc', + label_fields=['gt_labels'], + min_visibility=0.0, + filter_lost_elements=True), + keymap={ + 'img': 'image', + 'gt_masks': 'masks', + 'gt_bboxes': 'bboxes' + }, + update_pad_shape=False, + skip_img_without_anno=True), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'img_norm_cfg', + 'pad_shape', 'scale_factor')) +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/object_detection/configs/atss/README.md b/object_detection/configs/atss/README.md new file mode 100644 index 0000000000000000000000000000000000000000..035964f9f76321439b883c00f056500a7185298b --- /dev/null +++ b/object_detection/configs/atss/README.md @@ -0,0 +1,35 @@ +# Bridging the Gap Between Anchor-based and Anchor-free Detection via 
Adaptive Training Sample Selection + +## Abstract + + + +Object detection has been dominated by anchor-based detectors for several years. Recently, anchor-free detectors have become popular due to the proposal of FPN and Focal Loss. In this paper, we first point out that the essential difference between anchor-based and anchor-free detection is actually how to define positive and negative training samples, which leads to the performance gap between them. If they adopt the same definition of positive and negative samples during training, there is no obvious difference in the final performance, no matter regressing from a box or a point. This shows that how to select positive and negative training samples is important for current object detectors. Then, we propose an Adaptive Training Sample Selection (ATSS) to automatically select positive and negative samples according to statistical characteristics of object. It significantly improves the performance of anchor-based and anchor-free detectors and bridges the gap between them. Finally, we discuss the necessity of tiling multiple anchors per location on the image to detect objects. Extensive experiments conducted on MS COCO support our aforementioned analysis and conclusions. With the newly introduced ATSS, we improve state-of-the-art detectors by a large margin to 50.7% AP without introducing any overhead. + + +
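To make the selection rule above concrete, here is a minimal NumPy sketch (an illustrative restatement under our own assumptions, not the mmdetection implementation): for one ground-truth box, take the top-k anchors whose centers are closest to the box center on each pyramid level, then keep the candidates whose IoU exceeds the mean plus standard deviation of the candidate IoUs. The full rule additionally requires the anchor center to lie inside the ground-truth box, which is omitted here for brevity.

```python
# Minimal NumPy sketch of ATSS-style positive-sample selection (illustrative only).
import numpy as np

def atss_select(anchor_centers_per_level, ious_per_level, gt_center, topk=9):
    """anchor_centers_per_level: list of (N_l, 2) arrays of anchor centers per pyramid level.
    ious_per_level: list of (N_l,) arrays of IoUs between those anchors and one gt box."""
    candidate_idx, candidate_iou, offset = [], [], 0
    for centers, ious in zip(anchor_centers_per_level, ious_per_level):
        # Candidates: top-k anchors whose centers are closest to the gt center on this level.
        dists = np.linalg.norm(centers - gt_center, axis=1)
        idx = np.argsort(dists)[:topk]
        candidate_idx.append(idx + offset)
        candidate_iou.append(ious[idx])
        offset += len(centers)
    candidate_idx = np.concatenate(candidate_idx)
    candidate_iou = np.concatenate(candidate_iou)
    # Adaptive IoU threshold: mean + std of the candidates' IoUs.
    thr = candidate_iou.mean() + candidate_iou.std()
    # Positives: candidates whose IoU clears the adaptive threshold.
    return candidate_idx[candidate_iou >= thr]
```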
+ +
+ + + + +## Citation + + + +```latex +@article{zhang2019bridging, + title = {Bridging the Gap Between Anchor-based and Anchor-free Detection via Adaptive Training Sample Selection}, + author = {Zhang, Shifeng and Chi, Cheng and Yao, Yongqiang and Lei, Zhen and Li, Stan Z.}, + journal = {arXiv preprint arXiv:1912.02424}, + year = {2019} +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | 1x | 3.7 | 19.7 | 39.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/atss/atss_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r50_fpn_1x_coco/atss_r50_fpn_1x_coco_20200209-985f7bd0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r50_fpn_1x_coco/atss_r50_fpn_1x_coco_20200209_102539.log.json) | +| R-101 | pytorch | 1x | 5.6 | 12.3 | 41.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/atss/atss_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r101_fpn_1x_coco/atss_r101_fpn_1x_20200825-dfcadd6f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r101_fpn_1x_coco/atss_r101_fpn_1x_20200825-dfcadd6f.log.json) | diff --git a/object_detection/configs/atss/atss_r101_fpn_1x_coco.py b/object_detection/configs/atss/atss_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5225d2ab672738d4d427eba252e92bd554252476 --- /dev/null +++ b/object_detection/configs/atss/atss_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './atss_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/atss/atss_r50_fpn_1x_coco.py b/object_detection/configs/atss/atss_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..42ff4c598f94f221ded7c91ce330e43310beddae --- /dev/null +++ b/object_detection/configs/atss/atss_r50_fpn_1x_coco.py @@ -0,0 +1,62 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='ATSS', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='ATSSHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=2.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + 
min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/atss/metafile.yml b/object_detection/configs/atss/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..f4c567ef29ba9ea4fddd7bc00d63a4bca41b1cfa --- /dev/null +++ b/object_detection/configs/atss/metafile.yml @@ -0,0 +1,60 @@ +Collections: + - Name: ATSS + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - ATSS + - FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/1912.02424 + Title: 'Bridging the Gap Between Anchor-based and Anchor-free Detection via Adaptive Training Sample Selection' + README: configs/atss/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/atss.py#L6 + Version: v2.0.0 + +Models: + - Name: atss_r50_fpn_1x_coco + In Collection: ATSS + Config: configs/atss/atss_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.7 + inference time (ms/im): + - value: 50.76 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r50_fpn_1x_coco/atss_r50_fpn_1x_coco_20200209-985f7bd0.pth + + - Name: atss_r101_fpn_1x_coco + In Collection: ATSS + Config: configs/atss/atss_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.6 + inference time (ms/im): + - value: 81.3 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r101_fpn_1x_coco/atss_r101_fpn_1x_20200825-dfcadd6f.pth diff --git a/object_detection/configs/autoassign/README.md b/object_detection/configs/autoassign/README.md new file mode 100644 index 0000000000000000000000000000000000000000..172071d50012a088c507f9a11623d2137f8382c5 --- /dev/null +++ b/object_detection/configs/autoassign/README.md @@ -0,0 +1,39 @@ +# AutoAssign: Differentiable Label Assignment for Dense Object Detection + +## Abstract + + + +Determining positive/negative samples for object detection is known as label assignment. Here we present an anchor-free detector named AutoAssign. It requires little human knowledge and achieves appearance-aware through a fully differentiable weighting mechanism. During training, to both satisfy the prior distribution of data and adapt to category characteristics, we present Center Weighting to adjust the category-specific prior distributions. To adapt to object appearances, Confidence Weighting is proposed to adjust the specific assign strategy of each instance. The two weighting modules are then combined to generate positive and negative weights to adjust each location's confidence. Extensive experiments on the MS COCO show that our method steadily surpasses other best sampling strategies by large margins with various backbones. Moreover, our best model achieves 52.1% AP, outperforming all existing one-stage detectors. Besides, experiments on other datasets, e.g., PASCAL VOC, Objects365, and WiderFace, demonstrate the broad applicability of AutoAssign. + + +
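As a toy illustration of the Center Weighting idea described above (a simplified sketch under our own assumptions, not the released AutoAssign code), the module below scores candidate locations with a category-specific Gaussian prior on their normalized offset from the ground-truth box center; the learnable mean and sigma let the prior shift and spread per class. The second module, Confidence Weighting, is omitted.

```python
# Toy sketch of a learnable, category-specific center prior (simplified, illustrative only).
import torch
import torch.nn as nn

class CenterPrior(nn.Module):
    def __init__(self, num_classes):
        super().__init__()
        # Per-class learnable mean/std of the prior, in box-normalized coordinates.
        self.mu = nn.Parameter(torch.zeros(num_classes, 2))
        self.sigma = nn.Parameter(torch.ones(num_classes, 2))

    def forward(self, points, gt_box, gt_label):
        """points: (N, 2) xy locations; gt_box: (4,) x1,y1,x2,y2 tensor; gt_label: class index."""
        cx = (gt_box[0] + gt_box[2]) / 2
        cy = (gt_box[1] + gt_box[3]) / 2
        # Offset of each location from the box center, normalized by box width/height.
        offset = (points - torch.stack([cx, cy])) / (gt_box[2:] - gt_box[:2]).clamp(min=1e-6)
        d = (offset - self.mu[gt_label]) / self.sigma[gt_label].clamp(min=1e-6)
        # Gaussian prior: highest weight near the (learned) center, decaying outwards.
        return torch.exp(-0.5 * (d ** 2).sum(dim=-1))   # (N,)
```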
+ +
+ + + + +## Citation + + + +``` +@article{zhu2020autoassign, + title={AutoAssign: Differentiable Label Assignment for Dense Object Detection}, + author={Zhu, Benjin and Wang, Jianfeng and Jiang, Zhengkai and Zong, Fuhang and Liu, Songtao and Li, Zeming and Sun, Jian}, + journal={arXiv preprint arXiv:2007.03496}, + year={2020} +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:------:|:------:|:--------:| +| R-50 | caffe | 1x | 4.08 | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/autoassign/auto_assign_r50_fpn_1x_coco/auto_assign_r50_fpn_1x_coco_20210413_115540-5e17991f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/autoassign/auto_assign_r50_fpn_1x_coco/auto_assign_r50_fpn_1x_coco_20210413_115540-5e17991f.log.json) | + +**Note**: + +1. We find that the performance is unstable with the 1x setting and may fluctuate by about 0.3 mAP; results in the 40.3 ~ 40.6 mAP range are acceptable. Such fluctuation can also be found in the original implementation. +2. You can get more stable results (~40.6 mAP) with a 13-epoch schedule in which the learning rate is divided by 10 at the 10th and 13th epochs. diff --git a/object_detection/configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py b/object_detection/configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..db548dc3ca4e54f631668f880eb53586bc17579c --- /dev/null +++ b/object_detection/configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py @@ -0,0 +1,85 @@ +# We follow the original implementation which +# adopts the Caffe pre-trained backbone.
+_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='AutoAssign', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + num_outs=5, + relu_before_extra_convs=True, + init_cfg=dict(type='Caffe2Xavier', layer='Conv2d')), + bbox_head=dict( + type='AutoAssignHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + loss_bbox=dict(type='GIoULoss', loss_weight=5.0)), + train_cfg=None, + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(lr=0.01, paramwise_cfg=dict(norm_decay_mult=0.)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=1.0 / 1000, + step=[8, 11]) +total_epochs = 12 diff --git a/object_detection/configs/autoassign/metafile.yml b/object_detection/configs/autoassign/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..f1e9051934e737736cfe6f3fb3ed3562b517f9ec --- /dev/null +++ b/object_detection/configs/autoassign/metafile.yml @@ -0,0 +1,33 @@ +Collections: + - Name: AutoAssign + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - AutoAssign + - FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/2007.03496 + Title: 'AutoAssign: Differentiable Label Assignment for Dense Object Detection' + README: configs/autoassign/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.12.0/mmdet/models/detectors/autoassign.py#L6 + Version: v2.12.0 + +Models: + - Name: autoassign_r50_fpn_8x2_1x_coco + In Collection: AutoAssign + Config: configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py + Metadata: + Training Memory (GB): 4.08 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/autoassign/auto_assign_r50_fpn_1x_coco/auto_assign_r50_fpn_1x_coco_20210413_115540-5e17991f.pth diff --git 
a/object_detection/configs/carafe/README.md b/object_detection/configs/carafe/README.md new file mode 100644 index 0000000000000000000000000000000000000000..dca52e6d1ef364ca12d883d79b54e3798207e189 --- /dev/null +++ b/object_detection/configs/carafe/README.md @@ -0,0 +1,46 @@ +# CARAFE: Content-Aware ReAssembly of FEatures + +## Abstract + + + +Feature upsampling is a key operation in a number of modern convolutional network architectures, e.g. feature pyramids. Its design is critical for dense prediction tasks such as object detection and semantic/instance segmentation. In this work, we propose Content-Aware ReAssembly of FEatures (CARAFE), a universal, lightweight and highly effective operator to fulfill this goal. CARAFE has several appealing properties: (1) Large field of view. Unlike previous works (e.g. bilinear interpolation) that only exploit sub-pixel neighborhood, CARAFE can aggregate contextual information within a large receptive field. (2) Content-aware handling. Instead of using a fixed kernel for all samples (e.g. deconvolution), CARAFE enables instance-specific content-aware handling, which generates adaptive kernels on-the-fly. (3) Lightweight and fast to compute. CARAFE introduces little computational overhead and can be readily integrated into modern network architectures. We conduct comprehensive evaluations on standard benchmarks in object detection, instance/semantic segmentation and inpainting. CARAFE shows consistent and substantial gains across all the tasks (1.2%, 1.3%, 1.8%, 1.1db respectively) with negligible computational overhead. It has great potential to serve as a strong building block for future research. + + +
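Since the abstract only describes CARAFE at a high level, here is a compact PyTorch approximation of the operator for reference: a channel compressor plus content encoder predict a normalized k×k kernel for every upsampled location, which is then used to reassemble the corresponding k×k neighborhood of the input feature map. This is an illustrative re-implementation under simplifying assumptions, not the optimized CUDA kernel referenced at the end of this README.

```python
# Naive PyTorch sketch of the CARAFE upsampling operator (illustrative, not the CUDA version).
import torch
import torch.nn as nn
import torch.nn.functional as F

class NaiveCARAFE(nn.Module):
    def __init__(self, channels, scale=2, up_kernel=5, compressed=64, encoder_kernel=3):
        super().__init__()
        self.scale, self.k = scale, up_kernel
        self.compress = nn.Conv2d(channels, compressed, 1)
        # Predict one (k*k) reassembly kernel for each of the scale*scale output sub-locations.
        self.encoder = nn.Conv2d(compressed, up_kernel * up_kernel * scale * scale,
                                 encoder_kernel, padding=encoder_kernel // 2)

    def forward(self, x):
        b, c, h, w = x.shape
        s, k = self.scale, self.k
        # 1) Content-aware kernel prediction.
        kernels = self.encoder(self.compress(x))            # B, k*k*s*s, H, W
        kernels = F.pixel_shuffle(kernels, s)                # B, k*k, sH, sW
        kernels = F.softmax(kernels, dim=1)                  # normalize each kernel
        # 2) Reassembly: gather k x k neighborhoods of the low-resolution map.
        patches = F.unfold(x, k, padding=k // 2)             # B, c*k*k, H*W
        patches = patches.view(b, c, k * k, h, w)
        # Nearest-upsample the neighborhoods so each output location sees its source window.
        patches = F.interpolate(
            patches.view(b, c * k * k, h, w), scale_factor=s, mode='nearest'
        ).view(b, c, k * k, s * h, s * w)
        # Weighted sum over the k x k window with the predicted, location-specific kernels.
        return (patches * kernels.unsqueeze(1)).sum(dim=2)   # B, c, sH, sW

x = torch.randn(1, 256, 32, 32)
print(NaiveCARAFE(256)(x).shape)   # expected: torch.Size([1, 256, 64, 64])
```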
+ +
+ + + + +## Citation + + + +We provide config files to reproduce the object detection & instance segmentation results in the ICCV 2019 Oral paper for [CARAFE: Content-Aware ReAssembly of FEatures](https://arxiv.org/abs/1905.02188). + +``` +@inproceedings{Wang_2019_ICCV, + title = {CARAFE: Content-Aware ReAssembly of FEatures}, + author = {Wang, Jiaqi and Chen, Kai and Xu, Rui and Liu, Ziwei and Loy, Chen Change and Lin, Dahua}, + booktitle = {The IEEE International Conference on Computer Vision (ICCV)}, + month = {October}, + year = {2019} +} +``` + +## Results and Models + +The results on COCO 2017 val are shown in the table below. + +| Method | Backbone | Style | Lr schd | Test Proposal Num | Inf time (fps) | Box AP | Mask AP | Config | Download | +|:--------------------:|:--------:|:-------:|:-------:|:-----------------:|:--------------:|:------:|:-------:|:------:|:--------:| +| Faster R-CNN w/ CARAFE | R-50-FPN | pytorch | 1x | 1000 | 16.5 | 38.6 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/carafe/faster_rcnn_r50_fpn_carafe_1x_coco/faster_rcnn_r50_fpn_carafe_1x_coco_bbox_mAP-0.386_20200504_175733-385a75b7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/carafe/faster_rcnn_r50_fpn_carafe_1x_coco/faster_rcnn_r50_fpn_carafe_1x_coco_20200504_175733.log.json) | +| - | - | - | - | 2000 | | | | | +| Mask R-CNN w/ CARAFE | R-50-FPN | pytorch | 1x | 1000 | 14.0 | 39.3 | 35.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/carafe/mask_rcnn_r50_fpn_carafe_1x_coco/mask_rcnn_r50_fpn_carafe_1x_coco_bbox_mAP-0.393__segm_mAP-0.358_20200503_135957-8687f195.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/carafe/mask_rcnn_r50_fpn_carafe_1x_coco/mask_rcnn_r50_fpn_carafe_1x_coco_20200503_135957.log.json) | +| - | - | - | - | 2000 | | | | | + +## Implementation + +The CUDA implementation of CARAFE can be found at https://github.com/myownskyW7/CARAFE.
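For intuition about the `upsample_cfg` fields used in the configs that follow (`up_kernel`, `encoder_kernel`, `compressed_channels`), here is a minimal, unoptimized PyTorch sketch of the CARAFE operation described in the abstract: predict a softmax-normalized reassembly kernel per output pixel, then take a content-dependent weighted sum over a k x k neighborhood of the low-resolution feature map. This is an illustration only, not the optimized CUDA operator linked above; the class name `NaiveCARAFE` and the demo shapes are ours.

```python
import torch
import torch.nn as nn
import torch.nn.functional as F


class NaiveCARAFE(nn.Module):
    """Illustrative CARAFE upsampler (sketch, not the CUDA op).

    Hyper-parameter names mirror the configs below:
    up_kernel, encoder_kernel, compressed_channels.
    """

    def __init__(self, channels, scale=2, up_kernel=5, encoder_kernel=3,
                 compressed_channels=64):
        super().__init__()
        self.scale, self.up_kernel = scale, up_kernel
        self.compressor = nn.Conv2d(channels, compressed_channels, 1)
        # predicts one (up_kernel x up_kernel) kernel for each of the
        # scale^2 output positions associated with every input location
        self.encoder = nn.Conv2d(
            compressed_channels, scale * scale * up_kernel * up_kernel,
            encoder_kernel, padding=encoder_kernel // 2)

    def forward(self, x):
        n, c, h, w = x.shape
        s, k = self.scale, self.up_kernel
        # 1) content-aware kernel prediction at the target resolution
        kernels = self.encoder(self.compressor(x))   # (n, s*s*k*k, h, w)
        kernels = F.pixel_shuffle(kernels, s)        # (n, k*k, s*h, s*w)
        kernels = F.softmax(kernels, dim=1)
        # 2) gather the k x k neighborhood of every source location ...
        patches = F.unfold(x, k, padding=k // 2)     # (n, c*k*k, h*w)
        patches = patches.view(n, c * k * k, h, w)
        # ... and replicate it for the s*s output pixels it serves
        patches = F.interpolate(patches, scale_factor=s, mode='nearest')
        patches = patches.view(n, c, k * k, s * h, s * w)
        # 3) reassembly: weighted sum over the neighborhood
        return (patches * kernels.unsqueeze(1)).sum(dim=2)


if __name__ == '__main__':
    feat = torch.randn(1, 256, 50, 68)
    print(NaiveCARAFE(256)(feat).shape)  # torch.Size([1, 256, 100, 136])
```

Note how the softmax-normalized kernels make the upsampling a content-dependent weighted average, in contrast to the fixed kernels of bilinear interpolation or deconvolution that the abstract contrasts against.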
diff --git a/object_detection/configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py b/object_detection/configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..dedac3f46b4710d16a8bc66f00663e379b2ebdc7 --- /dev/null +++ b/object_detection/configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py @@ -0,0 +1,50 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + neck=dict( + type='FPN_CARAFE', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5, + start_level=0, + end_level=-1, + norm_cfg=None, + act_cfg=None, + order=('conv', 'norm', 'act'), + upsample_cfg=dict( + type='carafe', + up_kernel=5, + up_group=1, + encoder_kernel=3, + encoder_dilation=1, + compressed_channels=64))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py b/object_detection/configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..668c023981b9d421e5b51a48757c3819d090307f --- /dev/null +++ b/object_detection/configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py @@ -0,0 +1,60 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + neck=dict( + type='FPN_CARAFE', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5, + start_level=0, + end_level=-1, + norm_cfg=None, + act_cfg=None, + order=('conv', 'norm', 'act'), + upsample_cfg=dict( + type='carafe', + up_kernel=5, + up_group=1, + encoder_kernel=3, + encoder_dilation=1, + compressed_channels=64)), + roi_head=dict( + mask_head=dict( + upsample_cfg=dict( + type='carafe', + scale_factor=2, + up_kernel=5, + up_group=1, + encoder_kernel=3, + encoder_dilation=1, + compressed_channels=64)))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + 
dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/carafe/metafile.yml b/object_detection/configs/carafe/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..b58a3f69ee94b5aa8cee1f2a294e57d098fe2552 --- /dev/null +++ b/object_detection/configs/carafe/metafile.yml @@ -0,0 +1,55 @@ +Collections: + - Name: CARAFE + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RPN + - FPN_CARAFE + - ResNet + - RoIPool + Paper: + URL: https://arxiv.org/abs/1905.02188 + Title: 'CARAFE: Content-Aware ReAssembly of FEatures' + README: configs/carafe/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.12.0/mmdet/models/necks/fpn_carafe.py#L11 + Version: v2.12.0 + +Models: + - Name: faster_rcnn_r50_fpn_carafe_1x_coco + In Collection: CARAFE + Config: configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py + Metadata: + Training Memory (GB): 4.26 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/carafe/faster_rcnn_r50_fpn_carafe_1x_coco/faster_rcnn_r50_fpn_carafe_1x_coco_bbox_mAP-0.386_20200504_175733-385a75b7.pth + + - Name: mask_rcnn_r50_fpn_carafe_1x_coco + In Collection: CARAFE + Config: configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py + Metadata: + Training Memory (GB): 4.31 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 35.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/carafe/mask_rcnn_r50_fpn_carafe_1x_coco/mask_rcnn_r50_fpn_carafe_1x_coco_bbox_mAP-0.393__segm_mAP-0.358_20200503_135957-8687f195.pth diff --git a/object_detection/configs/cascade_rcnn/README.md b/object_detection/configs/cascade_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a88cfd772a6c72818d2a161a24325152684c7b73 --- /dev/null +++ b/object_detection/configs/cascade_rcnn/README.md @@ -0,0 +1,83 @@ +# Cascade R-CNN: High Quality Object Detection and Instance Segmentation + +## Abstract + + + +In object detection, the intersection over union (IoU) threshold is frequently used to define positives/negatives. The threshold used to train a detector defines its quality. While the commonly used threshold of 0.5 leads to noisy (low-quality) detections, detection performance frequently degrades for larger thresholds. This paradox of high-quality detection has two causes: 1) overfitting, due to vanishing positive samples for large thresholds, and 2) inference-time quality mismatch between detector and test hypotheses. A multi-stage object detection architecture, the Cascade R-CNN, composed of a sequence of detectors trained with increasing IoU thresholds, is proposed to address these problems. The detectors are trained sequentially, using the output of a detector as training set for the next. This resampling progressively improves hypotheses quality, guaranteeing a positive training set of equivalent size for all detectors and minimizing overfitting. The same cascade is applied at inference, to eliminate quality mismatches between hypotheses and detectors. 
An implementation of the Cascade R-CNN without bells or whistles achieves state-of-the-art performance on the COCO dataset, and significantly improves high-quality detection on generic and specific object detection datasets, including VOC, KITTI, CityPerson, and WiderFace. Finally, the Cascade R-CNN is generalized to instance segmentation, with nontrivial improvements over the Mask R-CNN. + + +
+ +
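As context for the result tables below: the cascade described in the abstract appears in these configs as a list of per-stage RoI-head training settings with increasing IoU thresholds, inherited from the shared `_base_` model files. The snippet below is a trimmed, illustrative sketch of that pattern, not the complete base config; only a subset of fields is shown, and 0.5/0.6/0.7 are the per-stage positive-IoU thresholds from the paper.

```python
# Illustrative excerpt (abridged): each later stage is trained with a
# stricter positive-IoU threshold on the boxes refined by the stage before it.
train_cfg = dict(
    rcnn=[
        dict(  # stage 1
            assigner=dict(type='MaxIoUAssigner', pos_iou_thr=0.5,
                          neg_iou_thr=0.5, min_pos_iou=0.5),
            sampler=dict(type='RandomSampler', num=512, pos_fraction=0.25)),
        dict(  # stage 2: trained on stage 1's refined boxes
            assigner=dict(type='MaxIoUAssigner', pos_iou_thr=0.6,
                          neg_iou_thr=0.6, min_pos_iou=0.6),
            sampler=dict(type='RandomSampler', num=512, pos_fraction=0.25)),
        dict(  # stage 3: highest-quality positives
            assigner=dict(type='MaxIoUAssigner', pos_iou_thr=0.7,
                          neg_iou_thr=0.7, min_pos_iou=0.7),
            sampler=dict(type='RandomSampler', num=512, pos_fraction=0.25)),
    ])
```

Each stage thus sees better-localized proposals than the one before it, which is the resampling the abstract credits for avoiding overfitting at high IoU thresholds.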
+ + + + +## Citation + + + +```latex +@article{Cai_2019, + title={Cascade R-CNN: High Quality Object Detection and Instance Segmentation}, + ISSN={1939-3539}, + url={http://dx.doi.org/10.1109/tpami.2019.2956516}, + DOI={10.1109/tpami.2019.2956516}, + journal={IEEE Transactions on Pattern Analysis and Machine Intelligence}, + publisher={Institute of Electrical and Electronics Engineers (IEEE)}, + author={Cai, Zhaowei and Vasconcelos, Nuno}, + year={2019}, + pages={1–1} +} +``` + +## Results and models + +### Cascade R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: |:------:|:--------:| +| R-50-FPN | caffe | 1x | 4.2 | | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco/cascade_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.404_20200504_174853-b857be87.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco/cascade_rcnn_r50_caffe_fpn_1x_coco_20200504_174853.log.json) | +| R-50-FPN | pytorch | 1x | 4.4 | 16.1 | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco/cascade_rcnn_r50_fpn_1x_coco_20200316-3dc56deb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco/cascade_rcnn_r50_fpn_1x_coco_20200316_214748.log.json) | +| R-50-FPN | pytorch | 20e | - | - | 41.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco/cascade_rcnn_r50_fpn_20e_coco_bbox_mAP-0.41_20200504_175131-e9872a90.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco/cascade_rcnn_r50_fpn_20e_coco_20200504_175131.log.json) | +| R-101-FPN | caffe | 1x | 6.2 | | 42.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco/cascade_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.423_20200504_175649-cab8dbd5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco/cascade_rcnn_r101_caffe_fpn_1x_coco_20200504_175649.log.json) | +| R-101-FPN | pytorch | 1x | 6.4 | 13.5 | 42.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco/cascade_rcnn_r101_fpn_1x_coco_20200317-0b6a2fbf.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco/cascade_rcnn_r101_fpn_1x_coco_20200317_101744.log.json) | +| R-101-FPN | pytorch | 20e | - | - | 42.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco/cascade_rcnn_r101_fpn_20e_coco_bbox_mAP-0.425_20200504_231812-5057dcc5.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco/cascade_rcnn_r101_fpn_20e_coco_20200504_231812.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.6 | 10.9 | 43.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco/cascade_rcnn_x101_32x4d_fpn_1x_coco_20200316-95c2deb6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco/cascade_rcnn_x101_32x4d_fpn_1x_coco_20200316_055608.log.json) | +| X-101-32x4d-FPN | pytorch | 20e | 7.6 | | 43.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco/cascade_rcnn_x101_32x4d_fpn_20e_coco_20200906_134608-9ae0a720.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco/cascade_rcnn_x101_32x4d_fpn_20e_coco_20200906_134608.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.7 | | 44.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco/cascade_rcnn_x101_64x4d_fpn_1x_coco_20200515_075702-43ce6a30.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco/cascade_rcnn_x101_64x4d_fpn_1x_coco_20200515_075702.log.json) | +| X-101-64x4d-FPN | pytorch | 20e | 10.7 | | 44.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco/cascade_rcnn_x101_64x4d_fpn_20e_coco_20200509_224357-051557b1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco/cascade_rcnn_x101_64x4d_fpn_20e_coco_20200509_224357.log.json)| + +### Cascade Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 5.9 | | 41.2 | 36.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco/cascade_mask_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.412__segm_mAP-0.36_20200504_174659-5004b251.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco/cascade_mask_rcnn_r50_caffe_fpn_1x_coco_20200504_174659.log.json) | +| R-50-FPN | pytorch | 1x | 6.0 | 11.2 | 41.2 | 35.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco/cascade_mask_rcnn_r50_fpn_1x_coco_20200203-9d4dcb24.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco/cascade_mask_rcnn_r50_fpn_1x_coco_20200203_170449.log.json) | +| 
R-50-FPN | pytorch | 20e | - | - | 41.9 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco/cascade_mask_rcnn_r50_fpn_20e_coco_bbox_mAP-0.419__segm_mAP-0.365_20200504_174711-4af8e66e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco/cascade_mask_rcnn_r50_fpn_20e_coco_20200504_174711.log.json)| +| R-101-FPN | caffe | 1x | 7.8 | | 43.2 | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco/cascade_mask_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.432__segm_mAP-0.376_20200504_174813-5c1e9599.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco/cascade_mask_rcnn_r101_caffe_fpn_1x_coco_20200504_174813.log.json)| +| R-101-FPN | pytorch | 1x | 7.9 | 9.8 | 42.9 | 37.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco/cascade_mask_rcnn_r101_fpn_1x_coco_20200203-befdf6ee.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco/cascade_mask_rcnn_r101_fpn_1x_coco_20200203_092521.log.json) | +| R-101-FPN | pytorch | 20e | - | - | 43.4 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco/cascade_mask_rcnn_r101_fpn_20e_coco_bbox_mAP-0.434__segm_mAP-0.378_20200504_174836-005947da.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco/cascade_mask_rcnn_r101_fpn_20e_coco_20200504_174836.log.json)| +| X-101-32x4d-FPN | pytorch | 1x | 9.2 | 8.6 | 44.3 | 38.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco_20200201-0f411b1f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco_20200201_052416.log.json) | +| X-101-32x4d-FPN | pytorch | 20e | 9.2 | - | 45.0 | 39.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco_20200528_083917-ed1f4751.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco_20200528_083917.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 12.2 | 6.7 | 45.3 | 39.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco_20200203-9a2db89d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco_20200203_044059.log.json) | +| X-101-64x4d-FPN | pytorch | 20e | 12.2 | | 45.6 |39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco_20200512_161033-bdb5126a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco_20200512_161033.log.json)| + +**Notes:** + +- The `20e` schedule in Cascade (Mask) R-CNN indicates decreasing the lr at 16 and 19 epochs, with a total of 20 epochs. + +## Pre-trained Models + +We also train some models with longer schedules and multi-scale training for Cascade Mask R-CNN. The users could finetune them for downstream tasks. + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :----------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | caffe | 3x | 5.7 | | 44.0 | 38.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco_20210707_002651-6e29b3a6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco_20210707_002651.log.json) +| R-50-FPN | pytorch| 3x | 5.9 | | 44.3 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco_20210628_164719-5bdc3824.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco_20210628_164719.log.json) +| R-101-FPN | caffe | 3x | 7.7 | | 45.4 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco_20210707_002620-a5bd2389.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco_20210707_002620.log.json) +| R-101-FPN | pytorch| 3x | 7.8 | | 45.5 | 39.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco_20210628_165236-51a2d363.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco_20210628_165236.log.json) +| X-101-32x4d-FPN | pytorch| 3x | 9.0 | | 46.3 | 40.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco_20210706_225234-40773067.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco_20210706_225234.log.json) +| X-101-32x8d-FPN | pytorch| 3x | 12.1 | | 46.1 | 39.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco_20210719_180640-9ff7e76f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco_20210719_180640.log.json) +| X-101-64x4d-FPN | pytorch| 3x | 12.0 | | 46.6 | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco_20210719_210311-d3e64ba0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco_20210719_210311.log.json) diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5ee6231034a2fccc42b11b99830f748091551851 --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1df87fc6f30b027f459f0d246987b9de67c4b6bd --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,7 @@ +_base_ = './cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f59c155848d6a40ec31c4de880f7900d9067c6ab --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py 
@@ -0,0 +1,6 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..45ab7edffd33063022e95c6e2b44e503e69eda2c --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py @@ -0,0 +1,6 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1b20f167082d8927b59785dfd97d3652640c0e21 --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco.py @@ -0,0 +1,6 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..12d37efc90ba33f59bb653f00f0166a0c548a5d4 --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,41 @@ +_base_ = ['./cascade_mask_rcnn_r50_fpn_1x_coco.py'] + +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9fb817e82cf330cfa6b962fa88ad8c4eafb4899b --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,49 @@ +_base_ = ['./cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py'] +model = dict( + backbone=dict( + 
norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..49ab539aa4cdf7c396b6f109efe2dc7a6d596a2a --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1296dc45dd89da9c0801e1242080c67957cace74 --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_20e.py', '../_base_/default_runtime.py' +] diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ed0c6d1a88d7c05105c741613d558f92f13b9a9a --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = [ + '../common/mstrain_3x_coco_instance.py', + '../_base_/models/cascade_mask_rcnn_r50_fpn.py' +] diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..06cbbe70dc84f25ba588e80d0061c634e63e94f9 --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + 
base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4e352362b17919bb2ebfffb5b442292880cfb27a --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7d37d17dcb37c5d25cd3bcd3d207e4edab6667a1 --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..eeec1aa1aefabd12cd769eb039f26441b1bd584a --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py @@ -0,0 +1,60 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py' + +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=8, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnext101_32x8d'))) + +# ResNeXt-101-32x8d model trained with Caffe2 at FB, +# so the mean and std need to be changed. 
+img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7dbef5fa2a3a3d962df78ffb1b0b4357b783fd67 --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..579b1aca49383f9d3874f4797bc1dbb2a1311e7c --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ed6cf4b53b709ecc81fc8a09d18e0f11e1ae8df5 --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git 
a/object_detection/configs/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py b/object_detection/configs/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1e90f4bb004798265af98489d6ed584a6a09d434 --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './cascade_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/object_detection/configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py b/object_detection/configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5c077760dd20dc5e00b3b2a1ca6de89347657231 --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py b/object_detection/configs/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b1719c25d59bc6dbe1c0ef71f08160057c21d5bf --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py @@ -0,0 +1,6 @@ +_base_ = './cascade_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py b/object_detection/configs/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..696bcfb939e91c16898c2e039ec9a05d23105d1e --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,42 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py b/object_detection/configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..87e21fbff82763caf0e14ba641493870a15578b1 --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] diff --git a/object_detection/configs/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py b/object_detection/configs/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6f886e1c407ff9376929a7092f82e5508d2b1ac9 --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py @@ -0,0 +1,4 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/object_detection/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py b/object_detection/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5ac02c10d743d0ce4b9cc4bb5f1e29cbc6aff06a --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py b/object_detection/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..486e45ead418d83a80224f241bc2355b82877640 --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py b/object_detection/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..78229f0da3f5a1ac1dfc628821327efd5f34668d --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,15 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + type='CascadeRCNN', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py b/object_detection/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..58812dec5a85d86d85b79d7b53ba33bc6327a815 --- /dev/null +++ b/object_detection/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py @@ -0,0 +1,15 @@ +_base_ 
= './cascade_rcnn_r50_fpn_20e_coco.py' +model = dict( + type='CascadeRCNN', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/cascade_rcnn/metafile.yml b/object_detection/configs/cascade_rcnn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..1007f2eb8106a626d28fa82595bb79744736aeb7 --- /dev/null +++ b/object_detection/configs/cascade_rcnn/metafile.yml @@ -0,0 +1,525 @@ +Collections: + - Name: Cascade R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Cascade R-CNN + - FPN + - RPN + - ResNet + - RoIAlign + Paper: + URL: http://dx.doi.org/10.1109/tpami.2019.2956516 + Title: 'Cascade R-CNN: Delving into High Quality Object Detection' + README: configs/cascade_rcnn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/cascade_rcnn.py#L6 + Version: v2.0.0 + +Models: + - Name: cascade_rcnn_r50_caffe_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco/cascade_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.404_20200504_174853-b857be87.pth + + - Name: cascade_rcnn_r50_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 62.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco/cascade_rcnn_r50_fpn_1x_coco_20200316-3dc56deb.pth + + - Name: cascade_rcnn_r50_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 62.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco/cascade_rcnn_r50_fpn_20e_coco_bbox_mAP-0.41_20200504_175131-e9872a90.pth + + - Name: cascade_rcnn_r101_caffe_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.2 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco/cascade_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.423_20200504_175649-cab8dbd5.pth + + - Name: cascade_rcnn_r101_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 74.07 + hardware: V100 + backend: 
PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco/cascade_rcnn_r101_fpn_1x_coco_20200317-0b6a2fbf.pth + + - Name: cascade_rcnn_r101_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 74.07 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco/cascade_rcnn_r101_fpn_20e_coco_bbox_mAP-0.425_20200504_231812-5057dcc5.pth + + - Name: cascade_rcnn_x101_32x4d_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 91.74 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco/cascade_rcnn_x101_32x4d_fpn_1x_coco_20200316-95c2deb6.pth + + - Name: cascade_rcnn_x101_32x4d_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py + Metadata: + Training Memory (GB): 7.6 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco/cascade_rcnn_x101_32x4d_fpn_20e_coco_20200906_134608-9ae0a720.pth + + - Name: cascade_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.7 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco/cascade_rcnn_x101_64x4d_fpn_1x_coco_20200515_075702-43ce6a30.pth + + - Name: cascade_rcnn_x101_64x4d_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py + Metadata: + Training Memory (GB): 10.7 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco/cascade_rcnn_x101_64x4d_fpn_20e_coco_20200509_224357-051557b1.pth + + - Name: cascade_mask_rcnn_r50_caffe_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.9 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco/cascade_mask_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.412__segm_mAP-0.36_20200504_174659-5004b251.pth + + - Name: cascade_mask_rcnn_r50_fpn_1x_coco + In Collection: Cascade R-CNN + Config: 
configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 89.29 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 35.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco/cascade_mask_rcnn_r50_fpn_1x_coco_20200203-9d4dcb24.pth + + - Name: cascade_mask_rcnn_r50_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 89.29 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco/cascade_mask_rcnn_r50_fpn_20e_coco_bbox_mAP-0.419__segm_mAP-0.365_20200504_174711-4af8e66e.pth + + - Name: cascade_mask_rcnn_r101_caffe_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.8 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco/cascade_mask_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.432__segm_mAP-0.376_20200504_174813-5c1e9599.pth + + - Name: cascade_mask_rcnn_r101_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.9 + inference time (ms/im): + - value: 102.04 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco/cascade_mask_rcnn_r101_fpn_1x_coco_20200203-befdf6ee.pth + + - Name: cascade_mask_rcnn_r101_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py + Metadata: + Training Memory (GB): 7.9 + inference time (ms/im): + - value: 102.04 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco/cascade_mask_rcnn_r101_fpn_20e_coco_bbox_mAP-0.434__segm_mAP-0.378_20200504_174836-005947da.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 9.2 + inference time (ms/im): + - value: 116.28 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: 
Object Detection + Dataset: COCO + Metrics: + box AP: 44.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco_20200201-0f411b1f.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py + Metadata: + Training Memory (GB): 9.2 + inference time (ms/im): + - value: 116.28 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco_20200528_083917-ed1f4751.pth + + - Name: cascade_mask_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 12.2 + inference time (ms/im): + - value: 149.25 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco_20200203-9a2db89d.pth + + - Name: cascade_mask_rcnn_x101_64x4d_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py + Metadata: + Training Memory (GB): 12.2 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco_20200512_161033-bdb5126a.pth + + - Name: cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.7 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco_20210707_002651-6e29b3a6.pth + + - Name: cascade_mask_rcnn_r50_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.9 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco_20210628_164719-5bdc3824.pth + + - Name: cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco.py + Metadata: + 
Training Memory (GB): 7.7 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco_20210707_002620-a5bd2389.pth + + - Name: cascade_mask_rcnn_r101_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 7.8 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco_20210628_165236-51a2d363.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 9.0 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco_20210706_225234-40773067.pth + + - Name: cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 12.1 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco_20210719_180640-9ff7e76f.pth + + - Name: cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 12.0 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco_20210719_210311-d3e64ba0.pth diff --git a/object_detection/configs/cascade_rpn/README.md b/object_detection/configs/cascade_rpn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..06b25a53bc14ff51ab662be14d9ca67633d19ce6 --- /dev/null +++ b/object_detection/configs/cascade_rpn/README.md @@ -0,0 +1,45 @@ +# Cascade RPN: Delving into High-Quality Region Proposal Network with Adaptive Convolution + +## Abstract + + + +This paper considers an architecture referred to as Cascade Region Proposal Network (Cascade RPN) for improving the region-proposal quality and detection performance by systematically addressing the limitation of the conventional RPN that heuristically defines the anchors and aligns the features to the anchors. 
First, instead of using multiple anchors with predefined scales and aspect ratios, Cascade RPN relies on a single anchor per location and performs multi-stage refinement. Each stage is progressively more stringent in defining positive samples by starting out with an anchor-free metric followed by anchor-based metrics in the ensuing stages. Second, to attain alignment between the features and the anchors throughout the stages, adaptive convolution is proposed that takes the anchors in addition to the image features as its input and learns the sampled features guided by the anchors. A simple implementation of a two-stage Cascade RPN achieves an AR 13.4 points higher than that of the conventional RPN, surpassing all existing region proposal methods. When adopted in Fast R-CNN and Faster R-CNN, Cascade RPN can improve the detection mAP by 3.1 and 3.5 points, respectively. + + +
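+
+To make the two ideas above concrete, here is a short, hedged sketch (simplified illustrative code, not this repository's implementation): every location starts from a single anchor, each stage predicts DeltaXYWH-style regression deltas for the current boxes, and the decoded boxes are handed to the next stage, whose features are re-aligned to them via adaptive convolution. The `stage_heads` callables are placeholders standing in for the per-stage heads configured further down in this folder.
+
+```python
+import torch
+
+def delta2bbox(boxes, deltas):
+    # boxes: (N, 4) as (x1, y1, x2, y2); deltas: (N, 4) as (dx, dy, dw, dh)
+    w, h = boxes[:, 2] - boxes[:, 0], boxes[:, 3] - boxes[:, 1]
+    cx, cy = boxes[:, 0] + 0.5 * w, boxes[:, 1] + 0.5 * h
+    cx, cy = cx + deltas[:, 0] * w, cy + deltas[:, 1] * h
+    w, h = w * deltas[:, 2].exp(), h * deltas[:, 3].exp()
+    return torch.stack([cx - 0.5 * w, cy - 0.5 * h,
+                        cx + 0.5 * w, cy + 0.5 * h], dim=1)
+
+def cascade_rpn_refine(anchors, stage_heads, feats):
+    """anchors: (N, 4), one anchor per location; stage_heads: callables returning (N, 4) deltas."""
+    boxes = anchors
+    for head in stage_heads:
+        # each stage re-aligns its features to the current boxes (the adaptive
+        # convolution step) and predicts a refinement of those boxes
+        boxes = delta2bbox(boxes, head(feats, boxes))
+    return boxes  # final proposals, scored by the last stage's classification branch
+```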
+ +
+ + + + +## Citation + + + +We provide the code for reproducing experiment results of [Cascade RPN](https://arxiv.org/abs/1909.06720). + +``` +@inproceedings{vu2019cascade, + title={Cascade RPN: Delving into High-Quality Region Proposal Network with Adaptive Convolution}, + author={Vu, Thang and Jang, Hyunjun and Pham, Trung X and Yoo, Chang D}, + booktitle={Conference on Neural Information Processing Systems (NeurIPS)}, + year={2019} +} +``` + +## Benchmark + +### Region proposal performance + +| Method | Backbone | Style | Mem (GB) | Train time (s/iter) | Inf time (fps) | AR 1000 | Config | Download | +|:------:|:--------:|:-----:|:--------:|:-------------------:|:--------------:|:-------:|:-------:|:--------------------------------------:| +| CRPN | R-50-FPN | caffe | - | - | - | 72.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rpn/crpn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rpn/crpn_r50_caffe_fpn_1x_coco/cascade_rpn_r50_caffe_fpn_1x_coco-7aa93cef.pth) | + +### Detection performance + +| Method | Proposal | Backbone | Style | Schedule | Mem (GB) | Train time (s/iter) | Inf time (fps) | box AP | Config | Download | +|:-------------:|:-----------:|:--------:|:-------:|:--------:|:--------:|:-------------------:|:--------------:|:------:|:-------:|:--------------------------------------------:| +| Fast R-CNN | Cascade RPN | R-50-FPN | caffe | 1x | - | - | - | 39.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rpn/crpn_fast_rcnn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rpn/crpn_fast_rcnn_r50_caffe_fpn_1x_coco/crpn_fast_rcnn_r50_caffe_fpn_1x_coco-cb486e66.pth) | +| Faster R-CNN | Cascade RPN | R-50-FPN | caffe | 1x | - | - | - | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco/crpn_faster_rcnn_r50_caffe_fpn_1x_coco-c8283cca.pth) | diff --git a/object_detection/configs/cascade_rpn/crpn_fast_rcnn_r50_caffe_fpn_1x_coco.py b/object_detection/configs/cascade_rpn/crpn_fast_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..29f5d0745b5689178bcbadc3c30b91ecc8cd5140 --- /dev/null +++ b/object_detection/configs/cascade_rpn/crpn_fast_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,77 @@ +_base_ = '../fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + roi_head=dict( + bbox_head=dict( + bbox_coder=dict(target_stds=[0.04, 0.04, 0.08, 0.08]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.5), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rcnn=dict( + assigner=dict( + pos_iou_thr=0.65, neg_iou_thr=0.65, min_pos_iou=0.65), + sampler=dict(num=256))), + test_cfg=dict(rcnn=dict(score_thr=1e-3))) +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + 
dict(type='LoadProposals', num_max_proposals=300), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=300), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='ToTensor', keys=['proposals']), + dict( + type='ToDataContainer', + fields=[dict(key='proposals', stack=False)]), + dict(type='Collect', keys=['img', 'proposals']), + ]) +] +data = dict( + train=dict( + proposal_file=data_root + + 'proposals/crpn_r50_caffe_fpn_1x_train2017.pkl', + pipeline=train_pipeline), + val=dict( + proposal_file=data_root + + 'proposals/crpn_r50_caffe_fpn_1x_val2017.pkl', + pipeline=test_pipeline), + test=dict( + proposal_file=data_root + + 'proposals/crpn_r50_caffe_fpn_1x_val2017.pkl', + pipeline=test_pipeline)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py b/object_detection/configs/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..bad86e6ddf084b5b7e145463c88a8d2d887d6a53 --- /dev/null +++ b/object_detection/configs/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,92 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py' +rpn_weight = 0.7 +model = dict( + rpn_head=dict( + _delete_=True, + type='CascadeRPNHead', + num_stages=2, + stages=[ + dict( + type='StageCascadeRPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[1.0], + strides=[4, 8, 16, 32, 64]), + adapt_cfg=dict(type='dilation', dilation=3), + bridged_feature=True, + sampling=False, + with_cls=False, + reg_decoded_bbox=True, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=(.0, .0, .0, .0), + target_stds=(0.1, 0.1, 0.5, 0.5)), + loss_bbox=dict( + type='IoULoss', linear=True, + loss_weight=10.0 * rpn_weight)), + dict( + type='StageCascadeRPNHead', + in_channels=256, + feat_channels=256, + adapt_cfg=dict(type='offset'), + bridged_feature=False, + sampling=True, + with_cls=True, + reg_decoded_bbox=True, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=(.0, .0, .0, .0), + target_stds=(0.05, 0.05, 0.1, 0.1)), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0 * rpn_weight), + loss_bbox=dict( + type='IoULoss', linear=True, + loss_weight=10.0 * rpn_weight)) + ]), + roi_head=dict( + bbox_head=dict( + bbox_coder=dict(target_stds=[0.04, 0.04, 0.08, 0.08]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.5), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=[ + dict( + assigner=dict( + type='RegionAssigner', center_ratio=0.2, ignore_ratio=0.5), + allowed_border=-1, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + 
neg_iou_thr=0.7, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + pos_weight=-1, + debug=False) + ], + rpn_proposal=dict(max_per_img=300, nms=dict(iou_threshold=0.8)), + rcnn=dict( + assigner=dict( + pos_iou_thr=0.65, neg_iou_thr=0.65, min_pos_iou=0.65), + sampler=dict(type='RandomSampler', num=256))), + test_cfg=dict( + rpn=dict(max_per_img=300, nms=dict(iou_threshold=0.8)), + rcnn=dict(score_thr=1e-3))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/cascade_rpn/crpn_r50_caffe_fpn_1x_coco.py b/object_detection/configs/cascade_rpn/crpn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5562e696a8d16514fc2139874799ab2ef1df74a1 --- /dev/null +++ b/object_detection/configs/cascade_rpn/crpn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,77 @@ +_base_ = '../rpn/rpn_r50_caffe_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + type='CascadeRPNHead', + num_stages=2, + stages=[ + dict( + type='StageCascadeRPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[1.0], + strides=[4, 8, 16, 32, 64]), + adapt_cfg=dict(type='dilation', dilation=3), + bridged_feature=True, + sampling=False, + with_cls=False, + reg_decoded_bbox=True, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=(.0, .0, .0, .0), + target_stds=(0.1, 0.1, 0.5, 0.5)), + loss_bbox=dict(type='IoULoss', linear=True, loss_weight=10.0)), + dict( + type='StageCascadeRPNHead', + in_channels=256, + feat_channels=256, + adapt_cfg=dict(type='offset'), + bridged_feature=False, + sampling=True, + with_cls=True, + reg_decoded_bbox=True, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=(.0, .0, .0, .0), + target_stds=(0.05, 0.05, 0.1, 0.1)), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', linear=True, loss_weight=10.0)) + ]), + train_cfg=dict(rpn=[ + dict( + assigner=dict( + type='RegionAssigner', center_ratio=0.2, ignore_ratio=0.5), + allowed_border=-1, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.3, + ignore_iof_thr=-1, + iou_calculator=dict(type='BboxOverlaps2D')), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + pos_weight=-1, + debug=False) + ]), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.8), + min_bbox_size=0))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/centernet/README.md b/object_detection/configs/centernet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..37c18e7084dd565d436cf0ee9873b3522d7a0974 --- /dev/null +++ b/object_detection/configs/centernet/README.md @@ -0,0 +1,44 @@ +# Objects as Points + +## Abstract + + + +Detection identifies objects as axis-aligned boxes in an image. Most successful object detectors enumerate a nearly exhaustive list of potential object locations and classify each. This is wasteful, inefficient, and requires additional post-processing. In this paper, we take a different approach. 
We model an object as a single point --- the center point of its bounding box. Our detector uses keypoint estimation to find center points and regresses to all other object properties, such as size, 3D location, orientation, and even pose. Our center-point-based approach, CenterNet, is end-to-end differentiable, simpler, faster, and more accurate than corresponding bounding-box-based detectors. CenterNet achieves the best speed-accuracy trade-off on the MS COCO dataset, with 28.1% AP at 142 FPS, 37.4% AP at 52 FPS, and 45.1% AP with multi-scale testing at 1.4 FPS. We use the same approach to estimate 3D bounding boxes on the KITTI benchmark and human pose on the COCO keypoint dataset. Our method performs competitively with sophisticated multi-stage methods and runs in real-time. + + +
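+
+The decoding step this abstract alludes to can be sketched as follows (a simplified single-image illustration; channel order and units are assumptions of this sketch rather than the exact behavior of the `CenterNetHead` configured below): heatmap peaks are taken as object centers, and the regressed width/height and sub-pixel offsets at those peaks are turned into boxes.
+
+```python
+import torch
+import torch.nn.functional as F
+
+def decode_centers(heatmap, wh, offset, k=100, down_ratio=4):
+    # heatmap: (C, H, W) class heatmaps after sigmoid; wh, offset: (2, H, W)
+    pooled = F.max_pool2d(heatmap[None], kernel_size=3, stride=1, padding=1)[0]
+    peaks = heatmap * (pooled == heatmap)            # keep local maxima only
+    C, H, W = peaks.shape
+    scores, idx = peaks.flatten().topk(k)
+    labels = torch.div(idx, H * W, rounding_mode='floor')
+    ys = torch.div(idx % (H * W), W, rounding_mode='floor')
+    xs = idx % W
+    cx = (xs + offset[0, ys, xs]) * down_ratio       # sub-pixel centers, image scale
+    cy = (ys + offset[1, ys, xs]) * down_ratio
+    w, h = wh[0, ys, xs] * down_ratio, wh[1, ys, xs] * down_ratio
+    boxes = torch.stack([cx - w / 2, cy - h / 2, cx + w / 2, cy + h / 2], dim=1)
+    return boxes, scores, labels
+```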
+ +
+ + + + +## Citation + + + +```latex +@article{zhou2019objects, + title={Objects as Points}, + author={Zhou, Xingyi and Wang, Dequan and Kr{\"a}henb{\"u}hl, Philipp}, + booktitle={arXiv preprint arXiv:1904.07850}, + year={2019} +} +``` + +## Results and models + +| Backbone | DCN | Mem (GB) | Box AP | Flip box AP| Config | Download | +| :-------------: | :--------: |:----------------: | :------: | :------------: | :----: | :----: | +| ResNet-18 | N | 3.45 | 25.9 | 27.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/centernet/centernet_resnet18_140e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_140e_coco/centernet_resnet18_140e_coco_20210705_093630-bb5b3bf7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_140e_coco/centernet_resnet18_140e_coco_20210705_093630.log.json) | +| ResNet-18 | Y | 3.47 | 29.5 | 30.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/centernet/centernet_resnet18_dcnv2_140e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_dcnv2_140e_coco/centernet_resnet18_dcnv2_140e_coco_20210702_155131-c8cd631f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_dcnv2_140e_coco/centernet_resnet18_dcnv2_140e_coco_20210702_155131.log.json) | + +Note: + +- Flip box AP setting is single-scale and `flip=True`. +- Due to complex data enhancement, we find that the performance is unstable and may fluctuate by about 0.4 mAP. mAP 29.4 ~ 29.8 is acceptable in ResNet-18-DCNv2. +- Compared to the source code, we refer to [CenterNet-Better](https://github.com/FateScript/CenterNet-better), and make the following changes + - fix wrong image mean and variance in image normalization to be compatible with the pre-trained backbone. + - Use SGD rather than ADAM optimizer and add warmup and grad clip. + - Use DistributedDataParallel as other models in MMDetection rather than using DataParallel. 
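+
+For readers unfamiliar with the recipe above, the following toy PyTorch sketch (a stand-in linear model, not the MMDetection runner) shows what "SGD with linear warmup and gradient clipping" amounts to; the warmup and clipping numbers mirror the config in this folder, while the toy model and data are arbitrary.
+
+```python
+import torch
+import torch.nn.functional as F
+
+model = torch.nn.Linear(10, 2)   # toy stand-in for the detector
+optimizer = torch.optim.SGD(model.parameters(), lr=0.02, momentum=0.9, weight_decay=1e-4)
+
+def warmup_lr(it, base_lr=0.02, warmup_iters=1000, warmup_ratio=1.0 / 1000):
+    # linear warmup: start at base_lr * warmup_ratio, reach base_lr after warmup_iters
+    if it >= warmup_iters:
+        return base_lr
+    k = it / warmup_iters
+    return base_lr * (warmup_ratio * (1 - k) + k)
+
+data = [(torch.randn(4, 10), torch.randint(0, 2, (4,))) for _ in range(5)]
+for it, (x, y) in enumerate(data):
+    for group in optimizer.param_groups:
+        group['lr'] = warmup_lr(it)
+    loss = F.cross_entropy(model(x), y)
+    optimizer.zero_grad()
+    loss.backward()
+    # grad_clip=dict(max_norm=35, norm_type=2) in the config corresponds to:
+    torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=35, norm_type=2)
+    optimizer.step()
+```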
diff --git a/object_detection/configs/centernet/centernet_resnet18_140e_coco.py b/object_detection/configs/centernet/centernet_resnet18_140e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..52c86a5eca27086dbc5ee2449aca749c550e852f --- /dev/null +++ b/object_detection/configs/centernet/centernet_resnet18_140e_coco.py @@ -0,0 +1,3 @@ +_base_ = './centernet_resnet18_dcnv2_140e_coco.py' + +model = dict(neck=dict(use_dcn=False)) diff --git a/object_detection/configs/centernet/centernet_resnet18_dcnv2_140e_coco.py b/object_detection/configs/centernet/centernet_resnet18_dcnv2_140e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9eb1db748b00e3f9e8d964298a082f8e1e77c011 --- /dev/null +++ b/object_detection/configs/centernet/centernet_resnet18_dcnv2_140e_coco.py @@ -0,0 +1,122 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='CenterNet', + backbone=dict( + type='ResNet', + depth=18, + norm_eval=False, + norm_cfg=dict(type='BN'), + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet18')), + neck=dict( + type='CTResNetNeck', + in_channel=512, + num_deconv_filters=(256, 128, 64), + num_deconv_kernels=(4, 4, 4), + use_dcn=True), + bbox_head=dict( + type='CenterNetHead', + num_classes=80, + in_channel=64, + feat_channel=64, + loss_center_heatmap=dict(type='GaussianFocalLoss', loss_weight=1.0), + loss_wh=dict(type='L1Loss', loss_weight=0.1), + loss_offset=dict(type='L1Loss', loss_weight=1.0)), + train_cfg=None, + test_cfg=dict(topk=100, local_maximum_kernel=3, max_per_img=100)) + +# We fixed the incorrect img_norm_cfg problem in the source code. +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True, color_type='color'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(512, 512), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + mean=[0, 0, 0], + std=[1, 1, 1], + to_rgb=True, + test_pad_mode=None), + dict(type='Resize', img_scale=(512, 512), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict( + type='RandomCenterCropPad', + ratios=None, + border=None, + mean=[0, 0, 0], + std=[1, 1, 1], + to_rgb=True, + test_mode=True, + test_pad_mode=['logical_or', 31], + test_pad_add_pix=1), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'flip_direction', + 'img_norm_cfg', 'border'), + keys=['img']) + ]) +] + +dataset_type = 'CocoDataset' +data_root = 'data/coco/' + +# Use RepeatDataset to speed up training +data = dict( + samples_per_gpu=16, + workers_per_gpu=4, + train=dict( + _delete_=True, + type='RepeatDataset', + times=5, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', 
+ img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +# optimizer +# Based on the default settings of modern detectors, the SGD effect is better +# than the Adam in the source code, so we use SGD default settings and +# if you use adam+lr5e-4, the map is 29.1. +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) + +# learning policy +# Based on the default settings of modern detectors, we added warmup settings. +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=1.0 / 1000, + step=[18, 24]) # the real step is [18*5, 24*5] +runner = dict(max_epochs=28) # the real epoch is 28*5=140 diff --git a/object_detection/configs/centernet/metafile.yml b/object_detection/configs/centernet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..e86e57b54e51bff24f0f582f84711fad91e75a62 --- /dev/null +++ b/object_detection/configs/centernet/metafile.yml @@ -0,0 +1,46 @@ +Collections: + - Name: CenterNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x TITANXP GPUs + Architecture: + - ResNet + Paper: + URL: https://arxiv.org/abs/1904.07850 + Title: 'Objects as Points' + README: configs/centernet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.13.0/mmdet/models/detectors/centernet.py#L10 + Version: v2.13.0 + +Models: + - Name: centernet_resnet18_dcnv2_140e_coco + In Collection: CenterNet + Config: configs/centernet/centernet_resnet18_dcnv2_140e_coco.py + Metadata: + Batch Size: 128 + Training Memory (GB): 3.47 + Epochs: 140 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 29.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_dcnv2_140e_coco/centernet_resnet18_dcnv2_140e_coco_20210702_155131-c8cd631f.pth + + - Name: centernet_resnet18_140e_coco + In Collection: CenterNet + Config: configs/centernet/centernet_resnet18_140e_coco.py + Metadata: + Batch Size: 128 + Training Memory (GB): 3.45 + Epochs: 140 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 25.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_140e_coco/centernet_resnet18_140e_coco_20210705_093630-bb5b3bf7.pth diff --git a/object_detection/configs/centripetalnet/README.md b/object_detection/configs/centripetalnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..f3d22a57ab43450b61578ad91a6bbad75f9b1cf3 --- /dev/null +++ b/object_detection/configs/centripetalnet/README.md @@ -0,0 +1,40 @@ +# CentripetalNet: Pursuing High-quality Keypoint Pairs for Object Detection + +## Abstract + + + +Keypoint-based detectors have achieved pretty-well performance. However, incorrect keypoint matching is still widespread and greatly affects the performance of the detector. In this paper, we propose CentripetalNet which uses centripetal shift to pair corner keypoints from the same instance. CentripetalNet predicts the position and the centripetal shift of the corner points and matches corners whose shifted results are aligned. Combining position information, our approach matches corner points more accurately than the conventional embedding approaches do. Corner pooling extracts information inside the bounding boxes onto the border. 
To make the corners more aware of this information, we design a cross-star deformable convolution network to conduct feature adaptation. Furthermore, we explore instance segmentation on anchor-free detectors by equipping our CentripetalNet with a mask prediction module. On MS-COCO test-dev, our CentripetalNet not only outperforms all existing anchor-free detectors with an AP of 48.0% but also achieves performance comparable to state-of-the-art instance segmentation approaches with a 40.2% mask AP. + + +
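+
+A toy sketch of the pairing rule described above (illustrative only: shapes, thresholds, and the handling of the central region differ from the paper and from the `CentripetalHead` configured below): a top-left and a bottom-right corner are kept as a pair when the points they are centripetally shifted to, i.e. the centers they each predict, land close together.
+
+```python
+import torch
+
+def match_corners(tl_pts, tl_shift, br_pts, br_shift, dist_thr=0.5):
+    # tl_pts, br_pts: (N, 2) corner coordinates; *_shift: (N, 2) predicted centripetal shifts
+    tl_center = tl_pts + tl_shift             # where each top-left corner claims the center is
+    br_center = br_pts + br_shift             # same for the bottom-right corners
+    dist = torch.cdist(tl_center, br_center)  # (N_tl, N_br) disagreement between the two claims
+    diag = torch.cdist(tl_pts, br_pts)        # box diagonal, used to scale the threshold
+    valid = (tl_pts[:, None, 0] < br_pts[None, :, 0]) & (tl_pts[:, None, 1] < br_pts[None, :, 1])
+    return ((dist < dist_thr * diag) & valid).nonzero()  # (top-left idx, bottom-right idx) pairs
+
+tl, br = torch.tensor([[10., 10.]]), torch.tensor([[50., 60.]])
+print(match_corners(tl, torch.tensor([[20., 25.]]), br, torch.tensor([[-20., -25.]])))  # tensor([[0, 0]])
+```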
+ +
+ + + + +## Citation + + + +```latex +@InProceedings{Dong_2020_CVPR, +author = {Dong, Zhiwei and Li, Guoxuan and Liao, Yue and Wang, Fei and Ren, Pengju and Qian, Chen}, +title = {CentripetalNet: Pursuing High-Quality Keypoint Pairs for Object Detection}, +booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, +month = {June}, +year = {2020} +} +``` + +## Results and models + +| Backbone | Batch Size | Step/Total Epochs | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :--------: |:----------------: | :------: | :------------: | :----: | :------: | :--------: | +| HourglassNet-104 | [16 x 6](./centripetalnet_hourglass104_mstest_16x6_210e_coco.py) | 190/210 | 16.7 | 3.7 | 44.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco/centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804-3ccc61e5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco/centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804.log.json) | + +Note: + +- TTA setting is single-scale and `flip=True`. +- The model we released is the best checkpoint rather than the latest checkpoint (box AP 44.8 vs 44.6 in our experiment). diff --git a/object_detection/configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py b/object_detection/configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e9c5defd1cda850f9702c05a86e0671880ef5e38 --- /dev/null +++ b/object_detection/configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py @@ -0,0 +1,105 @@ +_base_ = [ + '../_base_/default_runtime.py', '../_base_/datasets/coco_detection.py' +] + +# model settings +model = dict( + type='CornerNet', + backbone=dict( + type='HourglassNet', + downsample_times=5, + num_stacks=2, + stage_channels=[256, 256, 384, 384, 384, 512], + stage_blocks=[2, 2, 2, 2, 2, 4], + norm_cfg=dict(type='BN', requires_grad=True)), + neck=None, + bbox_head=dict( + type='CentripetalHead', + num_classes=80, + in_channels=256, + num_feat_levels=2, + corner_emb_channels=0, + loss_heatmap=dict( + type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1), + loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1), + loss_guiding_shift=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=0.05), + loss_centripetal_shift=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=1)), + # training and testing settings + train_cfg=None, + test_cfg=dict( + corner_topk=100, + local_maximum_kernel=3, + distance_threshold=0.5, + score_thr=0.05, + max_per_img=100, + nms=dict(type='soft_nms', iou_threshold=0.5, method='gaussian'))) +# data settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + test_mode=False, + test_pad_mode=None, + **img_norm_cfg), + dict(type='Resize', 
img_scale=(511, 511), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=True, + transforms=[ + dict(type='Resize'), + dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + test_mode=True, + test_pad_mode=['logical_or', 127], + **img_norm_cfg), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict( + type='Collect', + keys=['img'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'img_norm_cfg', 'border')), + ]) +] +data = dict( + samples_per_gpu=6, + workers_per_gpu=3, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='Adam', lr=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[190]) +runner = dict(type='EpochBasedRunner', max_epochs=210) diff --git a/object_detection/configs/centripetalnet/metafile.yml b/object_detection/configs/centripetalnet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..61aed3e58571e78fe2d604dd6a4abc69f19a3988 --- /dev/null +++ b/object_detection/configs/centripetalnet/metafile.yml @@ -0,0 +1,39 @@ +Collections: + - Name: CentripetalNet + Metadata: + Training Data: COCO + Training Techniques: + - Adam + Training Resources: 16x V100 GPUs + Architecture: + - Corner Pooling + - Stacked Hourglass Network + Paper: + URL: https://arxiv.org/abs/2003.09119 + Title: 'CentripetalNet: Pursuing High-quality Keypoint Pairs for Object Detection' + README: configs/centripetalnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.5.0/mmdet/models/detectors/cornernet.py#L9 + Version: v2.5.0 + +Models: + - Name: centripetalnet_hourglass104_mstest_16x6_210e_coco + In Collection: CentripetalNet + Config: configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py + Metadata: + Batch Size: 96 + Training Memory (GB): 16.7 + inference time (ms/im): + - value: 270.27 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 210 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco/centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804-3ccc61e5.pth diff --git a/object_detection/configs/cityscapes/README.md b/object_detection/configs/cityscapes/README.md new file mode 100644 index 0000000000000000000000000000000000000000..28310f15a9ec70e2b5f7d5d6962abe30560a08ea --- /dev/null +++ b/object_detection/configs/cityscapes/README.md @@ -0,0 +1,50 @@ +# The Cityscapes Dataset for Semantic Urban Scene Understanding + +## Abstract + + + +Visual understanding of complex urban street scenes is an enabling factor for a wide range of applications. Object detection has benefited enormously from large-scale datasets, especially in the context of deep learning. 
For semantic urban scene understanding, however, no current dataset adequately captures the complexity of real-world urban scenes. +To address this, we introduce Cityscapes, a benchmark suite and large-scale dataset to train and test approaches for pixel-level and instance-level semantic labeling. Cityscapes is comprised of a large, diverse set of stereo video sequences recorded in streets from 50 different cities. 5000 of these images have high quality pixel-level annotations; 20000 additional images have coarse annotations to enable methods that leverage large volumes of weakly-labeled data. Crucially, our effort exceeds previous attempts in terms of dataset size, annotation richness, scene variability, and complexity. Our accompanying empirical study provides an in-depth analysis of the dataset characteristics, as well as a performance evaluation of several state-of-the-art approaches based on our benchmark. + + +
+ +
+ + + + +## Citation + + + +``` +@inproceedings{Cordts2016Cityscapes, + title={The Cityscapes Dataset for Semantic Urban Scene Understanding}, + author={Cordts, Marius and Omran, Mohamed and Ramos, Sebastian and Rehfeld, Timo and Enzweiler, Markus and Benenson, Rodrigo and Franke, Uwe and Roth, Stefan and Schiele, Bernt}, + booktitle={Proc. of the IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + year={2016} +} +``` + +## Common settings + +- All baselines were trained using 8 GPU with a batch size of 8 (1 images per GPU) using the [linear scaling rule](https://arxiv.org/abs/1706.02677) to scale the learning rate. +- All models were trained on `cityscapes_train`, and tested on `cityscapes_val`. +- 1x training schedule indicates 64 epochs which corresponds to slightly less than the 24k iterations reported in the original schedule from the [Mask R-CNN paper](https://arxiv.org/abs/1703.06870) +- COCO pre-trained weights are used to initialize. +- A conversion [script](../../tools/dataset_converters/cityscapes.py) is provided to convert Cityscapes into COCO format. Please refer to [install.md](../../docs/1_exist_data_model.md#prepare-datasets) for details. +- `CityscapesDataset` implemented three evaluation methods. `bbox` and `segm` are standard COCO bbox/mask AP. `cityscapes` is the cityscapes dataset official evaluation, which may be slightly higher than COCO. + +### Faster R-CNN + +| Backbone | Style | Lr schd | Scale | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :---: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | pytorch | 1x | 800-1024 | 5.2 | - | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes_20200502-829424c0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes_20200502_114915.log.json) | + +### Mask R-CNN + +| Backbone | Style | Lr schd | Scale | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------: | :------------: | :----: | :-----: | :------: | :------: | +| R-50-FPN | pytorch | 1x | 800-1024 | 5.3 | - | 40.9 | 36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes/mask_rcnn_r50_fpn_1x_cityscapes_20201211_133733-d2858245.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes/mask_rcnn_r50_fpn_1x_cityscapes_20201211_133733.log.json) | diff --git a/object_detection/configs/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py b/object_detection/configs/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py new file mode 100644 index 0000000000000000000000000000000000000000..c6da80ce85fa02a70b884bcc1900ea06d4478a38 --- /dev/null +++ b/object_detection/configs/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py @@ -0,0 +1,39 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/cityscapes_detection.py', + '../_base_/default_runtime.py' +] +model = dict( + backbone=dict(init_cfg=None), + roi_head=dict( + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=8, + bbox_coder=dict( + 
type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)))) +# optimizer +# lr is set for a batch size of 8 +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + # [7] yields higher performance than [6] + step=[7]) +runner = dict( + type='EpochBasedRunner', max_epochs=8) # actual epoch = 8 * 8 = 64 +log_config = dict(interval=100) +# For better, more stable performance initialize from COCO +load_from = 'https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth' # noqa diff --git a/object_detection/configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py b/object_detection/configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py new file mode 100644 index 0000000000000000000000000000000000000000..679890d8a03cf1c9858ddf114fe4385a9d30c941 --- /dev/null +++ b/object_detection/configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py @@ -0,0 +1,46 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/cityscapes_instance.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict(init_cfg=None), + roi_head=dict( + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=8, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + mask_head=dict( + type='FCNMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=8, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)))) +# optimizer +# lr is set for a batch size of 8 +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + # [7] yields higher performance than [6] + step=[7]) +runner = dict( + type='EpochBasedRunner', max_epochs=8) # actual epoch = 8 * 8 = 64 +log_config = dict(interval=100) +# For better, more stable performance initialize from COCO +load_from = 'https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth' # noqa diff --git a/object_detection/configs/common/lsj_100e_coco_instance.py b/object_detection/configs/common/lsj_100e_coco_instance.py new file mode 100644 index 0000000000000000000000000000000000000000..cacf23d74c344c39ed511c5d7183cee490d20ee7 --- /dev/null +++ b/object_detection/configs/common/lsj_100e_coco_instance.py @@ -0,0 +1,90 @@ +_base_ = '../_base_/default_runtime.py' +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +image_size = (1024, 1024) + +file_client_args = dict(backend='disk') +# comment out the code below to use different file client +# file_client_args = dict( +# backend='petrel', +# path_mapping=dict({ 
+# './data/': 's3://openmmlab/datasets/detection/', +# 'data/': 's3://openmmlab/datasets/detection/' +# })) + +train_pipeline = [ + dict(type='LoadImageFromFile', file_client_args=file_client_args), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=image_size, + ratio_range=(0.1, 2.0), + multiscale_mode='range', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=image_size, + recompute_bbox=True, + allow_negative_crop=True), + dict(type='FilterAnnotations', min_gt_bbox_wh=(1e-2, 1e-2)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=image_size), # padding to image_size leads 0.5+ mAP + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', file_client_args=file_client_args), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=4, # simply change this from 2 to 16 for 50e - 400e training. + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=5, metric=['bbox', 'segm']) + +# optimizer assumes bs=64 +optimizer = dict(type='SGD', lr=0.1, momentum=0.9, weight_decay=0.00004) +optimizer_config = dict(grad_clip=None) + +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.067, + step=[22, 24]) +runner = dict(type='EpochBasedRunner', max_epochs=25) diff --git a/object_detection/configs/common/mstrain-poly_3x_coco_instance.py b/object_detection/configs/common/mstrain-poly_3x_coco_instance.py new file mode 100644 index 0000000000000000000000000000000000000000..c22ed9457197be61ec76117568f2351575573d43 --- /dev/null +++ b/object_detection/configs/common/mstrain-poly_3x_coco_instance.py @@ -0,0 +1,80 @@ +_base_ = '../_base_/default_runtime.py' +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( 
+ type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric=['bbox', 'segm']) + +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) + +# learning policy +# Experiments show that using step=[9, 11] has higher performance +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[9, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/object_detection/configs/common/mstrain_3x_coco.py b/object_detection/configs/common/mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..80ec8b8dbf0f76a99395bf615b6f2a60cafdd7e5 --- /dev/null +++ b/object_detection/configs/common/mstrain_3x_coco.py @@ -0,0 +1,76 @@ +_base_ = '../_base_/default_runtime.py' +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='bbox') + +# optimizer +optimizer = 
dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) + +# learning policy +# Experiments show that using step=[9, 11] has higher performance +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[9, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/object_detection/configs/common/mstrain_3x_coco_instance.py b/object_detection/configs/common/mstrain_3x_coco_instance.py new file mode 100644 index 0000000000000000000000000000000000000000..50f39bef3fe3c6e0f99259135745e89e000745ea --- /dev/null +++ b/object_detection/configs/common/mstrain_3x_coco_instance.py @@ -0,0 +1,76 @@ +_base_ = '../_base_/default_runtime.py' +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric=['bbox', 'segm']) + +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) + +# learning policy +# Experiments show that using step=[9, 11] has higher performance +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[9, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/object_detection/configs/cornernet/README.md b/object_detection/configs/cornernet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d7dc08c4aa97678eca9e45d49c1e94ec93beff5e --- /dev/null +++ b/object_detection/configs/cornernet/README.md @@ -0,0 +1,47 @@ +# Cornernet: Detecting objects as paired keypoints + +## Abstract + + + +We propose CornerNet, a new approach to object detection where we detect an object bounding box as a pair of keypoints, the top-left corner and the bottom-right corner, using a single convolution neural network. 
By detecting objects as paired keypoints, we eliminate the need for designing a set of anchor boxes commonly used in prior single-stage detectors. In addition to our novel formulation, we introduce corner pooling, a new type of pooling layer that helps the network better localize corners. Experiments show that CornerNet achieves a 42.2% AP on MS COCO, outperforming all existing one-stage detectors. + + +
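+
+Corner pooling, mentioned above, can be sketched in a few lines (an illustration of the top-left branch only, not the optimized op used by the `CornerHead` configured below): every location takes the maximum response to its right and the maximum response below it, so evidence for an object's top and left boundaries accumulates at its top-left corner.
+
+```python
+import torch
+
+def top_left_corner_pool(x):
+    # x: (N, C, H, W) feature map
+    # suffix max along the width  -> "look horizontally towards the right"
+    right = torch.flip(torch.cummax(torch.flip(x, dims=[3]), dim=3).values, dims=[3])
+    # suffix max along the height -> "look vertically towards the bottom"
+    below = torch.flip(torch.cummax(torch.flip(x, dims=[2]), dim=2).values, dims=[2])
+    return right + below
+
+feat = torch.rand(2, 8, 16, 16)
+print(top_left_corner_pool(feat).shape)  # torch.Size([2, 8, 16, 16])
+```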
+ +
+ + + + +## Citation + + + +```latex +@inproceedings{law2018cornernet, + title={Cornernet: Detecting objects as paired keypoints}, + author={Law, Hei and Deng, Jia}, + booktitle={15th European Conference on Computer Vision, ECCV 2018}, + pages={765--781}, + year={2018}, + organization={Springer Verlag} +} +``` + +## Results and models + +| Backbone | Batch Size | Step/Total Epochs | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :--------: |:----------------: | :------: | :------------: | :----: | :------: | :--------: | +| HourglassNet-104 | [10 x 5](./cornernet_hourglass104_mstest_10x5_210e_coco.py) | 180/210 | 13.9 | 4.2 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco/cornernet_hourglass104_mstest_10x5_210e_coco_20200824_185720-5fefbf1c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco/cornernet_hourglass104_mstest_10x5_210e_coco_20200824_185720.log.json) | +| HourglassNet-104 | [8 x 6](./cornernet_hourglass104_mstest_8x6_210e_coco.py) | 180/210 | 15.9 | 4.2 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco/cornernet_hourglass104_mstest_8x6_210e_coco_20200825_150618-79b44c30.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco/cornernet_hourglass104_mstest_8x6_210e_coco_20200825_150618.log.json) | +| HourglassNet-104 | [32 x 3](./cornernet_hourglass104_mstest_32x3_210e_coco.py) | 180/210 | 9.5 | 3.9 | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco/cornernet_hourglass104_mstest_32x3_210e_coco_20200819_203110-1efaea91.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco/cornernet_hourglass104_mstest_32x3_210e_coco_20200819_203110.log.json) | + +Note: + +- TTA setting is single-scale and `flip=True`. +- Experiments with `images_per_gpu=6` are conducted on Tesla V100-SXM2-32GB, `images_per_gpu=3` are conducted on GeForce GTX 1080 Ti. +- Here are the descriptions of each experiment setting: + - 10 x 5: 10 GPUs with 5 images per gpu. This is the same setting as that reported in the original paper. + - 8 x 6: 8 GPUs with 6 images per gpu. The total batchsize is similar to paper and only need 1 node to train. + - 32 x 3: 32 GPUs with 3 images per gpu. The default setting for 1080TI and need 4 nodes to train. 
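+
+As a usage sketch (hedged: it assumes an mmdet 2.x installation, a checkpoint downloaded from the table above, and an arbitrary local test image named `demo.jpg`), inference with one of these configs follows the standard MMDetection API:
+
+```python
+from mmdet.apis import init_detector, inference_detector, show_result_pyplot
+
+# config shipped in this folder; checkpoint file name taken from the table above
+cfg = 'object_detection/configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py'
+ckpt = 'cornernet_hourglass104_mstest_8x6_210e_coco_20200825_150618-79b44c30.pth'
+
+model = init_detector(cfg, ckpt, device='cuda:0')
+result = inference_detector(model, 'demo.jpg')               # per-class list of (n, 5) boxes
+show_result_pyplot(model, 'demo.jpg', result, score_thr=0.3)
+```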
diff --git a/object_detection/configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py b/object_detection/configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..89f387641207512ae1b1c91ca56965004e5eb868 --- /dev/null +++ b/object_detection/configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py @@ -0,0 +1,105 @@ +_base_ = [ + '../_base_/default_runtime.py', '../_base_/datasets/coco_detection.py' +] + +# model settings +model = dict( + type='CornerNet', + backbone=dict( + type='HourglassNet', + downsample_times=5, + num_stacks=2, + stage_channels=[256, 256, 384, 384, 384, 512], + stage_blocks=[2, 2, 2, 2, 2, 4], + norm_cfg=dict(type='BN', requires_grad=True)), + neck=None, + bbox_head=dict( + type='CornerHead', + num_classes=80, + in_channels=256, + num_feat_levels=2, + corner_emb_channels=1, + loss_heatmap=dict( + type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1), + loss_embedding=dict( + type='AssociativeEmbeddingLoss', + pull_weight=0.10, + push_weight=0.10), + loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1)), + # training and testing settings + train_cfg=None, + test_cfg=dict( + corner_topk=100, + local_maximum_kernel=3, + distance_threshold=0.5, + score_thr=0.05, + max_per_img=100, + nms=dict(type='soft_nms', iou_threshold=0.5, method='gaussian'))) +# data settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + test_mode=False, + test_pad_mode=None, + **img_norm_cfg), + dict(type='Resize', img_scale=(511, 511), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=True, + transforms=[ + dict(type='Resize'), + dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + test_mode=True, + test_pad_mode=['logical_or', 127], + **img_norm_cfg), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict( + type='Collect', + keys=['img'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'img_norm_cfg', 'border')), + ]) +] +data = dict( + samples_per_gpu=5, + workers_per_gpu=3, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='Adam', lr=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[180]) +runner = dict(type='EpochBasedRunner', max_epochs=210) diff --git a/object_detection/configs/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py b/object_detection/configs/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..873d59844f4b487a32186b0c6fd5ffea6459b373 --- /dev/null +++ b/object_detection/configs/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py @@ -0,0 +1,105 @@ +_base_ = [ + '../_base_/default_runtime.py', '../_base_/datasets/coco_detection.py' +] + +# model settings +model = dict( + type='CornerNet', + backbone=dict( + type='HourglassNet', + downsample_times=5, + num_stacks=2, + stage_channels=[256, 256, 384, 384, 384, 512], + stage_blocks=[2, 2, 2, 2, 2, 4], + norm_cfg=dict(type='BN', requires_grad=True)), + neck=None, + bbox_head=dict( + type='CornerHead', + num_classes=80, + in_channels=256, + num_feat_levels=2, + corner_emb_channels=1, + loss_heatmap=dict( + type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1), + loss_embedding=dict( + type='AssociativeEmbeddingLoss', + pull_weight=0.10, + push_weight=0.10), + loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1)), + # training and testing settings + train_cfg=None, + test_cfg=dict( + corner_topk=100, + local_maximum_kernel=3, + distance_threshold=0.5, + score_thr=0.05, + max_per_img=100, + nms=dict(type='soft_nms', iou_threshold=0.5, method='gaussian'))) +# data settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + test_mode=False, + test_pad_mode=None, + **img_norm_cfg), + dict(type='Resize', img_scale=(511, 511), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=True, + transforms=[ + dict(type='Resize'), + dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + test_mode=True, + test_pad_mode=['logical_or', 127], + **img_norm_cfg), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict( + type='Collect', + keys=['img'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'img_norm_cfg', 'border')), + ]) +] +data = dict( + samples_per_gpu=3, + workers_per_gpu=3, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='Adam', lr=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[180]) +runner = dict(type='EpochBasedRunner', max_epochs=210) diff --git a/object_detection/configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py b/object_detection/configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ef749ccc8ddafd84da852c56821d7624a0111eb2 --- /dev/null +++ b/object_detection/configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py @@ -0,0 +1,105 @@ +_base_ = [ + '../_base_/default_runtime.py', 
'../_base_/datasets/coco_detection.py' +] + +# model settings +model = dict( + type='CornerNet', + backbone=dict( + type='HourglassNet', + downsample_times=5, + num_stacks=2, + stage_channels=[256, 256, 384, 384, 384, 512], + stage_blocks=[2, 2, 2, 2, 2, 4], + norm_cfg=dict(type='BN', requires_grad=True)), + neck=None, + bbox_head=dict( + type='CornerHead', + num_classes=80, + in_channels=256, + num_feat_levels=2, + corner_emb_channels=1, + loss_heatmap=dict( + type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1), + loss_embedding=dict( + type='AssociativeEmbeddingLoss', + pull_weight=0.10, + push_weight=0.10), + loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1)), + # training and testing settings + train_cfg=None, + test_cfg=dict( + corner_topk=100, + local_maximum_kernel=3, + distance_threshold=0.5, + score_thr=0.05, + max_per_img=100, + nms=dict(type='soft_nms', iou_threshold=0.5, method='gaussian'))) +# data settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + test_mode=False, + test_pad_mode=None, + **img_norm_cfg), + dict(type='Resize', img_scale=(511, 511), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=True, + transforms=[ + dict(type='Resize'), + dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + test_mode=True, + test_pad_mode=['logical_or', 127], + **img_norm_cfg), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict( + type='Collect', + keys=['img'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'img_norm_cfg', 'border')), + ]) +] +data = dict( + samples_per_gpu=6, + workers_per_gpu=3, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='Adam', lr=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[180]) +runner = dict(type='EpochBasedRunner', max_epochs=210) diff --git a/object_detection/configs/cornernet/metafile.yml b/object_detection/configs/cornernet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..c2f6143a74a36a59c7b54531212cfc51b79636bc --- /dev/null +++ b/object_detection/configs/cornernet/metafile.yml @@ -0,0 +1,83 @@ +Collections: + - Name: CornerNet + Metadata: + Training Data: COCO + Training Techniques: + - Adam + Training Resources: 8x V100 GPUs + Architecture: + - Corner Pooling + - Stacked Hourglass Network + Paper: + URL: https://arxiv.org/abs/1808.01244 + Title: 'CornerNet: Detecting Objects as Paired Keypoints' + README: configs/cornernet/README.md + Code: + URL: 
https://github.com/open-mmlab/mmdetection/blob/v2.3.0/mmdet/models/detectors/cornernet.py#L9 + Version: v2.3.0 + +Models: + - Name: cornernet_hourglass104_mstest_10x5_210e_coco + In Collection: CornerNet + Config: configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py + Metadata: + Training Resources: 10x V100 GPUs + Batch Size: 50 + Training Memory (GB): 13.9 + inference time (ms/im): + - value: 238.1 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 210 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco/cornernet_hourglass104_mstest_10x5_210e_coco_20200824_185720-5fefbf1c.pth + + - Name: cornernet_hourglass104_mstest_8x6_210e_coco + In Collection: CornerNet + Config: configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py + Metadata: + Batch Size: 48 + Training Memory (GB): 15.9 + inference time (ms/im): + - value: 238.1 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 210 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco/cornernet_hourglass104_mstest_8x6_210e_coco_20200825_150618-79b44c30.pth + + - Name: cornernet_hourglass104_mstest_32x3_210e_coco + In Collection: CornerNet + Config: configs/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py + Metadata: + Training Resources: 32x V100 GPUs + Batch Size: 96 + Training Memory (GB): 9.5 + inference time (ms/im): + - value: 256.41 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 210 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco/cornernet_hourglass104_mstest_32x3_210e_coco_20200819_203110-1efaea91.pth diff --git a/object_detection/configs/dcn/README.md b/object_detection/configs/dcn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d9d23f07b0668cdb4c2575b71a7b84b596c2710d --- /dev/null +++ b/object_detection/configs/dcn/README.md @@ -0,0 +1,68 @@ +# Deformable Convolutional Networks + +## Abstract + + + +Convolutional neural networks (CNNs) are inherently limited to model geometric transformations due to the fixed geometric structures in its building modules. In this work, we introduce two new modules to enhance the transformation modeling capacity of CNNs, namely, deformable convolution and deformable RoI pooling. Both are based on the idea of augmenting the spatial sampling locations in the modules with additional offsets and learning the offsets from target tasks, without additional supervision. The new modules can readily replace their plain counterparts in existing CNNs and can be easily trained end-to-end by standard back-propagation, giving rise to deformable convolutional networks. Extensive experiments validate the effectiveness of our approach on sophisticated vision tasks of object detection and semantic segmentation. + + +
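The deformable convolution described above keeps an ordinary 3x3 kernel but shifts each of its nine sampling taps by per-location offsets predicted from the input. A minimal sketch, assuming a recent torchvision that provides `deform_conv2d` (mmdet ships its own DCN/DCNv2 ops, enabled through the `dcn=dict(...)` entries in the configs further down):

```python
# Minimal deformable-convolution sketch (assumes torchvision's deform_conv2d
# is available). The offsets are themselves predicted by a small conv layer.
import torch
import torch.nn as nn
from torchvision.ops import deform_conv2d

x = torch.randn(1, 64, 32, 32)
weight = torch.randn(128, 64, 3, 3)                    # ordinary 3x3 conv weights
offset_pred = nn.Conv2d(64, 2 * 3 * 3, 3, padding=1)   # (dx, dy) for each of the 9 taps
offsets = offset_pred(x)                               # (1, 18, 32, 32)
y = deform_conv2d(x, offsets, weight, padding=1)       # (1, 128, 32, 32)
# The modulated variant (DCNv2, `mdconv` in the table below) additionally
# predicts a per-tap mask that scales each sampled value.
```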
+ + + + +## Citation + + + +```none +@inproceedings{dai2017deformable, + title={Deformable Convolutional Networks}, + author={Dai, Jifeng and Qi, Haozhi and Xiong, Yuwen and Li, Yi and Zhang, Guodong and Hu, Han and Wei, Yichen}, + booktitle={Proceedings of the IEEE international conference on computer vision}, + year={2017} +} +``` + + + +``` +@article{zhu2018deformable, + title={Deformable ConvNets v2: More Deformable, Better Results}, + author={Zhu, Xizhou and Hu, Han and Lin, Stephen and Dai, Jifeng}, + journal={arXiv preprint arXiv:1811.11168}, + year={2018} +} +``` + +## Results and Models + +| Backbone | Model | Style | Conv | Pool | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:----------------:|:------------:|:-------:|:-------------:|:------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | Faster | pytorch | dconv(c3-c5) | - | 1x | 4.0 | 17.8 | 41.3 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130-d68aed1e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130_212941.log.json) | +| R-50-FPN | Faster | pytorch | mdconv(c3-c5) | - | 1x | 4.1 | 17.6 | 41.4 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200130-d099253b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200130_222144.log.json) | +| *R-50-FPN (dg=4) | Faster | pytorch | mdconv(c3-c5) | - | 1x | 4.2 | 17.4 | 41.5 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco_20200130-01262257.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco_20200130_222058.log.json) | +| R-50-FPN | Faster | pytorch | - | dpool | 1x | 5.0 | 17.2 | 38.9 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dpool_1x_coco/faster_rcnn_r50_fpn_dpool_1x_coco_20200307-90d3c01d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dpool_1x_coco/faster_rcnn_r50_fpn_dpool_1x_coco_20200307_203250.log.json) | +| R-50-FPN | Faster | pytorch | - | mdpool | 1x | 5.8 | 16.6 | 38.7 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco/faster_rcnn_r50_fpn_mdpool_1x_coco_20200307-c0df27ff.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco/faster_rcnn_r50_fpn_mdpool_1x_coco_20200307_203304.log.json) | +| R-101-FPN | Faster | pytorch | 
dconv(c3-c5) | - | 1x | 6.0 | 12.5 | 42.7 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203-1377f13d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203_230019.log.json) | +| X-101-32x4d-FPN | Faster | pytorch | dconv(c3-c5) | - | 1x | 7.3 | 10.0 | 44.5 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco_20200203-4f85c69c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco_20200203_001325.log.json) | +| R-50-FPN | Mask | pytorch | dconv(c3-c5) | - | 1x | 4.5 | 15.4 | 41.8 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200203-4d9ad43b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200203_061339.log.json) | +| R-50-FPN | Mask | pytorch | mdconv(c3-c5) | - | 1x | 4.5 | 15.1 | 41.5 | 37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200203-ad97591f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200203_063443.log.json) | +| R-101-FPN | Mask | pytorch | dconv(c3-c5) | - | 1x | 6.5 | 11.7 | 43.5 | 38.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200216-a71f5bce.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200216_191601.log.json) | +| R-50-FPN | Cascade | pytorch | dconv(c3-c5) | - | 1x | 4.5 | 14.6 | 43.8 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130-2f1fca44.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130_220843.log.json) | +| R-101-FPN | Cascade | pytorch | dconv(c3-c5) | - | 1x | 6.4 | 11.0 | 45.0 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203-3b2f0594.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203_224829.log.json) | +| R-50-FPN | Cascade Mask | pytorch | dconv(c3-c5) | - | 1x | 6.0 | 10.0 | 44.4 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200202-42e767a2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200202_010309.log.json) | +| R-101-FPN | Cascade Mask | pytorch | dconv(c3-c5) | - | 1x | 8.0 | 8.6 | 45.8 | 39.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200204-df0c5f10.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200204_134006.log.json) | +| X-101-32x4d-FPN | Cascade Mask | pytorch | dconv(c3-c5) | - | 1x | 9.2 | | 47.3 | 41.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco-e75f90c8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco-20200606_183737.log.json) | +| R-50-FPN (FP16) | Mask | pytorch | dconv(c3-c5) | - | 1x | 3.0 | | 41.9 | 37.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fp16/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco_20210520_180247-c06429d2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco_20210520_180247.log.json) | +| R-50-FPN (FP16) | Mask | pytorch | mdconv(c3-c5)| - | 1x | 3.1 | | 42.0 | 37.6 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fp16/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco_20210520_180434-cf8fefa5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco_20210520_180434.log.json) | + +**Notes:** + +- `dconv` and `mdconv` denote (modulated) deformable convolution, `c3-c5` means adding dconv in resnet stage 3 to 5. `dpool` and `mdpool` denote (modulated) deformable roi pooling. 
+- The dcn ops are modified from https://github.com/chengdazhi/Deformable-Convolution-V2-PyTorch, which should be more memory efficient and slightly faster. +- (*) For R-50-FPN (dg=4), dg is short for deformable_group. This model is trained and tested on Amazon EC2 p3dn.24xlarge instance. +- **Memory, Train/Inf time is outdated.** diff --git a/object_detection/configs/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py b/object_detection/configs/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..081b998f6f54d3d805dbab38b26750a378c0d93f --- /dev/null +++ b/object_detection/configs/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/object_detection/configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py b/object_detection/configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3b3683af235f46df36d8793e52c2b9c52e0defeb --- /dev/null +++ b/object_detection/configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/object_detection/configs/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py b/object_detection/configs/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..daaa4729c8280107b19107607ec399230713cf93 --- /dev/null +++ b/object_detection/configs/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/object_detection/configs/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py b/object_detection/configs/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a01df33c94e1f8b5f51a51a780b30a77ce99b2c0 --- /dev/null +++ b/object_detection/configs/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/object_detection/configs/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py b/object_detection/configs/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..aa664bd61c78873a74af229caa8f62feca8daa5e --- /dev/null +++ b/object_detection/configs/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/object_detection/configs/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py b/object_detection/configs/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..f5fee7e13cdfd531bf24d7c261e843855124f762 --- /dev/null +++ b/object_detection/configs/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/object_detection/configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py b/object_detection/configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8787088f27a09a3f8fd0d05a1144c0abdedd0a21 --- /dev/null +++ b/object_detection/configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/object_detection/configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py b/object_detection/configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1b695f0e19049dc91b7656d7684df151896b7727 --- /dev/null +++ b/object_detection/configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + _delete_=True, + type='DeformRoIPoolPack', + output_size=7, + output_channels=256), + out_channels=256, + featmap_strides=[4, 8, 16, 32]))) diff --git a/object_detection/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py b/object_detection/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d1bcf3c102fb660641eda2a1398db3df520caa3a --- /dev/null +++ b/object_detection/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/object_detection/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py b/object_detection/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d0ab89c261f970e16a9c4407620bd16a0df9e9e9 --- /dev/null +++ b/object_detection/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=4, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/object_detection/configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py b/object_detection/configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ad7b0346a63dfa3c3ca246b624155fc4fd331a3f --- /dev/null +++ b/object_detection/configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + _delete_=True, + type='ModulatedDeformRoIPoolPack', + output_size=7, + output_channels=256), + out_channels=256, + featmap_strides=[4, 8, 16, 32]))) diff --git 
a/object_detection/configs/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py b/object_detection/configs/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e3bea1950ac8b1227b97d9eacafb208c4724f8eb --- /dev/null +++ b/object_detection/configs/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py b/object_detection/configs/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cb340022ea27f563b8c4a570cf89b5f09e6434cd --- /dev/null +++ b/object_detection/configs/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/object_detection/configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py b/object_detection/configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ababe58dc3fdfbbc6c366f48271db31bf6e2e9e2 --- /dev/null +++ b/object_detection/configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/object_detection/configs/dcn/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py b/object_detection/configs/dcn/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ee5cca7d535bc0a3e181f690a46ab42c42f1b9b1 --- /dev/null +++ b/object_detection/configs/dcn/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) + +fp16 = dict(loss_scale=512.) diff --git a/object_detection/configs/dcn/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py b/object_detection/configs/dcn/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7e21454bd96e4accdf0693d5fc805622f605be7c --- /dev/null +++ b/object_detection/configs/dcn/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) + +fp16 = dict(loss_scale=512.) 
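As the README notes, `dconv(c3-c5)` means the deformable op replaces the regular 3x3 convolutions in ResNet stages c3 through c5; in these configs that is expressed by the four booleans of `stage_with_dcn`, which index the residual stages c2-c5 in order. A tiny illustration of that mapping (illustrative only, not mmdet code):

```python
# Illustrative only: how stage_with_dcn=(False, True, True, True) in the DCN
# configs corresponds to the "dconv(c3-c5)" naming used in the results table.
stage_with_dcn = (False, True, True, True)
for stage_name, use_dcn in zip(('c2', 'c3', 'c4', 'c5'), stage_with_dcn):
    print(f"{stage_name}: {'deformable conv' if use_dcn else 'regular conv'}")
# c2: regular conv
# c3: deformable conv
# c4: deformable conv
# c5: deformable conv
```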
diff --git a/object_detection/configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py b/object_detection/configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5ca2a67cde62bff078b7c4c0d696a585265e4c3a --- /dev/null +++ b/object_detection/configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/object_detection/configs/dcn/metafile.yml b/object_detection/configs/dcn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..7919b8422267a7477edd8360852368336a106361 --- /dev/null +++ b/object_detection/configs/dcn/metafile.yml @@ -0,0 +1,377 @@ +Collections: + - Name: Deformable Convolutional Networks + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Deformable Convolution + Paper: + URL: https://arxiv.org/abs/1811.11168 + Title: "Deformable ConvNets v2: More Deformable, Better Results" + README: configs/dcn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/ops/dcn/deform_conv.py#L15 + Version: v2.0.0 + +Models: + - Name: faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 4.0 + inference time (ms/im): + - value: 56.18 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130-d68aed1e.pth + + - Name: faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 4.1 + inference time (ms/im): + - value: 56.82 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200130-d099253b.pth + + - Name: faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + inference time (ms/im): + - value: 57.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco_20200130-01262257.pth + + - Name: faster_rcnn_r50_fpn_dpool_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py + Metadata: + Training Memory (GB): 5.0 + inference time (ms/im): + - value: 58.14 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + 
Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dpool_1x_coco/faster_rcnn_r50_fpn_dpool_1x_coco_20200307-90d3c01d.pth + + - Name: faster_rcnn_r50_fpn_mdpool_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py + Metadata: + Training Memory (GB): 5.8 + inference time (ms/im): + - value: 60.24 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco/faster_rcnn_r50_fpn_mdpool_1x_coco_20200307-c0df27ff.pth + + - Name: faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 80 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203-1377f13d.pth + + - Name: faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 7.3 + inference time (ms/im): + - value: 100 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco_20200203-4f85c69c.pth + + - Name: mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 4.5 + inference time (ms/im): + - value: 64.94 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200203-4d9ad43b.pth + + - Name: mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 4.5 + inference time (ms/im): + - value: 66.23 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200203-ad97591f.pth + + - Name: mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py + 
Metadata: + Training Techniques: + - SGD with Momentum + - Weight Decay + - Mixed Precision Training + Training Memory (GB): 3.0 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco_20210520_180247-c06429d2.pth + + - Name: mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 3.1 + Training Techniques: + - SGD with Momentum + - Weight Decay + - Mixed Precision Training + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco_20210520_180434-cf8fefa5.pth + + - Name: mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 6.5 + inference time (ms/im): + - value: 85.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200216-a71f5bce.pth + + - Name: cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 4.5 + inference time (ms/im): + - value: 68.49 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130-2f1fca44.pth + + - Name: cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203-3b2f0594.pth + + - Name: cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 100 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.6 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200202-42e767a2.pth + + - Name: cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 8.0 + inference time (ms/im): + - value: 116.28 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200204-df0c5f10.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 9.2 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco-e75f90c8.pth diff --git a/object_detection/configs/deepfashion/README.md b/object_detection/configs/deepfashion/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e2c042f56a90b71dd330b5f21dbb6b9b966d2dcb --- /dev/null +++ b/object_detection/configs/deepfashion/README.md @@ -0,0 +1,74 @@ +# DeepFashion: Powering Robust Clothes Recognition and Retrieval With Rich Annotations + +## Abstract + + + +Recent advances in clothes recognition have been driven by the construction of clothes datasets. Existing datasets are limited in the amount of annotations and are difficult to cope with the various challenges in real-world applications. In this work, we introduce DeepFashion, a large-scale clothes dataset with comprehensive annotations. It contains over 800,000 images, which are richly annotated with massive attributes, clothing landmarks, and correspondence of images taken under different scenarios including store, street snapshot, and consumer. Such rich annotations enable the development of powerful algorithms in clothes recognition and facilitating future researches. To demonstrate the advantages of DeepFashion, we propose a new deep model, namely FashionNet, which learns clothing features by jointly predicting clothing attributes and landmarks. The estimated landmarks are then employed to pool or gate the learned features. It is optimized in an iterative manner. Extensive experiments demonstrate the effectiveness of FashionNet and the usefulness of DeepFashion. + + +
+ + + + +## Introduction + + + +[MMFashion](https://github.com/open-mmlab/mmfashion) develops "fashion parsing and segmentation" module +based on the dataset +[DeepFashion-Inshop](https://drive.google.com/drive/folders/0B7EVK8r0v71pVDZFQXRsMDZCX1E?usp=sharing). +Its annotation follows COCO style. +To use it, you need to first download the data. Note that we only use "img_highres" in this task. +The file tree should be like this: + +```sh +mmdetection +├── mmdet +├── tools +├── configs +├── data +│ ├── DeepFashion +│ │ ├── In-shop +│ │ ├── Anno +│ │ │   ├── segmentation +│ │ │   | ├── DeepFashion_segmentation_train.json +│ │ │   | ├── DeepFashion_segmentation_query.json +│ │ │   | ├── DeepFashion_segmentation_gallery.json +│ │ │   ├── list_bbox_inshop.txt +│ │ │   ├── list_description_inshop.json +│ │ │   ├── list_item_inshop.txt +│ │ │   └── list_landmarks_inshop.txt +│ │ ├── Eval +│ │ │ └── list_eval_partition.txt +│ │ ├── Img +│ │ │ ├── img +│ │ │ │ ├──XXX.jpg +│ │ │ ├── img_highres +│ │ │ └── ├──XXX.jpg + +``` + +After that you can train the Mask RCNN r50 on DeepFashion-In-shop dataset by launching training with the `mask_rcnn_r50_fpn_1x.py` config +or creating your own config file. + +## Citation + +``` +@inproceedings{liuLQWTcvpr16DeepFashion, + author = {Liu, Ziwei and Luo, Ping and Qiu, Shi and Wang, Xiaogang and Tang, Xiaoou}, + title = {DeepFashion: Powering Robust Clothes Recognition and Retrieval with Rich Annotations}, + booktitle = {Proceedings of IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + month = {June}, + year = {2016} +} +``` + +## Model Zoo + +| Backbone | Model type | Dataset | bbox detection Average Precision | segmentation Average Precision | Config | Download (Google) | +| :---------: | :----------: | :-----------------: | :--------------------------------: | :----------------------------: | :---------:| :-------------------------: | +| ResNet50 | Mask RCNN | DeepFashion-In-shop | 0.599 | 0.584 |[config](https://github.com/open-mmlab/mmdetection/blob/master/configs/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py)| [model](https://download.openmmlab.com/mmdetection/v2.0/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion/mask_rcnn_r50_fpn_15e_deepfashion_20200329_192752.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion/20200329_192752.log.json) | diff --git a/object_detection/configs/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py b/object_detection/configs/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py new file mode 100644 index 0000000000000000000000000000000000000000..c4e86387e3ce4aad3dd68d7613160fced4d3785b --- /dev/null +++ b/object_detection/configs/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py @@ -0,0 +1,10 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/deepfashion.py', '../_base_/schedules/schedule_1x.py', + '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict(num_classes=15), mask_head=dict(num_classes=15))) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=15) diff --git a/object_detection/configs/deformable_detr/README.md b/object_detection/configs/deformable_detr/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e3b8e41d27c74e7ce81317ee346327f073563328 --- /dev/null +++ b/object_detection/configs/deformable_detr/README.md @@ -0,0 +1,45 @@ +# Deformable DETR: Deformable Transformers for End-to-End Object Detection + +## Abstract + + + +DETR has been recently 
proposed to eliminate the need for many hand-designed components in object detection while demonstrating good performance. However, it suffers from slow convergence and limited feature spatial resolution, due to the limitation of Transformer attention modules in processing image feature maps. To mitigate these issues, we proposed Deformable DETR, whose attention modules only attend to a small set of key sampling points around a reference. Deformable DETR can achieve better performance than DETR (especially on small objects) with 10 times less training epochs. Extensive experiments on the COCO benchmark demonstrate the effectiveness of our approach. + + +
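The central idea in the abstract, letting each query attend to only a handful of sampled points around its reference instead of the whole feature map, can be sketched in a few lines. This toy single-head, single-level version is for intuition only; it is not the `MultiScaleDeformableAttention` module that the configs below actually use:

```python
# Toy deformable-attention sketch: gather K bilinearly sampled values around
# each query's reference point and mix them with learned attention weights.
import torch
import torch.nn.functional as F

def sampled_attention(value, ref_points, offsets, attn_weights):
    """value: (N, C, H, W); ref_points: (N, Q, 2) with (x, y) in [0, 1];
    offsets: (N, Q, K, 2), normalized; attn_weights: (N, Q, K), softmaxed over K."""
    locs = ref_points[:, :, None, :] + offsets                  # absolute sampling points
    grid = 2.0 * locs - 1.0                                     # grid_sample expects [-1, 1]
    sampled = F.grid_sample(value, grid, align_corners=False)   # (N, C, Q, K)
    return (sampled * attn_weights[:, None]).sum(dim=-1)        # (N, C, Q)

v = torch.randn(2, 256, 32, 32)
refs = torch.rand(2, 300, 2)               # 300 queries, matching num_query=300 below
offs = 0.05 * torch.randn(2, 300, 4, 2)    # K = 4 sampling points per query
w = torch.softmax(torch.randn(2, 300, 4), dim=-1)
out = sampled_attention(v, refs, offs, w)  # (2, 256, 300)
```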
+ + + + +## Citation + + + +We provide the config files for Deformable DETR: [Deformable DETR: Deformable Transformers for End-to-End Object Detection](https://arxiv.org/abs/2010.04159). + +``` +@inproceedings{ +zhu2021deformable, +title={Deformable DETR: Deformable Transformers for End-to-End Object Detection}, +author={Xizhou Zhu and Weijie Su and Lewei Lu and Bin Li and Xiaogang Wang and Jifeng Dai}, +booktitle={International Conference on Learning Representations}, +year={2021}, +url={https://openreview.net/forum?id=gZ9hCDWe6ke} +} +``` + +## Results and Models + +| Backbone | Model | Lr schd | box AP | Config | Download | +|:------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | Deformable DETR |50e | 44.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_r50_16x2_50e_coco/deformable_detr_r50_16x2_50e_coco_20210419_220030-a12b9512.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_r50_16x2_50e_coco/deformable_detr_r50_16x2_50e_coco_20210419_220030-a12b9512.log.json) | +| R-50 | + iterative bounding box refinement |50e | 46.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco/deformable_detr_refine_r50_16x2_50e_coco_20210419_220503-5f5dff21.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco/deformable_detr_refine_r50_16x2_50e_coco_20210419_220503-5f5dff21.log.json) | +| R-50 | ++ two-stage Deformable DETR |50e | 46.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco/deformable_detr_twostage_refine_r50_16x2_50e_coco_20210419_220613-9d28ab72.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco/deformable_detr_twostage_refine_r50_16x2_50e_coco_20210419_220613-9d28ab72.log.json) | + +# NOTE + +1. All models are trained with batch size 32. +2. The performance is unstable. `Deformable DETR` and `iterative bounding box refinement` may fluctuate about 0.3 mAP. `two-stage Deformable DETR` may fluctuate about 0.2 mAP. 
diff --git a/object_detection/configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py b/object_detection/configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0a58d9a241bc45f2ca57f817789894502c010a16 --- /dev/null +++ b/object_detection/configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py @@ -0,0 +1,172 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +model = dict( + type='DeformableDETR', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='ChannelMapper', + in_channels=[512, 1024, 2048], + kernel_size=1, + out_channels=256, + act_cfg=None, + norm_cfg=dict(type='GN', num_groups=32), + num_outs=4), + bbox_head=dict( + type='DeformableDETRHead', + num_query=300, + num_classes=80, + in_channels=2048, + sync_cls_avg_factor=True, + as_two_stage=False, + transformer=dict( + type='DeformableDetrTransformer', + encoder=dict( + type='DetrTransformerEncoder', + num_layers=6, + transformerlayers=dict( + type='BaseTransformerLayer', + attn_cfgs=dict( + type='MultiScaleDeformableAttention', embed_dims=256), + feedforward_channels=1024, + ffn_dropout=0.1, + operation_order=('self_attn', 'norm', 'ffn', 'norm'))), + decoder=dict( + type='DeformableDetrTransformerDecoder', + num_layers=6, + return_intermediate=True, + transformerlayers=dict( + type='DetrTransformerDecoderLayer', + attn_cfgs=[ + dict( + type='MultiheadAttention', + embed_dims=256, + num_heads=8, + dropout=0.1), + dict( + type='MultiScaleDeformableAttention', + embed_dims=256) + ], + feedforward_channels=1024, + ffn_dropout=0.1, + operation_order=('self_attn', 'norm', 'cross_attn', 'norm', + 'ffn', 'norm')))), + positional_encoding=dict( + type='SinePositionalEncoding', + num_feats=128, + normalize=True, + offset=-0.5), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=2.0), + loss_bbox=dict(type='L1Loss', loss_weight=5.0), + loss_iou=dict(type='GIoULoss', loss_weight=2.0)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='HungarianAssigner', + cls_cost=dict(type='FocalLossCost', weight=2.0), + reg_cost=dict(type='BBoxL1Cost', weight=5.0, box_format='xywh'), + iou_cost=dict(type='IoUCost', iou_mode='giou', weight=2.0))), + test_cfg=dict(max_per_img=100)) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +# train_pipeline, NOTE the img_scale and the Pad's size_divisor is different +# from the default setting in mmdet. 
+train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='AutoAugment', + policies=[ + [ + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict( + type='Resize', + # The radio of all image in train dataset < 7 + # follow the original impl + img_scale=[(400, 4200), (500, 4200), (600, 4200)], + multiscale_mode='value', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ] + ]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=1), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +# test_pipeline, NOTE the Pad's size_divisor is different from the default +# setting (size_divisor=32). While there is little effect on the performance +# whether we use the default setting or use size_divisor=1. +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=1), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(filter_empty_gt=False, pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='AdamW', + lr=2e-4, + weight_decay=0.0001, + paramwise_cfg=dict( + custom_keys={ + 'backbone': dict(lr_mult=0.1), + 'sampling_offsets': dict(lr_mult=0.1), + 'reference_points': dict(lr_mult=0.1) + })) +optimizer_config = dict(grad_clip=dict(max_norm=0.1, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[40]) +runner = dict(type='EpochBasedRunner', max_epochs=50) diff --git a/object_detection/configs/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco.py b/object_detection/configs/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..01f13df4886558366625bc4f3a367cb8a5154462 --- /dev/null +++ b/object_detection/configs/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco.py @@ -0,0 +1,2 @@ +_base_ = 'deformable_detr_r50_16x2_50e_coco.py' +model = dict(bbox_head=dict(with_box_refine=True)) diff --git a/object_detection/configs/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco.py b/object_detection/configs/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2aa840d9e961f62307f05e8dde2d8520edef8cad --- /dev/null +++ b/object_detection/configs/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco.py @@ -0,0 +1,2 @@ +_base_ = 'deformable_detr_refine_r50_16x2_50e_coco.py' +model = dict(bbox_head=dict(as_two_stage=True)) diff --git a/object_detection/configs/deformable_detr/metafile.yml 
b/object_detection/configs/deformable_detr/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..873292db7bc7da32fa6acab9fa8beef7fe1b2266 --- /dev/null +++ b/object_detection/configs/deformable_detr/metafile.yml @@ -0,0 +1,56 @@ +Collections: + - Name: Deformable DETR + Metadata: + Training Data: COCO + Training Techniques: + - AdamW + - Multi Scale Train + - Gradient Clip + Training Resources: 8x V100 GPUs + Architecture: + - ResNet + - Transformer + Paper: + URL: https://openreview.net/forum?id=gZ9hCDWe6ke + Title: 'Deformable DETR: Deformable Transformers for End-to-End Object Detection' + README: configs/deformable_detr/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.12.0/mmdet/models/detectors/deformable_detr.py#L6 + Version: v2.12.0 + +Models: + - Name: deformable_detr_r50_16x2_50e_coco + In Collection: Deformable DETR + Config: configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py + Metadata: + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_r50_16x2_50e_coco/deformable_detr_r50_16x2_50e_coco_20210419_220030-a12b9512.pth + + - Name: deformable_detr_refine_r50_16x2_50e_coco + In Collection: Deformable DETR + Config: configs/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco.py + Metadata: + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco/deformable_detr_refine_r50_16x2_50e_coco_20210419_220503-5f5dff21.pth + + - Name: deformable_detr_twostage_refine_r50_16x2_50e_coco + In Collection: Deformable DETR + Config: configs/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco.py + Metadata: + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco/deformable_detr_twostage_refine_r50_16x2_50e_coco_20210419_220613-9d28ab72.pth diff --git a/object_detection/configs/detectors/README.md b/object_detection/configs/detectors/README.md new file mode 100644 index 0000000000000000000000000000000000000000..c90302b2f6c9e55d934cbbc6581b868ef014a918 --- /dev/null +++ b/object_detection/configs/detectors/README.md @@ -0,0 +1,73 @@ +# DetectoRS: Detecting Objects with Recursive Feature Pyramid and Switchable Atrous Convolution + +## Abstract + + + +Many modern object detectors demonstrate outstanding performances by using the mechanism of looking and thinking twice. In this paper, we explore this mechanism in the backbone design for object detection. At the macro level, we propose Recursive Feature Pyramid, which incorporates extra feedback connections from Feature Pyramid Networks into the bottom-up backbone layers. At the micro level, we propose Switchable Atrous Convolution, which convolves the features with different atrous rates and gathers the results using switch functions. Combining them results in DetectoRS, which significantly improves the performances of object detection. On COCO test-dev, DetectoRS achieves state-of-the-art 55.7% box AP for object detection, 48.5% mask AP for instance segmentation, and 50.0% PQ for panoptic segmentation. + + +
+ +
+ + + + +## Citation + + + +We provide the config files for [DetectoRS: Detecting Objects with Recursive Feature Pyramid and Switchable Atrous Convolution](https://arxiv.org/pdf/2006.02334.pdf). + +```BibTeX +@article{qiao2020detectors, + title={DetectoRS: Detecting Objects with Recursive Feature Pyramid and Switchable Atrous Convolution}, + author={Qiao, Siyuan and Chen, Liang-Chieh and Yuille, Alan}, + journal={arXiv preprint arXiv:2006.02334}, + year={2020} +} +``` + +## Dataset + +DetectoRS requires COCO and [COCO-stuff](http://calvin.inf.ed.ac.uk/wp-content/uploads/data/cocostuffdataset/stuffthingmaps_trainval2017.zip) dataset for training. You need to download and extract it in the COCO dataset path. +The directory should be like this. + +```none +mmdetection +├── mmdet +├── tools +├── configs +├── data +│ ├── coco +│ │ ├── annotations +│ │ ├── train2017 +│ │ ├── val2017 +│ │ ├── test2017 +| | ├── stuffthingmaps +``` + +## Results and Models + +DetectoRS includes two major components: + +- Recursive Feature Pyramid (RFP). +- Switchable Atrous Convolution (SAC). + +They can be used independently. +Combining them together results in DetectoRS. +The results on COCO 2017 val are shown in the below table. + +| Method | Detector | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:------:|:--------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| RFP | Cascade + ResNet-50 | 1x | 7.5 | - | 44.8 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/cascade_rcnn_r50_rfp_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_rfp_1x_coco/cascade_rcnn_r50_rfp_1x_coco-8cf51bfd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_rfp_1x_coco/cascade_rcnn_r50_rfp_1x_coco_20200624_104126.log.json) | +| SAC | Cascade + ResNet-50 | 1x | 5.6 | - | 45.0| | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/cascade_rcnn_r50_sac_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_sac_1x_coco/cascade_rcnn_r50_sac_1x_coco-24bfda62.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_sac_1x_coco/cascade_rcnn_r50_sac_1x_coco_20200624_104402.log.json) | +| DetectoRS | Cascade + ResNet-50 | 1x | 9.9 | - | 47.4 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/detectors_cascade_rcnn_r50_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_cascade_rcnn_r50_1x_coco/detectors_cascade_rcnn_r50_1x_coco-32a10ba0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_cascade_rcnn_r50_1x_coco/detectors_cascade_rcnn_r50_1x_coco_20200706_001203.log.json) | +| RFP | HTC + ResNet-50 | 1x | 11.2 | - | 46.6 | 40.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/htc_r50_rfp_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_rfp_1x_coco/htc_r50_rfp_1x_coco-8ff87c51.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_rfp_1x_coco/htc_r50_rfp_1x_coco_20200624_103053.log.json) | +| SAC | HTC + ResNet-50 | 1x | 9.3 | - | 46.4 | 40.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/htc_r50_sac_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_sac_1x_coco/htc_r50_sac_1x_coco-bfa60c54.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_sac_1x_coco/htc_r50_sac_1x_coco_20200624_103111.log.json) | +| DetectoRS | HTC + ResNet-50 | 1x | 13.6 | - | 49.1 | 42.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/detectors_htc_r50_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r50_1x_coco/detectors_htc_r50_1x_coco-329b1453.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r50_1x_coco/detectors_htc_r50_1x_coco_20200624_103659.log.json) | +| DetectoRS | HTC + ResNet-101 | 20e | 19.6 | | 50.5 | 43.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/detectors_htc_r101_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r101_20e_coco/detectors_htc_r101_20e_coco_20210419_203638-348d533b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r101_20e_coco/detectors_htc_r101_20e_coco_20210419_203638.log.json) | + +*Note*: This is a re-implementation based on MMDetection-V2. +The original implementation is based on MMDetection-V1. diff --git a/object_detection/configs/detectors/cascade_rcnn_r50_rfp_1x_coco.py b/object_detection/configs/detectors/cascade_rcnn_r50_rfp_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4430d8a677e48f84552eb23403bc874c56bda506 --- /dev/null +++ b/object_detection/configs/detectors/cascade_rcnn_r50_rfp_1x_coco.py @@ -0,0 +1,28 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + pretrained='torchvision://resnet50', + style='pytorch'))) diff --git a/object_detection/configs/detectors/cascade_rcnn_r50_sac_1x_coco.py b/object_detection/configs/detectors/cascade_rcnn_r50_sac_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ccd9319b2d1badebf3b891c8e3bdd55a435a4b7c --- /dev/null +++ b/object_detection/configs/detectors/cascade_rcnn_r50_sac_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True))) diff --git a/object_detection/configs/detectors/detectors_cascade_rcnn_r50_1x_coco.py b/object_detection/configs/detectors/detectors_cascade_rcnn_r50_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f76040434f1ff07608c83202f779dfacfe91c323 --- /dev/null +++ b/object_detection/configs/detectors/detectors_cascade_rcnn_r50_1x_coco.py @@ -0,0 +1,32 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + 
type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + pretrained='torchvision://resnet50', + style='pytorch'))) diff --git a/object_detection/configs/detectors/detectors_htc_r101_20e_coco.py b/object_detection/configs/detectors/detectors_htc_r101_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..93d7d2b1adeb3fbdb7bac0107edf4433669e8015 --- /dev/null +++ b/object_detection/configs/detectors/detectors_htc_r101_20e_coco.py @@ -0,0 +1,28 @@ +_base_ = '../htc/htc_r101_fpn_20e_coco.py' + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + pretrained='torchvision://resnet101', + style='pytorch'))) diff --git a/object_detection/configs/detectors/detectors_htc_r50_1x_coco.py b/object_detection/configs/detectors/detectors_htc_r50_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0d2fc4f77fcca715c1dfb613306d214b636aa0c0 --- /dev/null +++ b/object_detection/configs/detectors/detectors_htc_r50_1x_coco.py @@ -0,0 +1,28 @@ +_base_ = '../htc/htc_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + pretrained='torchvision://resnet50', + style='pytorch'))) diff --git a/object_detection/configs/detectors/htc_r50_rfp_1x_coco.py b/object_detection/configs/detectors/htc_r50_rfp_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..496104e12550a1985f9c9e3748a343f69d7df6d8 --- /dev/null +++ b/object_detection/configs/detectors/htc_r50_rfp_1x_coco.py @@ -0,0 +1,24 @@ +_base_ = '../htc/htc_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + 
norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + pretrained='torchvision://resnet50', + style='pytorch'))) diff --git a/object_detection/configs/detectors/htc_r50_sac_1x_coco.py b/object_detection/configs/detectors/htc_r50_sac_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..72d4db963ffd95851b945911b3db9941426583ab --- /dev/null +++ b/object_detection/configs/detectors/htc_r50_sac_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../htc/htc_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True))) diff --git a/object_detection/configs/detectors/metafile.yml b/object_detection/configs/detectors/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..4bed56949e738108b7f3479ee1e4a5447d45701f --- /dev/null +++ b/object_detection/configs/detectors/metafile.yml @@ -0,0 +1,114 @@ +Collections: + - Name: DetectoRS + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - ASPP + - FPN + - RFP + - RPN + - ResNet + - RoIAlign + - SAC + Paper: + URL: https://arxiv.org/abs/2006.02334 + Title: 'DetectoRS: Detecting Objects with Recursive Feature Pyramid and Switchable Atrous Convolution' + README: configs/detectors/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.2.0/mmdet/models/backbones/detectors_resnet.py#L205 + Version: v2.2.0 + +Models: + - Name: cascade_rcnn_r50_rfp_1x_coco + In Collection: DetectoRS + Config: configs/detectors/cascade_rcnn_r50_rfp_1x_coco.py + Metadata: + Training Memory (GB): 7.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_rfp_1x_coco/cascade_rcnn_r50_rfp_1x_coco-8cf51bfd.pth + + - Name: cascade_rcnn_r50_sac_1x_coco + In Collection: DetectoRS + Config: configs/detectors/cascade_rcnn_r50_sac_1x_coco.py + Metadata: + Training Memory (GB): 5.6 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_sac_1x_coco/cascade_rcnn_r50_sac_1x_coco-24bfda62.pth + + - Name: detectors_cascade_rcnn_r50_1x_coco + In Collection: DetectoRS + Config: configs/detectors/detectors_cascade_rcnn_r50_1x_coco.py + Metadata: + Training Memory (GB): 9.9 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_cascade_rcnn_r50_1x_coco/detectors_cascade_rcnn_r50_1x_coco-32a10ba0.pth + + - Name: htc_r50_rfp_1x_coco + In Collection: DetectoRS + Config: configs/detectors/htc_r50_rfp_1x_coco.py + Metadata: + Training Memory (GB): 11.2 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_rfp_1x_coco/htc_r50_rfp_1x_coco-8ff87c51.pth + + - Name: htc_r50_sac_1x_coco + In Collection: DetectoRS + Config: configs/detectors/htc_r50_sac_1x_coco.py + Metadata: + Training Memory (GB): 9.3 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.4 + - Task: Instance Segmentation + 
Dataset: COCO + Metrics: + mask AP: 40.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_sac_1x_coco/htc_r50_sac_1x_coco-bfa60c54.pth + + - Name: detectors_htc_r50_1x_coco + In Collection: DetectoRS + Config: configs/detectors/detectors_htc_r50_1x_coco.py + Metadata: + Training Memory (GB): 13.6 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 49.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 42.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r50_1x_coco/detectors_htc_r50_1x_coco-329b1453.pth diff --git a/object_detection/configs/detr/README.md b/object_detection/configs/detr/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a04e7ab7a949acbc84e773c37f73f9c49156a86c --- /dev/null +++ b/object_detection/configs/detr/README.md @@ -0,0 +1,41 @@ +# End-to-End Object Detection with Transformers + +## Abstract + + + +We present a new method that views object detection as a direct set prediction problem. Our approach streamlines the detection pipeline, effectively removing the need for many hand-designed components like a non-maximum suppression procedure or anchor generation that explicitly encode our prior knowledge about the task. The main ingredients of the new framework, called DEtection TRansformer or DETR, are a set-based global loss that forces unique predictions via bipartite matching, and a transformer encoder-decoder architecture. Given a fixed small set of learned object queries, DETR reasons about the relations of the objects and the global image context to directly output the final set of predictions in parallel. The new model is conceptually simple and does not require a specialized library, unlike many other modern detectors. DETR demonstrates accuracy and run-time performance on par with the well-established and highly-optimized Faster RCNN baseline on the challenging COCO object detection dataset. Moreover, DETR can be easily generalized to produce panoptic segmentation in a unified manner. We show that it significantly outperforms competitive baselines. + + +
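As a self-contained illustration of the bipartite matching mentioned above (our toy sketch with made-up numbers, not DETR's actual implementation), the assignment step reduces to solving a linear assignment problem over a classification-plus-box cost matrix; the `HungarianAssigner` in the config further below does the same thing at scale with `ClassificationCost` (weight 1), `BBoxL1Cost` (weight 5) and `IoUCost` (weight 2).

```python
# Toy sketch of DETR-style one-to-one matching between predictions and ground truth.
import numpy as np
from scipy.optimize import linear_sum_assignment

def match(pred_probs, pred_boxes, gt_labels, gt_boxes, w_cls=1.0, w_l1=5.0):
    # cost of assigning prediction i to ground truth j
    cls_cost = -pred_probs[:, gt_labels]                              # higher class prob -> lower cost
    l1_cost = np.abs(pred_boxes[:, None, :] - gt_boxes[None, :, :]).sum(-1)
    cost = w_cls * cls_cost + w_l1 * l1_cost
    rows, cols = linear_sum_assignment(cost)                          # Hungarian algorithm
    return list(zip(rows.tolist(), cols.tolist()))

# 3 predictions (probabilities over 2 classes, normalized cxcywh boxes), 2 ground truths
probs = np.array([[0.9, 0.1], [0.2, 0.8], [0.5, 0.5]])
boxes = np.array([[0.5, 0.5, 0.2, 0.2], [0.1, 0.1, 0.1, 0.1], [0.8, 0.8, 0.3, 0.3]])
gts = np.array([[0.5, 0.5, 0.2, 0.2], [0.1, 0.1, 0.1, 0.1]])
print(match(probs, boxes, np.array([0, 1]), gts))                    # e.g. [(0, 0), (1, 1)]
```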
+ +
+ + + + +## Citation + + + +We provide the config files for DETR: [End-to-End Object Detection with Transformers](https://arxiv.org/abs/2005.12872). + +```BibTeX +@inproceedings{detr, + author = {Nicolas Carion and + Francisco Massa and + Gabriel Synnaeve and + Nicolas Usunier and + Alexander Kirillov and + Sergey Zagoruyko}, + title = {End-to-End Object Detection with Transformers}, + booktitle = {ECCV}, + year = {2020} +} +``` + +## Results and Models + +| Backbone | Model | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:------:|:--------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | DETR |150e |7.9| | 40.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detr/detr_r50_8x2_150e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detr/detr_r50_8x2_150e_coco/detr_r50_8x2_150e_coco_20201130_194835-2c4b8974.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detr/detr_r50_8x2_150e_coco/detr_r50_8x2_150e_coco_20201130_194835.log.json) | diff --git a/object_detection/configs/detr/detr_r50_8x2_150e_coco.py b/object_detection/configs/detr/detr_r50_8x2_150e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..892447dec15f3ac0411c5b8d36725b84a40ecfec --- /dev/null +++ b/object_detection/configs/detr/detr_r50_8x2_150e_coco.py @@ -0,0 +1,150 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +model = dict( + type='DETR', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(3, ), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + bbox_head=dict( + type='DETRHead', + num_classes=80, + in_channels=2048, + transformer=dict( + type='Transformer', + encoder=dict( + type='DetrTransformerEncoder', + num_layers=6, + transformerlayers=dict( + type='BaseTransformerLayer', + attn_cfgs=[ + dict( + type='MultiheadAttention', + embed_dims=256, + num_heads=8, + dropout=0.1) + ], + feedforward_channels=2048, + ffn_dropout=0.1, + operation_order=('self_attn', 'norm', 'ffn', 'norm'))), + decoder=dict( + type='DetrTransformerDecoder', + return_intermediate=True, + num_layers=6, + transformerlayers=dict( + type='DetrTransformerDecoderLayer', + attn_cfgs=dict( + type='MultiheadAttention', + embed_dims=256, + num_heads=8, + dropout=0.1), + feedforward_channels=2048, + ffn_dropout=0.1, + operation_order=('self_attn', 'norm', 'cross_attn', 'norm', + 'ffn', 'norm')), + )), + positional_encoding=dict( + type='SinePositionalEncoding', num_feats=128, normalize=True), + loss_cls=dict( + type='CrossEntropyLoss', + bg_cls_weight=0.1, + use_sigmoid=False, + loss_weight=1.0, + class_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=5.0), + loss_iou=dict(type='GIoULoss', loss_weight=2.0)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='HungarianAssigner', + cls_cost=dict(type='ClassificationCost', weight=1.), + reg_cost=dict(type='BBoxL1Cost', weight=5.0, box_format='xywh'), + iou_cost=dict(type='IoUCost', iou_mode='giou', weight=2.0))), + test_cfg=dict(max_per_img=100)) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +# train_pipeline, NOTE the img_scale and the Pad's size_divisor is different +# from the default setting in mmdet. 
+train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='AutoAugment', + policies=[[ + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict( + type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ]]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=1), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +# test_pipeline, NOTE the Pad's size_divisor is different from the default +# setting (size_divisor=32). While there is little effect on the performance +# whether we use the default setting or use size_divisor=1. +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=1), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='AdamW', + lr=0.0001, + weight_decay=0.0001, + paramwise_cfg=dict( + custom_keys={'backbone': dict(lr_mult=0.1, decay_mult=1.0)})) +optimizer_config = dict(grad_clip=dict(max_norm=0.1, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[100]) +runner = dict(type='EpochBasedRunner', max_epochs=150) diff --git a/object_detection/configs/detr/metafile.yml b/object_detection/configs/detr/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..45622cf9152c6461cd5924605de87c763009f491 --- /dev/null +++ b/object_detection/configs/detr/metafile.yml @@ -0,0 +1,33 @@ +Collections: + - Name: DETR + Metadata: + Training Data: COCO + Training Techniques: + - AdamW + - Multi Scale Train + - Gradient Clip + Training Resources: 8x V100 GPUs + Architecture: + - ResNet + - Transformer + Paper: + URL: https://arxiv.org/abs/2005.12872 + Title: 'End-to-End Object Detection with Transformers' + README: configs/detr/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.7.0/mmdet/models/detectors/detr.py#L7 + Version: v2.7.0 + +Models: + - Name: detr_r50_8x2_150e_coco + In Collection: DETR + Config: configs/detr/detr_r50_8x2_150e_coco.py + Metadata: + Training Memory (GB): 7.9 + Epochs: 150 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detr/detr_r50_8x2_150e_coco/detr_r50_8x2_150e_coco_20201130_194835-2c4b8974.pth diff --git a/object_detection/configs/double_heads/README.md b/object_detection/configs/double_heads/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..8191d7ab29e5e29ba727f130a71869e50d6ab83e --- /dev/null +++ b/object_detection/configs/double_heads/README.md @@ -0,0 +1,36 @@ +# Rethinking Classification and Localization for Object Detection + +## Abstract + + + +Two head structures (i.e., fully connected head and convolution head) have been widely used in R-CNN based detectors for classification and localization tasks. However, there is a lack of understanding of how these two head structures work for these two tasks. To address this issue, we perform a thorough analysis and find an interesting fact that the two head structures have opposite preferences towards the two tasks. Specifically, the fully connected head (fc-head) is more suitable for the classification task, while the convolution head (conv-head) is more suitable for the localization task. Furthermore, we examine the output feature maps of both heads and find that fc-head has more spatial sensitivity than conv-head. Thus, fc-head has more capability to distinguish a complete object from part of an object, but is not robust to regress the whole object. Based upon these findings, we propose a Double-Head method, which has a fully connected head focusing on classification and a convolution head for bounding box regression. Without bells and whistles, our method gains +3.5 and +2.8 AP on the MS COCO dataset from Feature Pyramid Network (FPN) baselines with ResNet-50 and ResNet-101 backbones, respectively. + + 
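A simplified, illustrative module sketch of this idea (ours, not MMDetection's `DoubleConvFCBBoxHead`): an fc branch scores classes while a conv branch regresses boxes from the same RoI feature.

```python
# Illustrative Double-Head sketch: fc branch for classification,
# conv branch for box regression, both on a 256 x 7 x 7 RoI feature.
import torch
import torch.nn as nn

class ToyDoubleHead(nn.Module):
    def __init__(self, in_channels=256, roi_size=7, num_classes=80):
        super().__init__()
        # fc-head: better suited to classification
        self.fc_cls = nn.Sequential(
            nn.Flatten(),
            nn.Linear(in_channels * roi_size * roi_size, 1024), nn.ReLU(inplace=True),
            nn.Linear(1024, num_classes + 1))             # +1 for background
        # conv-head: better suited to localization
        self.conv_reg = nn.Sequential(
            nn.Conv2d(in_channels, 1024, 3, padding=1), nn.ReLU(inplace=True),
            nn.AdaptiveAvgPool2d(1), nn.Flatten(),
            nn.Linear(1024, 4 * num_classes))             # class-specific box deltas

    def forward(self, roi_feats):                         # (N, 256, 7, 7)
        return self.fc_cls(roi_feats), self.conv_reg(roi_feats)

scores, deltas = ToyDoubleHead()(torch.randn(8, 256, 7, 7))
print(scores.shape, deltas.shape)                         # torch.Size([8, 81]) torch.Size([8, 320])
```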
+ +
+ + + + +## Citation + + + +```latex +@article{wu2019rethinking, + title={Rethinking Classification and Localization for Object Detection}, + author={Yue Wu and Yinpeng Chen and Lu Yuan and Zicheng Liu and Lijuan Wang and Hongzhi Li and Yun Fu}, + year={2019}, + eprint={1904.06493}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | pytorch | 1x | 6.8 | 9.5 | 40.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/double_heads/dh_faster_rcnn_r50_fpn_1x_coco/dh_faster_rcnn_r50_fpn_1x_coco_20200130-586b67df.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/double_heads/dh_faster_rcnn_r50_fpn_1x_coco/dh_faster_rcnn_r50_fpn_1x_coco_20200130_220238.log.json) | diff --git a/object_detection/configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py b/object_detection/configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9b8118b4b633c78120c370f877f47e951c2fdb38 --- /dev/null +++ b/object_detection/configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,23 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + type='DoubleHeadRoIHead', + reg_roi_scale_factor=1.3, + bbox_head=dict( + _delete_=True, + type='DoubleConvFCBBoxHead', + num_convs=4, + num_fcs=2, + in_channels=256, + conv_out_channels=1024, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=2.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=2.0)))) diff --git a/object_detection/configs/double_heads/metafile.yml b/object_detection/configs/double_heads/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..6fe9b7af952d8d5e1d221862ee3f1098a547355e --- /dev/null +++ b/object_detection/configs/double_heads/metafile.yml @@ -0,0 +1,41 @@ +Collections: + - Name: Rethinking Classification and Localization for Object Detection + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - RPN + - ResNet + - RoIAlign + Paper: + URL: https://arxiv.org/pdf/1904.06493 + Title: 'Rethinking Classification and Localization for Object Detection' + README: configs/double_heads/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/roi_heads/double_roi_head.py#L6 + Version: v2.0.0 + +Models: + - Name: dh_faster_rcnn_r50_fpn_1x_coco + In Collection: Rethinking Classification and Localization for Object Detection + Config: configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.8 + inference time (ms/im): + - value: 105.26 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/double_heads/dh_faster_rcnn_r50_fpn_1x_coco/dh_faster_rcnn_r50_fpn_1x_coco_20200130-586b67df.pth diff --git a/object_detection/configs/dynamic_rcnn/README.md b/object_detection/configs/dynamic_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d79b181ed619b4096432254f4f069f9a29cb496f --- /dev/null +++ b/object_detection/configs/dynamic_rcnn/README.md @@ -0,0 +1,34 @@ +# Dynamic R-CNN: Towards High Quality Object Detection via Dynamic Training + +## Abstract + + + +Although two-stage object detectors have continuously advanced the state-of-the-art performance in recent years, the training process itself is far from crystal clear. In this work, we first point out the inconsistency problem between the fixed network settings and the dynamic training procedure, which greatly affects the performance. For example, the fixed label assignment strategy and regression loss function cannot fit the distribution change of proposals and thus are harmful to training high quality detectors. Consequently, we propose Dynamic R-CNN to adjust the label assignment criteria (IoU threshold) and the shape of the regression loss function (parameters of SmoothL1 Loss) automatically based on the statistics of proposals during training. This dynamic design makes better use of the training samples and pushes the detector to fit more high quality samples. Specifically, our method improves upon the ResNet-50-FPN baseline with 1.9% AP and 5.5% AP90 on the MS COCO dataset with no extra overhead. + + 
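The `dynamic_rcnn` options in the config below (`iou_topk`, `beta_topk`, `update_iter_interval`, `initial_iou`, `initial_beta`) drive this adjustment. A rough, framework-free sketch of the update rule, simplified from the paper and not the actual `DynamicRoIHead` code, is:

```python
# Rough sketch of Dynamic R-CNN's training-time updates: the RoI IoU threshold
# tracks proposal-IoU statistics and the SmoothL1 beta tracks regression-error
# statistics, refreshed every `update_iter_interval` iterations.
import numpy as np

class DynamicStats:
    def __init__(self, initial_iou=0.4, initial_beta=1.0,
                 iou_topk=75, beta_topk=10, update_iter_interval=100):
        self.iou_thr, self.beta = initial_iou, initial_beta
        self.iou_topk, self.beta_topk = iou_topk, beta_topk
        self.interval = update_iter_interval
        self.iou_hist, self.beta_hist = [], []

    def record(self, proposal_ious, reg_errors):
        # per-image statistics: k-th highest IoU and k-th smallest regression error
        self.iou_hist.append(np.sort(proposal_ious)[::-1][:self.iou_topk][-1])
        self.beta_hist.append(np.sort(reg_errors)[:self.beta_topk][-1])

    def maybe_update(self, iteration):
        if iteration % self.interval == 0 and self.iou_hist:
            self.iou_thr = float(np.mean(self.iou_hist))    # raise IoU threshold as proposals improve
            self.beta = float(np.median(self.beta_hist))    # tighten SmoothL1 beta as errors shrink
            self.iou_hist, self.beta_hist = [], []
```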
+ +
+ + + + +## Citation + + + +``` +@article{DynamicRCNN, + author = {Hongkai Zhang and Hong Chang and Bingpeng Ma and Naiyan Wang and Xilin Chen}, + title = {Dynamic {R-CNN}: Towards High Quality Object Detection via Dynamic Training}, + journal = {arXiv preprint arXiv:2004.06002}, + year = {2020} +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | 1x | 3.8 | | 38.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x/dynamic_rcnn_r50_fpn_1x-62a3f276.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x/dynamic_rcnn_r50_fpn_1x_20200618_095048.log.json) | diff --git a/object_detection/configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py b/object_detection/configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f2deb99e44cba92fd79d0a2cd258ddf6927703c0 --- /dev/null +++ b/object_detection/configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,28 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + type='DynamicRoIHead', + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + train_cfg=dict( + rpn_proposal=dict(nms=dict(iou_threshold=0.85)), + rcnn=dict( + dynamic_rcnn=dict( + iou_topk=75, + beta_topk=10, + update_iter_interval=100, + initial_iou=0.4, + initial_beta=1.0))), + test_cfg=dict(rpn=dict(nms=dict(iou_threshold=0.85)))) diff --git a/object_detection/configs/dynamic_rcnn/metafile.yml b/object_detection/configs/dynamic_rcnn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..fec43db44da50bacdfe5aac29518f87194dd4e75 --- /dev/null +++ b/object_detection/configs/dynamic_rcnn/metafile.yml @@ -0,0 +1,35 @@ +Collections: + - Name: Dynamic R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Dynamic R-CNN + - FPN + - RPN + - ResNet + - RoIAlign + Paper: + URL: https://arxiv.org/pdf/2004.06002 + Title: 'Dynamic R-CNN: Towards High Quality Object Detection via Dynamic Training' + README: configs/dynamic_rcnn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.2.0/mmdet/models/roi_heads/dynamic_roi_head.py#L11 + Version: v2.2.0 + +Models: + - Name: dynamic_rcnn_r50_fpn_1x_coco + In Collection: Dynamic R-CNN + Config: configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.8 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x/dynamic_rcnn_r50_fpn_1x-62a3f276.pth diff --git a/object_detection/configs/empirical_attention/README.md b/object_detection/configs/empirical_attention/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..8937be26e02e87ad80dfc4d12859ddddec8573e8 --- /dev/null +++ b/object_detection/configs/empirical_attention/README.md @@ -0,0 +1,37 @@ +# An Empirical Study of Spatial Attention Mechanisms in Deep Networks + +## Abstract + + + +Attention mechanisms have become a popular component in deep neural networks, yet there has been little examination of how different influencing factors and methods for computing attention from these factors affect performance. Toward a better general understanding of attention mechanisms, we present an empirical study that ablates various spatial attention elements within a generalized attention formulation, encompassing the dominant Transformer attention as well as the prevalent deformable convolution and dynamic convolution modules. Conducted on a variety of applications, the study yields significant findings about spatial attention in deep networks, some of which run counter to conventional understanding. For example, we find that the query and key content comparison in Transformer attention is negligible for self-attention, but vital for encoder-decoder attention. A proper combination of deformable convolution with key content only saliency achieves the best accuracy-efficiency tradeoff in self-attention. Our results suggest that there exists much room for improvement in the design of attention mechanisms. + + +
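For reading the configs below: the `attention_type` strings ('1111', '0010') switch the four attention factors from this generalized formulation on and off. Here is a commented copy of the backbone plugin config, with the per-bit mapping added as our annotation based on our reading of the `GeneralizedAttention` docstring; treat that mapping as an assumption rather than upstream documentation.

```python
# Plugin config as used in the configs below, with our annotation of the
# four attention_type switches (one per factor in the generalized attention
# formulation from the paper).
attention_plugin = dict(
    cfg=dict(
        type='GeneralizedAttention',
        spatial_range=-1,
        num_heads=8,
        # bit 0: query & key content      bit 1: query content & relative position
        # bit 2: key content only         bit 3: relative position only
        attention_type='1111',   # '0010' keeps only the key-content (saliency) term
        kv_stride=2),
    stages=(False, False, True, True),   # apply in the last two backbone stages only
    position='after_conv2')
```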
+ +
+ + + + +## Citation + + + +```latex +@article{zhu2019empirical, + title={An Empirical Study of Spatial Attention Mechanisms in Deep Networks}, + author={Zhu, Xizhou and Cheng, Dazhi and Zhang, Zheng and Lin, Stephen and Dai, Jifeng}, + journal={arXiv preprint arXiv:1904.05873}, + year={2019} +} +``` + +## Results and Models + +| Backbone | Attention Component | DCN | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------------------:|:----:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | 1111 | N | 1x | 8.0 | 13.8 | 40.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco/faster_rcnn_r50_fpn_attention_1111_1x_coco_20200130-403cccba.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco/faster_rcnn_r50_fpn_attention_1111_1x_coco_20200130_210344.log.json) | +| R-50 | 0010 | N | 1x | 4.2 | 18.4 | 39.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco/faster_rcnn_r50_fpn_attention_0010_1x_coco_20200130-7cb0c14d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco/faster_rcnn_r50_fpn_attention_0010_1x_coco_20200130_210125.log.json) | +| R-50 | 1111 | Y | 1x | 8.0 | 12.7 | 42.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco_20200130-8b2523a6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco_20200130_204442.log.json) | +| R-50 | 0010 | Y | 1x | 4.2 | 17.1 | 42.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco_20200130-1a2e831d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco_20200130_210410.log.json) | diff --git a/object_detection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py b/object_detection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a544e3ab636aea0efe56007a0ea40608b6e71ad4 --- /dev/null +++ b/object_detection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='0010', + kv_stride=2), + stages=(False, False, True, True), + 
position='after_conv2') + ])) diff --git a/object_detection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py b/object_detection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..bbefd27aa02f427e27068b37ecf4d30fbd49b519 --- /dev/null +++ b/object_detection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + plugins=[ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='0010', + kv_stride=2), + stages=(False, False, True, True), + position='after_conv2') + ], + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/object_detection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py b/object_detection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..13a4645bfdb50d5a2f04cee49ecc5f7647d10acf --- /dev/null +++ b/object_detection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='1111', + kv_stride=2), + stages=(False, False, True, True), + position='after_conv2') + ])) diff --git a/object_detection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py b/object_detection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b1f26c081da27811f856fe9973eb444c82604727 --- /dev/null +++ b/object_detection/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + plugins=[ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='1111', + kv_stride=2), + stages=(False, False, True, True), + position='after_conv2') + ], + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/object_detection/configs/empirical_attention/metafile.yml b/object_detection/configs/empirical_attention/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..923bcb20d926353d5aedae9e08f5a099c19313f0 --- /dev/null +++ b/object_detection/configs/empirical_attention/metafile.yml @@ -0,0 +1,103 @@ +Collections: + - Name: Empirical Attention + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Deformable Convolution + - FPN + - RPN + - ResNet + - RoIAlign + - Spatial Attention + Paper: + URL: https://arxiv.org/pdf/1904.05873 + Title: 'An Empirical Study of Spatial Attention Mechanisms in Deep Networks' + README: configs/empirical_attention/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/ops/generalized_attention.py#L10 + Version: v2.0.0 + +Models: + - Name: faster_rcnn_r50_fpn_attention_1111_1x_coco + In Collection: Empirical Attention + Config: 
configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py + Metadata: + Training Memory (GB): 8.0 + inference time (ms/im): + - value: 72.46 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco/faster_rcnn_r50_fpn_attention_1111_1x_coco_20200130-403cccba.pth + + - Name: faster_rcnn_r50_fpn_attention_0010_1x_coco + In Collection: Empirical Attention + Config: configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + inference time (ms/im): + - value: 54.35 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco/faster_rcnn_r50_fpn_attention_0010_1x_coco_20200130-7cb0c14d.pth + + - Name: faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco + In Collection: Empirical Attention + Config: configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py + Metadata: + Training Memory (GB): 8.0 + inference time (ms/im): + - value: 78.74 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco_20200130-8b2523a6.pth + + - Name: faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco + In Collection: Empirical Attention + Config: configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + inference time (ms/im): + - value: 58.48 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco_20200130-1a2e831d.pth diff --git a/object_detection/configs/fast_rcnn/README.md b/object_detection/configs/fast_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..0e98c1327fc8a24eeb1ac3a45e1b97a4d0f8104a --- /dev/null +++ b/object_detection/configs/fast_rcnn/README.md @@ -0,0 +1,30 @@ +# Fast R-CNN + +## Abstract + + + +This paper proposes a Fast Region-based Convolutional Network method (Fast R-CNN) for object detection. Fast R-CNN builds on previous work to efficiently classify object proposals using deep convolutional networks. Compared to previous work, Fast R-CNN employs several innovations to improve training and testing speed while also increasing detection accuracy. Fast R-CNN trains the very deep VGG16 network 9x faster than R-CNN, is 213x faster at test-time, and achieves a higher mAP on PASCAL VOC 2012. Compared to SPPnet, Fast R-CNN trains VGG16 3x faster, tests 10x faster, and is more accurate. + + +
+ +
+ + + + +## Citation + + + +```latex +@inproceedings{girshick2015fast, + title={Fast r-cnn}, + author={Girshick, Ross}, + booktitle={Proceedings of the IEEE international conference on computer vision}, + year={2015} +} +``` + +## Results and models diff --git a/object_detection/configs/fast_rcnn/fast_rcnn_r101_caffe_fpn_1x_coco.py b/object_detection/configs/fast_rcnn/fast_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3ab8e98104de5af7b5b99e7cb03995736e9ac5a4 --- /dev/null +++ b/object_detection/configs/fast_rcnn/fast_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './fast_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/object_detection/configs/fast_rcnn/fast_rcnn_r101_fpn_1x_coco.py b/object_detection/configs/fast_rcnn/fast_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..83852b24e7c8d23f812733f7b2fd24fc0d0f38f8 --- /dev/null +++ b/object_detection/configs/fast_rcnn/fast_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './fast_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/fast_rcnn/fast_rcnn_r101_fpn_2x_coco.py b/object_detection/configs/fast_rcnn/fast_rcnn_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c22088579ea4a5b2d8e32a8349da63d2dc8b5f7f --- /dev/null +++ b/object_detection/configs/fast_rcnn/fast_rcnn_r101_fpn_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './fast_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/fast_rcnn/fast_rcnn_r50_caffe_fpn_1x_coco.py b/object_detection/configs/fast_rcnn/fast_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f1b29ef30c7662d821921851c994d7ea78aeca34 --- /dev/null +++ b/object_detection/configs/fast_rcnn/fast_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,48 @@ +_base_ = './fast_rcnn_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + norm_cfg=dict(type='BN', requires_grad=False), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=2000), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=None), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='ToTensor', keys=['proposals']), + dict( + type='ToDataContainer', + fields=[dict(key='proposals', stack=False)]), 
+ dict(type='Collect', keys=['img', 'proposals']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py b/object_detection/configs/fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d2f080e9d3b1ddade22341aa38c6258eaee78a50 --- /dev/null +++ b/object_detection/configs/fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,52 @@ +_base_ = [ + '../_base_/models/fast_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=2000), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=None), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='ToTensor', keys=['proposals']), + dict( + type='ToDataContainer', + fields=[dict(key='proposals', stack=False)]), + dict(type='Collect', keys=['img', 'proposals']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + proposal_file=data_root + 'proposals/rpn_r50_fpn_1x_train2017.pkl', + pipeline=train_pipeline), + val=dict( + proposal_file=data_root + 'proposals/rpn_r50_fpn_1x_val2017.pkl', + pipeline=test_pipeline), + test=dict( + proposal_file=data_root + 'proposals/rpn_r50_fpn_1x_val2017.pkl', + pipeline=test_pipeline)) diff --git a/object_detection/configs/fast_rcnn/fast_rcnn_r50_fpn_2x_coco.py b/object_detection/configs/fast_rcnn/fast_rcnn_r50_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..228e85645c1c7d1556810d209679d49abcd86f8f --- /dev/null +++ b/object_detection/configs/fast_rcnn/fast_rcnn_r50_fpn_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = './fast_rcnn_r50_fpn_1x_coco.py' + +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/faster_rcnn/README.md b/object_detection/configs/faster_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..3885c2ab70ca2555cdca6ad2c732014a2b6b298a --- /dev/null +++ b/object_detection/configs/faster_rcnn/README.md @@ -0,0 +1,81 @@ +# Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks + +## Abstract + + + +State-of-the-art object detection networks depend on region proposal algorithms to hypothesize object locations. Advances like SPPnet and Fast R-CNN have reduced the running time of these detection networks, exposing region proposal computation as a bottleneck. 
In this work, we introduce a Region Proposal Network (RPN) that shares full-image convolutional features with the detection network, thus enabling nearly cost-free region proposals. An RPN is a fully convolutional network that simultaneously predicts object bounds and objectness scores at each position. The RPN is trained end-to-end to generate high-quality region proposals, which are used by Fast R-CNN for detection. We further merge RPN and Fast R-CNN into a single network by sharing their convolutional features---using the recently popular terminology of neural networks with 'attention' mechanisms, the RPN component tells the unified network where to look. For the very deep VGG-16 model, our detection system has a frame rate of 5fps (including all steps) on a GPU, while achieving state-of-the-art object detection accuracy on PASCAL VOC 2007, 2012, and MS COCO datasets with only 300 proposals per image. In ILSVRC and COCO 2015 competitions, Faster R-CNN and RPN are the foundations of the 1st-place winning entries in several tracks. + + +
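For readers who want to try one of the Faster R-CNN configs and checkpoints listed in the tables below, here is a minimal inference sketch using mmdetection's high-level Python API. It is a sketch only: it assumes mmdet 2.x is installed, and the config, checkpoint, and image paths are illustrative placeholders (the checkpoint filename matches the R-50-FPN 1x row below).

```python
# Minimal sketch: run one of the pretrained Faster R-CNN checkpoints listed
# below on a single image with mmdetection's high-level API (mmdet 2.x).
# The config/checkpoint/image paths are illustrative placeholders.
from mmdet.apis import init_detector, inference_detector

config_file = 'object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py'
checkpoint_file = 'checkpoints/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth'

# Build the detector and load the weights (use device='cpu' if no GPU is available).
model = init_detector(config_file, checkpoint_file, device='cuda:0')

# For a pure detector the result is a list with one entry per COCO class,
# each an (N, 5) array of [x1, y1, x2, y2, score] boxes.
result = inference_detector(model, 'demo.jpg')
model.show_result('demo.jpg', result, score_thr=0.3, out_file='result.jpg')
```

Any other row in the tables below can be used by swapping in its config and checkpoint paths.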
+ + + + +## Citation + + +```latex +@article{Ren_2017, + title={Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks}, + journal={IEEE Transactions on Pattern Analysis and Machine Intelligence}, + publisher={Institute of Electrical and Electronics Engineers (IEEE)}, + author={Ren, Shaoqing and He, Kaiming and Girshick, Ross and Sun, Jian}, + year={2017}, + month={Jun}, +} +``` + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-DC5 | caffe | 1x | - | - | 37.2 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco/faster_rcnn_r50_caffe_dc5_1x_coco_20201030_151909-531f0f43.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco/faster_rcnn_r50_caffe_dc5_1x_coco_20201030_151909.log.json) | +| R-50-FPN | caffe | 1x | 3.8 | | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco/faster_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.378_20200504_180032-c5925ee5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco/faster_rcnn_r50_caffe_fpn_1x_coco_20200504_180032.log.json) | +| R-50-FPN | pytorch | 1x | 4.0 | 21.4 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +| R-50-FPN (FP16) | pytorch | 1x | 3.4 | 28.8 | 37.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fp16/faster_rcnn_r50_fpn_fp16_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fp16/faster_rcnn_r50_fpn_fp16_1x_coco/faster_rcnn_r50_fpn_fp16_1x_coco_20200204-d4dc1471.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fp16/faster_rcnn_r50_fpn_fp16_1x_coco/faster_rcnn_r50_fpn_fp16_1x_coco_20200204_143530.log.json) | +| R-50-FPN | pytorch | 2x | - | - | 38.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_20200504_210434.log.json) | +| R-101-FPN | caffe | 1x | 5.7 | | 39.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco/faster_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.398_20200504_180057-b269e9dd.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco/faster_rcnn_r101_caffe_fpn_1x_coco_20200504_180057.log.json) | +| R-101-FPN | pytorch | 1x | 6.0 | 15.6 | 39.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_1x_coco/faster_rcnn_r101_fpn_1x_coco_20200130-f513f705.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_1x_coco/faster_rcnn_r101_fpn_1x_coco_20200130_204655.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 39.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_2x_coco/faster_rcnn_r101_fpn_2x_coco_bbox_mAP-0.398_20200504_210455-1d2dac9c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_2x_coco/faster_rcnn_r101_fpn_2x_coco_20200504_210455.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.2 | 13.8 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco/faster_rcnn_x101_32x4d_fpn_1x_coco_20200203-cff10310.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco/faster_rcnn_x101_32x4d_fpn_1x_coco_20200203_000520.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco/faster_rcnn_x101_32x4d_fpn_2x_coco_bbox_mAP-0.412_20200506_041400-64a12c0b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco/faster_rcnn_x101_32x4d_fpn_2x_coco_20200506_041400.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.3 | 9.4 | 42.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco/faster_rcnn_x101_64x4d_fpn_1x_coco_20200204-833ee192.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco/faster_rcnn_x101_64x4d_fpn_1x_coco_20200204_134340.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 41.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco/faster_rcnn_x101_64x4d_fpn_2x_coco_20200512_161033-5961fa95.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco/faster_rcnn_x101_64x4d_fpn_2x_coco_20200512_161033.log.json) | + +## Different regression loss + +We trained with R-50-FPN pytorch style backbone for 1x schedule. 
+ +| Backbone | Loss type | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-------: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | L1Loss | 4.0 | 21.4 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +| R-50-FPN | IoULoss | | | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_iou_1x_coco-fdd207f3.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_iou_1x_coco_20200506_095954.log.json) | +| R-50-FPN | GIoULoss | | | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_giou_1x_coco-0eada910.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_giou_1x_coco_20200505_161120.log.json) | +| R-50-FPN | BoundedIoULoss | | | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_bounded_iou_1x_coco-98ad993b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_bounded_iou_1x_coco_20200505_160738.log.json) | + +## Pre-trained Models + +We also train some models with longer schedules and multi-scale training. The users could finetune them for downstream tasks. 
+ +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| [R-50-DC5](./faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py) | caffe | 1x | - | | 37.4 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco_20201028_233851-b33d21b9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco_20201028_233851.log.json) | +| [R-50-DC5](./faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py) | caffe | 3x | - | | 38.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco_20201028_002107-34a53b2c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco_20201028_002107.log.json) | +| [R-50-FPN](./faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py) | caffe | 2x | 3.7 | | 39.7 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco_bbox_mAP-0.397_20200504_231813-10b2de58.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco_20200504_231813.log.json) | +| [R-50-FPN](./faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py) | caffe | 3x | 3.7 | | 39.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_20210526_095054-1f77628b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_20210526_095054.log.json) | +| [R-50-FPN](./faster_rcnn_r50_fpn_mstrain_3x_coco.py) | pytorch | 3x | 3.9 | | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco/faster_rcnn_r50_fpn_mstrain_3x_coco_20210524_110822-e10bd31c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco/faster_rcnn_r50_fpn_mstrain_3x_coco_20210524_110822.log.json) | +| [R-101-FPN](./faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py) | caffe | 3x | 5.6 | | 42.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco_20210526_095742-a7ae426d.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco_20210526_095742.log.json) | +| [R-101-FPN](./faster_rcnn_r101_fpn_mstrain_3x_coco.py) | pytorch | 3x | 5.8 | | 41.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco/faster_rcnn_r101_fpn_mstrain_3x_coco_20210524_110822-4d4d2ca8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco/faster_rcnn_r101_fpn_mstrain_3x_coco_20210524_110822.log.json) | +| [X-101-32x4d-FPN](./faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py) | pytorch | 3x | 7.0 | | 42.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco_20210524_124151-16b9b260.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco_20210524_124151.log.json) | +| [X-101-32x8d-FPN](./faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py) | pytorch | 3x | 10.1 | | 42.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco_20210604_182954-002e082a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco_20210604_182954.log.json) | +| [X-101-64x4d-FPN](./faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py) | pytorch | 3x | 10.0 | | 43.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco_20210524_124528-26c63de6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco_20210524_124528.log.json) | + +We further finetune some pre-trained models on the COCO subsets, which contain only a few of the 80 categories. 
+ +| Backbone | Style | Class name | Pre-trained model | Mem (GB) | box AP | Config | Download | +| ------------------------------------------------------------ | ----- | ------------------ | ------------------------------------------------------------ | -------- | ------ | ------------------------------------------------------------ | ------------------------------------------------------------ | +| [R-50-FPN](./faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py) | caffe | person | [R-50-FPN-Caffe-3x](./faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py) | 3.7 | 55.8 | [config](./faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person/faster_rcnn_r50_fpn_1x_coco-person_20201216_175929-d022e227.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person/faster_rcnn_r50_fpn_1x_coco-person_20201216_175929.log.json) | +| [R-50-FPN](./faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py) | caffe | person-bicycle-car | [R-50-FPN-Caffe-3x](./faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py) | 3.7 | 44.1 | [config](./faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person-bicycle-car/faster_rcnn_r50_fpn_1x_coco-person-bicycle-car_20201216_173117-6eda6d92.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person-bicycle-car/faster_rcnn_r50_fpn_1x_coco-person-bicycle-car_20201216_173117.log.json) | diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c6f078c771d7b7188a2d66ae73b56206c3e84a95 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './faster_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6a13fe9ff692d18927f9ada0604e675b2cd0bea9 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,49 @@ +_base_ = 'faster_rcnn_r50_fpn_mstrain_3x_coco.py' + +model = dict( + backbone=dict( + depth=101, + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + 
flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1de53a6cdfcd64541c2ddf0f4f699b7f8d003029 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0d41599430ae5ca371969076c6d53706ae92e975 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0b498bb687c6d3ac941061584aeba3653df97fe1 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco.py @@ -0,0 +1,7 @@ +_base_ = 'faster_rcnn_r50_fpn_mstrain_3x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..92344a151be9af53659845b51e4ece7f0a7b636f --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco.py @@ -0,0 +1,39 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_caffe_c4.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + 
val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ee2010c64a4c24e18b81c0be7e002ea474c57a44 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py @@ -0,0 +1,37 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_caffe_dc5.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..14eaef2dffea606027001b69d12d11cb46693e1c --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py @@ -0,0 +1,42 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_caffe_dc5.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py 
b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..403747f127e0f7a301771e53e75bf0e83a1736c9 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..56c01bdcf55cbbb18b7519a46c9b8ce18797011a --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,41 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_90k_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_90k_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b5aea6a7275c651b65654893957a4e3312ceb293 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_90k_coco.py @@ -0,0 +1,15 @@ +_base_ = 'faster_rcnn_r50_caffe_fpn_1x_coco.py' + +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[60000, 80000]) + +# Runner type +runner = dict(_delete_=True, type='IterBasedRunner', max_iters=90000) + +checkpoint_config = dict(interval=10000) +evaluation = dict(interval=10000, metric='bbox') diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py new file mode 100644 index 0000000000000000000000000000000000000000..4f1f376c33a0ad884a8930833c6205339966f82b --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py @@ -0,0 +1,9 @@ +_base_ = './faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' +model = dict(roi_head=dict(bbox_head=dict(num_classes=3))) +classes = ('person', 'bicycle', 'car') +data = dict( + train=dict(classes=classes), + 
val=dict(classes=classes), + test=dict(classes=classes)) + +load_from = 'https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_bbox_mAP-0.398_20200504_163323-30042637.pth' # noqa diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py new file mode 100644 index 0000000000000000000000000000000000000000..b5dfb4fe447472b2fabb7d193778dbf2fbf2ce25 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py @@ -0,0 +1,9 @@ +_base_ = './faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' +model = dict(roi_head=dict(bbox_head=dict(num_classes=1))) +classes = ('person', ) +data = dict( + train=dict(classes=classes), + val=dict(classes=classes), + test=dict(classes=classes)) + +load_from = 'https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_bbox_mAP-0.398_20200504_163323-30042637.pth' # noqa diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f807a19abce803dd99f82c5d1c4cec502d16253f --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py @@ -0,0 +1,46 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..df58973fc009949d37e8a87e4d3ac39e2c313c65 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git 
a/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9eeaaceaf5e7533105f83b736ca7ce454159aedb --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,47 @@ +_base_ = 'faster_rcnn_r50_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_90k_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_90k_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..74dca24f26422967501e7ba31c3f39ca324e031c --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_90k_coco.py @@ -0,0 +1,15 @@ +_base_ = 'faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' + +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[60000, 80000]) + +# Runner type +runner = dict(_delete_=True, type='IterBasedRunner', max_iters=90000) + +checkpoint_config = dict(interval=10000) +evaluation = dict(interval=10000, metric='bbox') diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..009bd93d06b3284c7b31f33f82d636f774e86b74 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e77a7fa8d6b8c1ad7fe293bc932d621464287e0c --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + 
'../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..648081f19ca7d3ca9a7362a4a41e514d753ce4e8 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_head=dict( + reg_decoded_bbox=True, + loss_bbox=dict(type='BoundedIoULoss', loss_weight=10.0)))) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_ciou_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_ciou_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..886d5668c3df24a24baa112a3b0aefb15face892 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_ciou_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_head=dict( + reg_decoded_bbox=True, + loss_bbox=dict(type='CIoULoss', loss_weight=12.0)))) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_fp16_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_fp16_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..acd4040c979b2b83e456e5b2f58b9f4514af972a --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_fp16_1x_coco.py @@ -0,0 +1,3 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +# fp16 settings +fp16 = dict(loss_scale=512.) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5556c4977e221182b013b68fef4b73d1b0605bf3 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_head=dict( + reg_decoded_bbox=True, + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)))) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ddf663e4f0e1525490a493674b32b3dc4c781bb2 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_head=dict( + reg_decoded_bbox=True, + loss_bbox=dict(type='IoULoss', loss_weight=10.0)))) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..faf8f92437d839eda456187a29827907a5a9532b --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco.py @@ -0,0 +1,3 @@ +_base_ = [ + '../common/mstrain_3x_coco.py', '../_base_/models/faster_rcnn_r50_fpn.py' +] diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f897e7c55c8b8f0ef7a5db92f29ef1c2415965db --- /dev/null 
+++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict(train_cfg=dict(rcnn=dict(sampler=dict(type='OHEMSampler')))) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_soft_nms_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_soft_nms_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..759ae3a7acec07daa75213835f1bc41d5c6de4a5 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_r50_fpn_soft_nms_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + test_cfg=dict( + rcnn=dict( + score_thr=0.05, + nms=dict(type='soft_nms', iou_threshold=0.5), + max_per_img=100))) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3808c9f2870d632feae36e521d0537141b7271d5 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e93f5d8173dd4b22c1022dadf5258e455d4b3fd5 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './faster_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f55985d61cec9aff95c78c8e287baad6ba1300d9 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py @@ -0,0 +1,16 @@ +_base_ = [ + '../common/mstrain_3x_coco.py', '../_base_/models/faster_rcnn_r50_fpn.py' +] +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a5d5aebbdebb63b89dcac9e8bf4a4e88f5d980d3 --- /dev/null +++ 
b/object_detection/configs/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py @@ -0,0 +1,62 @@ +_base_ = [ + '../common/mstrain_3x_coco.py', '../_base_/models/faster_rcnn_r50_fpn.py' +] +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=8, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnext101_32x8d'))) + +# ResNeXt-101-32x8d model trained with Caffe2 at FB, +# so the mean and std need to be changed. +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8bf2b65a6a97173e2cb563c8f79c501936a2ee09 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7ea9b2da14da6b86f3497bfc3c56862a5c05730b --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './faster_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py b/object_detection/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..80397f4b18acb094f8f6e132ea21050c75b2de48 --- /dev/null +++ b/object_detection/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py @@ -0,0 +1,16 @@ +_base_ = [ + '../common/mstrain_3x_coco.py', '../_base_/models/faster_rcnn_r50_fpn.py' +] +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/faster_rcnn/metafile.yml b/object_detection/configs/faster_rcnn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..5178a3b47ad9f6a82af5a3d5fcb9943b82646627 --- /dev/null +++ b/object_detection/configs/faster_rcnn/metafile.yml @@ -0,0 +1,407 @@ +Collections: + - Name: Faster R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - RPN + - ResNet + - RoIPool + Paper: + URL: https://arxiv.org/abs/1506.01497 + Title: "Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks" + README: configs/faster_rcnn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/faster_rcnn.py#L6 + Version: v2.0.0 + +Models: + - Name: faster_rcnn_r50_caffe_dc5_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco/faster_rcnn_r50_caffe_dc5_1x_coco_20201030_151909-531f0f43.pth + + - Name: faster_rcnn_r50_caffe_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.8 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco/faster_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.378_20200504_180032-c5925ee5.pth + + - Name: faster_rcnn_r50_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.0 + inference time (ms/im): + - value: 46.73 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth + + - Name: faster_rcnn_r50_fpn_fp16_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_fp16_1x_coco.py + Metadata: + Training Memory (GB): 3.4 + Training Techniques: + - SGD with Momentum + - Weight Decay + - Mixed Precision Training + inference time (ms/im): + - value: 34.72 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP16 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fp16/faster_rcnn_r50_fpn_fp16_1x_coco/faster_rcnn_r50_fpn_fp16_1x_coco_20200204-d4dc1471.pth + + - Name: 
faster_rcnn_r50_fpn_2x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py + Metadata: + Training Memory (GB): 4.0 + inference time (ms/im): + - value: 46.73 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth + + - Name: faster_rcnn_r101_caffe_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.7 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco/faster_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.398_20200504_180057-b269e9dd.pth + + - Name: faster_rcnn_r101_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 64.1 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_1x_coco/faster_rcnn_r101_fpn_1x_coco_20200130-f513f705.pth + + - Name: faster_rcnn_r101_fpn_2x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 64.1 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_2x_coco/faster_rcnn_r101_fpn_2x_coco_bbox_mAP-0.398_20200504_210455-1d2dac9c.pth + + - Name: faster_rcnn_x101_32x4d_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.2 + inference time (ms/im): + - value: 72.46 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco/faster_rcnn_x101_32x4d_fpn_1x_coco_20200203-cff10310.pth + + - Name: faster_rcnn_x101_32x4d_fpn_2x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 7.2 + inference time (ms/im): + - value: 72.46 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco/faster_rcnn_x101_32x4d_fpn_2x_coco_bbox_mAP-0.412_20200506_041400-64a12c0b.pth + + - Name: faster_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.3 + inference time (ms/im): + - value: 106.38 + 
hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco/faster_rcnn_x101_64x4d_fpn_1x_coco_20200204-833ee192.pth + + - Name: faster_rcnn_x101_64x4d_fpn_2x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 10.3 + inference time (ms/im): + - value: 106.38 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco/faster_rcnn_x101_64x4d_fpn_2x_coco_20200512_161033-5961fa95.pth + + - Name: faster_rcnn_r50_fpn_iou_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_iou_1x_coco-fdd207f3.pth + + - Name: faster_rcnn_r50_fpn_giou_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_giou_1x_coco-0eada910.pth + + - Name: faster_rcnn_r50_fpn_bounded_iou_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_bounded_iou_1x_coco-98ad993b.pth + + - Name: faster_rcnn_r50_caffe_dc5_mstrain_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco_20201028_233851-b33d21b9.pth + + - Name: faster_rcnn_r50_caffe_dc5_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco_20201028_002107-34a53b2c.pth + + - Name: faster_rcnn_r50_caffe_fpn_mstrain_2x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py + Metadata: + Training Memory (GB): 4.3 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco_bbox_mAP-0.397_20200504_231813-10b2de58.pth + + - Name: 
faster_rcnn_r50_caffe_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 3.7 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_20210526_095054-1f77628b.pth + + - Name: faster_rcnn_r50_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 3.9 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco/faster_rcnn_r50_fpn_mstrain_3x_coco_20210524_110822-e10bd31c.pth + + - Name: faster_rcnn_r101_caffe_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.6 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco_20210526_095742-a7ae426d.pth + + - Name: faster_rcnn_r101_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.8 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco/faster_rcnn_r101_fpn_mstrain_3x_coco_20210524_110822-4d4d2ca8.pth + + - Name: faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 7.0 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco_20210524_124151-16b9b260.pth + + - Name: faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 10.1 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco_20210604_182954-002e082a.pth + + - Name: faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 10.0 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco_20210524_124528-26c63de6.pth diff --git a/object_detection/configs/fcos/README.md b/object_detection/configs/fcos/README.md new file mode 100644 index 0000000000000000000000000000000000000000..fc5ca545b4eedf04a88e4b48224fc741975f15ce --- /dev/null +++ 
b/object_detection/configs/fcos/README.md @@ -0,0 +1,49 @@ +# FCOS: Fully Convolutional One-Stage Object Detection + +## Abstract + + + +We propose a fully convolutional one-stage object detector (FCOS) to solve object detection in a per-pixel prediction fashion, analogous to semantic segmentation. Almost all state-of-the-art object detectors such as RetinaNet, SSD, YOLOv3, and Faster R-CNN rely on pre-defined anchor boxes. In contrast, our proposed detector FCOS is anchor-box free, as well as proposal free. By eliminating the predefined set of anchor boxes, FCOS completely avoids the complicated computation related to anchor boxes, such as calculating overlaps during training. More importantly, we also avoid all hyper-parameters related to anchor boxes, which are often very sensitive to the final detection performance. With non-maximum suppression (NMS) as the only post-processing step, FCOS with ResNeXt-64x4d-101 achieves 44.7% AP with single-model and single-scale testing, surpassing previous one-stage detectors while being much simpler. For the first time, we demonstrate a much simpler and more flexible detection framework that achieves improved detection accuracy. We hope that the proposed FCOS framework can serve as a simple and strong alternative for many other instance-level tasks. + + +
+ +
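To make the per-pixel formulation above concrete, here is a minimal sketch (ours, not part of this repository) of how FCOS-style regression targets and the centerness score can be computed for a single feature-map location, assuming the location `(x, y)` falls inside a ground-truth box `(x1, y1, x2, y2)`:

```python
import math

def fcos_targets(x, y, box):
    """Illustrative per-location FCOS targets for one ground-truth box.

    x, y: coordinates of a feature-map location mapped back to the image.
    box:  (x1, y1, x2, y2) ground-truth box containing (x, y).
    Returns the (l, t, r, b) regression target and the centerness score
    used to down-weight low-quality predictions far from the box center.
    """
    x1, y1, x2, y2 = box
    l, t = x - x1, y - y1   # distances to the left / top edges
    r, b = x2 - x, y2 - y   # distances to the right / bottom edges
    centerness = math.sqrt(
        (min(l, r) / max(l, r)) * (min(t, b) / max(t, b)))
    return (l, t, r, b), centerness

# A location near the box center gets centerness close to 1,
# a location near a border gets a value close to 0.
print(fcos_targets(50, 40, (0, 0, 100, 80)))  # -> (50, 40, 50, 40), 1.0
print(fcos_targets(5, 40, (0, 0, 100, 80)))   # -> (5, 40, 95, 40), ~0.23
```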
+ + + + +## Citation + + + +```latex +@article{tian2019fcos, + title={FCOS: Fully Convolutional One-Stage Object Detection}, + author={Tian, Zhi and Shen, Chunhua and Chen, Hao and He, Tong}, + journal={arXiv preprint arXiv:1904.01355}, + year={2019} +} +``` + +## Results and Models + +| Backbone | Style | GN | MS train | Tricks | DCN | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:-------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | caffe | Y | N | N | N | 1x | 3.6 | 22.7 | 36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco/fcos_r50_caffe_fpn_gn-head_1x_coco-821213aa.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco/20201227_180009.log.json) | +| R-50 | caffe | Y | N | Y | N | 1x | 3.7 | - | 38.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco-0a0d75a8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco/20210105_135818.log.json)| +| R-50 | caffe | Y | N | Y | Y | 1x | 3.8 | - | 42.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco-ae4d8b3d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco/20210105_224556.log.json)| +| R-101 | caffe | Y | N | N | N | 1x | 5.5 | 17.3 | 39.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco/fcos_r101_caffe_fpn_gn-head_1x_coco-0e37b982.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco/20210103_155046.log.json) | + +| Backbone | Style | GN | MS train | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | caffe | Y | Y | 2x | 2.6 | 22.9 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco-d92ceeea.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco/20201227_161900.log.json) | +| R-101 | caffe | Y | Y | 2x | 5.5 | 17.3 | 40.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco-511424d6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco/20210103_155046.log.json) | +| X-101 | pytorch | Y | Y | 2x | 10.0 | 9.7 | 42.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco-ede514a8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco/20210114_133041.log.json) | + +**Notes:** + +- The X-101 backbone is X-101-64x4d. +- Tricks means setting `norm_on_bbox`, `centerness_on_reg`, `center_sampling` as `True`. +- DCN means using `DCNv2` in both backbone and head. diff --git a/object_detection/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py b/object_detection/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2699bdb979bdf2dce3f4f26946304aa1ed2f4751 --- /dev/null +++ b/object_detection/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py @@ -0,0 +1,54 @@ +_base_ = 'fcos_r50_caffe_fpn_gn-head_1x_coco.py' + +model = dict( + backbone=dict( + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + bbox_head=dict( + norm_on_bbox=True, + centerness_on_reg=True, + dcn_on_last_conv=False, + center_sampling=True, + conv_bias=True, + loss_bbox=dict(type='GIoULoss', loss_weight=1.0)), + # training and testing settings + test_cfg=dict(nms=dict(type='nms', iou_threshold=0.6))) + +# dataset settings +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer_config = dict(_delete_=True, grad_clip=None) + +lr_config = dict(warmup='linear') diff --git a/object_detection/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py b/object_detection/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cf93c91e7128c277d1263b680beb108cfadbbc49 --- /dev/null +++ b/object_detection/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py 
@@ -0,0 +1,56 @@ +_base_ = 'fcos_r50_caffe_fpn_gn-head_1x_coco.py' + +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + bbox_head=dict( + norm_on_bbox=True, + centerness_on_reg=True, + dcn_on_last_conv=True, + center_sampling=True, + conv_bias=True, + loss_bbox=dict(type='GIoULoss', loss_weight=1.0)), + # training and testing settings + test_cfg=dict(nms=dict(type='nms', iou_threshold=0.6))) + +# dataset settings +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer_config = dict(_delete_=True, grad_clip=None) + +lr_config = dict(warmup='linear') diff --git a/object_detection/configs/fcos/fcos_center_r50_caffe_fpn_gn-head_1x_coco.py b/object_detection/configs/fcos/fcos_center_r50_caffe_fpn_gn-head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9f502e7b465f789a90100d96e881c60c84d9bf91 --- /dev/null +++ b/object_detection/configs/fcos/fcos_center_r50_caffe_fpn_gn-head_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_1x_coco.py' +model = dict(bbox_head=dict(center_sampling=True, center_sample_radius=1.5)) diff --git a/object_detection/configs/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py b/object_detection/configs/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..45bea48dc38881d0a0f41ef820723a1ac854c854 --- /dev/null +++ b/object_detection/configs/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet101_caffe'))) diff --git a/object_detection/configs/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py b/object_detection/configs/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f4d36f1eeed8fe152c2c4cad702d3736bc56172c --- /dev/null +++ b/object_detection/configs/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py @@ -0,0 +1,47 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet101_caffe'))) +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + 
dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco.py b/object_detection/configs/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..955787bab9413f93908cc4542da89f1bdd31c492 --- /dev/null +++ b/object_detection/configs/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco.py @@ -0,0 +1,106 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='FCOS', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet50_caffe')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', # use P5 + num_outs=5, + relu_before_extra_convs=True), + bbox_head=dict( + type='FCOSHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', loss_weight=1.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100)) +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + 
dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='constant', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[8, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/object_detection/configs/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py b/object_detection/configs/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2816b16f64dbcbfecd779650aaae0ca6cee0d810 --- /dev/null +++ b/object_detection/configs/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py @@ -0,0 +1,4 @@ +# TODO: Remove this config after benchmarking all related configs +_base_ = 'fcos_r50_caffe_fpn_gn-head_1x_coco.py' + +data = dict(samples_per_gpu=4, workers_per_gpu=4) diff --git a/object_detection/configs/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py b/object_detection/configs/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..497d03f6f702ecb47cccbe0089089b5a002ebcca --- /dev/null +++ b/object_detection/configs/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py @@ -0,0 +1,39 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_1x_coco.py' +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco.py b/object_detection/configs/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e70e4651230cbf58129b139d30de68c35e9c0e2d --- /dev/null +++ b/object_detection/configs/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco.py @@ -0,0 +1,60 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + 
norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/fcos/metafile.yml b/object_detection/configs/fcos/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..ae922eb9faf9b9705f8ac66083b466a540f1ff40 --- /dev/null +++ b/object_detection/configs/fcos/metafile.yml @@ -0,0 +1,146 @@ +Collections: + - Name: FCOS + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - Group Normalization + - ResNet + Paper: + URL: https://arxiv.org/abs/1904.01355 + Title: 'FCOS: Fully Convolutional One-Stage Object Detection' + README: configs/fcos/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/fcos.py#L6 + Version: v2.0.0 + +Models: + - Name: fcos_r50_caffe_fpn_gn-head_1x_coco + In Collection: FCOS + Config: configs/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco.py + Metadata: + Training Memory (GB): 3.6 + inference time (ms/im): + - value: 44.05 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco/fcos_r50_caffe_fpn_gn-head_1x_coco-821213aa.pth + + - Name: fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco + In Collection: FCOS + Config: configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py + Metadata: + Training Memory (GB): 3.7 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco-0a0d75a8.pth + + - Name: fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco + In Collection: FCOS + Config: 
configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py + Metadata: + Training Memory (GB): 3.8 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco-ae4d8b3d.pth + + - Name: fcos_r101_caffe_fpn_gn-head_1x_coco + In Collection: FCOS + Config: configs/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py + Metadata: + Training Memory (GB): 5.5 + inference time (ms/im): + - value: 57.8 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco/fcos_r101_caffe_fpn_gn-head_1x_coco-0e37b982.pth + + - Name: fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco + In Collection: FCOS + Config: configs/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py + Metadata: + Training Memory (GB): 2.6 + inference time (ms/im): + - value: 43.67 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco-d92ceeea.pth + + - Name: fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco + In Collection: FCOS + Config: configs/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py + Metadata: + Training Memory (GB): 5.5 + inference time (ms/im): + - value: 57.8 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco-511424d6.pth + + - Name: fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco + In Collection: FCOS + Config: configs/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco.py + Metadata: + Training Memory (GB): 10.0 + inference time (ms/im): + - value: 103.09 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco-ede514a8.pth diff --git a/object_detection/configs/foveabox/README.md b/object_detection/configs/foveabox/README.md new file mode 100644 index 0000000000000000000000000000000000000000..7009e97fd280af81f47f3085e836e95d9f154167 --- /dev/null +++ b/object_detection/configs/foveabox/README.md @@ -0,0 +1,57 @@ +# FoveaBox: Beyond Anchor-based Object Detector + +## Abstract + + + +We present FoveaBox, an accurate, flexible, and completely anchor-free framework for object detection. While almost all state-of-the-art object detectors utilize predefined anchors to enumerate possible locations, scales and aspect ratios for the search of the objects, their performance and generalization ability are also limited to the design of anchors. 
Instead, FoveaBox directly learns the object existing possibility and the bounding box coordinates without anchor reference. This is achieved by: (a) predicting category-sensitive semantic maps for the object existing possibility, and (b) producing a category-agnostic bounding box for each position that potentially contains an object. The scales of target boxes are naturally associated with feature pyramid representations. In FoveaBox, an instance is assigned to adjacent feature levels to make the model more accurate. We demonstrate its effectiveness on standard benchmarks and report extensive experimental analysis. Without bells and whistles, FoveaBox achieves state-of-the-art single-model performance on the standard COCO and Pascal VOC object detection benchmarks. More importantly, FoveaBox avoids all computation and hyper-parameters related to anchor boxes, which are often sensitive to the final detection performance. We believe this simple and effective approach will serve as a solid baseline and help ease future research for object detection. + + +
+ +
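As a rough, illustrative sketch of the two outputs described above — category-sensitive semantic maps and a category-agnostic box per position — the snippet below builds a single-level prediction head in PyTorch; the module name and channel sizes are our assumptions and are not taken from the configs in this repository:

```python
import torch
import torch.nn as nn

class TinyFoveaHead(nn.Module):
    """Illustrative single-level head: per-class score maps + per-position boxes."""

    def __init__(self, in_channels=256, num_classes=80):
        super().__init__()
        # (a) category-sensitive semantic map: one score per class per location
        self.cls_conv = nn.Conv2d(in_channels, num_classes, 3, padding=1)
        # (b) category-agnostic box: 4 offsets per location
        self.reg_conv = nn.Conv2d(in_channels, 4, 3, padding=1)

    def forward(self, feat):
        cls_map = self.cls_conv(feat).sigmoid()  # object "existing possibility"
        box_map = self.reg_conv(feat).exp()      # positive box offsets
        return cls_map, box_map

# One FPN level of shape (N, 256, H, W) -> (N, 80, H, W) scores, (N, 4, H, W) boxes.
feat = torch.randn(2, 256, 50, 68)
cls_map, box_map = TinyFoveaHead()(feat)
print(cls_map.shape, box_map.shape)
```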
+ + + + + + +## Introduction + +FoveaBox is an accurate, flexible and completely anchor-free object detection system for object detection framework, as presented in our paper [https://arxiv.org/abs/1904.03797](https://arxiv.org/abs/1904.03797): +Different from previous anchor-based methods, FoveaBox directly learns the object existing possibility and the bounding box coordinates without anchor reference. This is achieved by: (a) predicting category-sensitive semantic maps for the object existing possibility, and (b) producing category-agnostic bounding box for each position that potentially contains an object. + +## Main Results + +### Results on R50/101-FPN + +| Backbone | Style | align | ms-train| Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | N | N | 1x | 5.6 | 24.1 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_r50_fpn_4x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_1x_coco/fovea_r50_fpn_4x4_1x_coco_20200219-ee4d5303.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_1x_coco/fovea_r50_fpn_4x4_1x_coco_20200219_223025.log.json) | +| R-50 | pytorch | N | N | 2x | 5.6 | - | 37.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_r50_fpn_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_2x_coco/fovea_r50_fpn_4x4_2x_coco_20200203-2df792b1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_2x_coco/fovea_r50_fpn_4x4_2x_coco_20200203_112043.log.json) | +| R-50 | pytorch | Y | N | 2x | 8.1 | 19.4 | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco/fovea_align_r50_fpn_gn-head_4x4_2x_coco_20200203-8987880d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco/fovea_align_r50_fpn_gn-head_4x4_2x_coco_20200203_134252.log.json) | +| R-50 | pytorch | Y | Y | 2x | 8.1 | 18.3 | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200205-85ce26cb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200205_112557.log.json) | +| R-101 | pytorch | N | N | 1x | 9.2 | 17.4 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_r101_fpn_4x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_1x_coco/fovea_r101_fpn_4x4_1x_coco_20200219-05e38f1c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_1x_coco/fovea_r101_fpn_4x4_1x_coco_20200219_011740.log.json) | +| R-101 | pytorch | N | N | 2x | 11.7 | - | 40.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_r101_fpn_4x4_2x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_2x_coco/fovea_r101_fpn_4x4_2x_coco_20200208-02320ea4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_2x_coco/fovea_r101_fpn_4x4_2x_coco_20200208_202059.log.json) | +| R-101 | pytorch | Y | N | 2x | 11.7 | 14.7 | 40.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco/fovea_align_r101_fpn_gn-head_4x4_2x_coco_20200208-c39a027a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco/fovea_align_r101_fpn_gn-head_4x4_2x_coco_20200208_203337.log.json) | +| R-101 | pytorch | Y | Y | 2x | 11.7 | 14.7 | 42.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200208-649c5eb6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200208_202124.log.json) | + +[1] *1x and 2x mean the model is trained for 12 and 24 epochs, respectively.* \ +[2] *Align means utilizing deformable convolution to align the cls branch.* \ +[3] *All results are obtained with a single model and without any test time data augmentation.*\ +[4] *We use 4 GPUs for training.* + +Any pull requests or issues are welcome. + +## Citations + +Please consider citing our paper in your publications if the project helps your research. BibTeX reference is as follows. 
+ +```latex +@article{kong2019foveabox, + title={FoveaBox: Beyond Anchor-based Object Detector}, + author={Kong, Tao and Sun, Fuchun and Liu, Huaping and Jiang, Yuning and Shi, Jianbo}, + journal={arXiv preprint arXiv:1904.03797}, + year={2019} +} +``` diff --git a/object_detection/configs/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py b/object_detection/configs/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c5d178492d1031f03915e5a8e273f2b4b12a7e97 --- /dev/null +++ b/object_detection/configs/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py @@ -0,0 +1,12 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + bbox_head=dict( + with_deform=True, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py b/object_detection/configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cc5affefe85150d8e8d372920221e00c27646375 --- /dev/null +++ b/object_detection/configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,29 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + bbox_head=dict( + with_deform=True, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py b/object_detection/configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e7265bcdbef2a7ab5e8ba6b3fe13f02cb718b40a --- /dev/null +++ b/object_detection/configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py @@ -0,0 +1,10 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + bbox_head=dict( + with_deform=True, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py b/object_detection/configs/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8fc39beaac540a8d3e00bf968f1af08450f9d4cc --- /dev/null +++ 
b/object_detection/configs/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,25 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + bbox_head=dict( + with_deform=True, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/foveabox/fovea_r101_fpn_4x4_1x_coco.py b/object_detection/configs/foveabox/fovea_r101_fpn_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9201af11b88f4c161730f43e957c4d9c53828262 --- /dev/null +++ b/object_detection/configs/foveabox/fovea_r101_fpn_4x4_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/foveabox/fovea_r101_fpn_4x4_2x_coco.py b/object_detection/configs/foveabox/fovea_r101_fpn_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1ef5243f93f5df47d9f1dab318655ea757e6c676 --- /dev/null +++ b/object_detection/configs/foveabox/fovea_r101_fpn_4x4_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './fovea_r50_fpn_4x4_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/foveabox/fovea_r50_fpn_4x4_1x_coco.py b/object_detection/configs/foveabox/fovea_r50_fpn_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7e986ebcd59f0fe59c760739d291a693f9b7a02e --- /dev/null +++ b/object_detection/configs/foveabox/fovea_r50_fpn_4x4_1x_coco.py @@ -0,0 +1,52 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='FOVEA', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + num_outs=5, + add_extra_convs='on_input'), + bbox_head=dict( + type='FoveaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + base_edge_list=[16, 32, 64, 128, 256], + scale_ranges=((1, 64), (32, 128), (64, 256), (128, 512), (256, 2048)), + sigma=0.4, + with_deform=False, + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=1.50, + alpha=0.4, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0)), + # training and testing settings + train_cfg=dict(), + test_cfg=dict( + nms_pre=1000, + score_thr=0.05, + nms=dict(type='nms', 
iou_threshold=0.5), + max_per_img=100)) +data = dict(samples_per_gpu=4, workers_per_gpu=4) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/foveabox/fovea_r50_fpn_4x4_2x_coco.py b/object_detection/configs/foveabox/fovea_r50_fpn_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..68ce4d250ac673a274d1458963eb02614e4f5f98 --- /dev/null +++ b/object_detection/configs/foveabox/fovea_r50_fpn_4x4_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/foveabox/metafile.yml b/object_detection/configs/foveabox/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..fe9a2834643d215a9ff7e6a200d06a362a8a1a4b --- /dev/null +++ b/object_detection/configs/foveabox/metafile.yml @@ -0,0 +1,172 @@ +Collections: + - Name: FoveaBox + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 4x V100 GPUs + Architecture: + - FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/1904.03797 + Title: 'FoveaBox: Beyond Anchor-based Object Detector' + README: configs/foveabox/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/fovea.py#L6 + Version: v2.0.0 + +Models: + - Name: fovea_r50_fpn_4x4_1x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_r50_fpn_4x4_1x_coco.py + Metadata: + Training Memory (GB): 5.6 + inference time (ms/im): + - value: 41.49 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_1x_coco/fovea_r50_fpn_4x4_1x_coco_20200219-ee4d5303.pth + + - Name: fovea_r50_fpn_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_r50_fpn_4x4_2x_coco.py + Metadata: + Training Memory (GB): 5.6 + inference time (ms/im): + - value: 41.49 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_2x_coco/fovea_r50_fpn_4x4_2x_coco_20200203-2df792b1.pth + + - Name: fovea_align_r50_fpn_gn-head_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py + Metadata: + Training Memory (GB): 8.1 + inference time (ms/im): + - value: 51.55 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco/fovea_align_r50_fpn_gn-head_4x4_2x_coco_20200203-8987880d.pth + + - Name: fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py + Metadata: + Training Memory (GB): 8.1 + inference time (ms/im): + - value: 54.64 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + 
Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200205-85ce26cb.pth + + - Name: fovea_r101_fpn_4x4_1x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_r101_fpn_4x4_1x_coco.py + Metadata: + Training Memory (GB): 9.2 + inference time (ms/im): + - value: 57.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_1x_coco/fovea_r101_fpn_4x4_1x_coco_20200219-05e38f1c.pth + + - Name: fovea_r101_fpn_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_r101_fpn_4x4_2x_coco.py + Metadata: + Training Memory (GB): 11.7 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_2x_coco/fovea_r101_fpn_4x4_2x_coco_20200208-02320ea4.pth + + - Name: fovea_align_r101_fpn_gn-head_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py + Metadata: + Training Memory (GB): 11.7 + inference time (ms/im): + - value: 68.03 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco/fovea_align_r101_fpn_gn-head_4x4_2x_coco_20200208-c39a027a.pth + + - Name: fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py + Metadata: + Training Memory (GB): 11.7 + inference time (ms/im): + - value: 68.03 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200208-649c5eb6.pth diff --git a/object_detection/configs/fpg/README.md b/object_detection/configs/fpg/README.md new file mode 100644 index 0000000000000000000000000000000000000000..dd69c613afbb253b97226e467db40e8907dc1646 --- /dev/null +++ b/object_detection/configs/fpg/README.md @@ -0,0 +1,44 @@ +# Feature Pyramid Grids + + +## Abstract + + + +Feature pyramid networks have been widely adopted in the object detection literature to improve feature representations for better handling of variations in scale. In this paper, we present Feature Pyramid Grids (FPG), a deep multi-pathway feature pyramid, that represents the feature scale-space as a regular grid of parallel bottom-up pathways which are fused by multi-directional lateral connections. FPG can improve single-pathway feature pyramid networks by significantly increasing its performance at similar computation cost, highlighting importance of deep pyramid representations. In addition to its general and uniform structure, over complicated structures that have been found with neural architecture search, it also compares favorably against such approaches without relying on search. 
We hope that FPG with its uniform and effective nature can serve as a strong component for future work in object recognition. + + +
+ +
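For reference, the configs and checkpoints listed in the results table below can be loaded with MMDetection's high-level inference API. This is only a usage sketch under the assumption of an MMDetection 2.x installation matching the vendored `object_detection` configs; the local file paths are placeholders:

```python
# Minimal inference sketch for one of the FPG models listed below.
# Download the checkpoint from the table first; paths here are placeholders.
from mmdet.apis import init_detector, inference_detector

config_file = 'object_detection/configs/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py'
checkpoint_file = 'checkpoints/faster_rcnn_r50_fpg_crop640_50e_coco-76220505.pth'

model = init_detector(config_file, checkpoint_file, device='cuda:0')
result = inference_detector(model, 'demo.jpg')  # per-class list of [x1, y1, x2, y2, score]
model.show_result('demo.jpg', result, out_file='demo_result.jpg')
```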
+ + + + +## Citation + +```latex +@article{chen2020feature, + title={Feature pyramid grids}, + author={Chen, Kai and Cao, Yuhang and Loy, Chen Change and Lin, Dahua and Feichtenhofer, Christoph}, + journal={arXiv preprint arXiv:2004.03580}, + year={2020} +} +``` + +## Results and Models + +We benchmark the new training schedule (crop training, large batch, unfrozen BN, 50 epochs) introduced in NAS-FPN. +All backbones are Resnet-50 in pytorch style. + +| Method | Neck | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:------------:|:-----------:|:-------:|:--------:|:--------------:|:------:|:-------:|:-------:|:--------:| +| Faster R-CNN | FPG | 50e | 20.0 | - | 42.2 | - |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg_crop640_50e_coco/faster_rcnn_r50_fpg_crop640_50e_coco-76220505.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg_crop640_50e_coco/20210218_223520.log.json) | +| Faster R-CNN | FPG-chn128 | 50e | 11.9 | - | 41.2 | - |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco/faster_rcnn_r50_fpg-chn128_crop640_50e_coco-24257de9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco/20210218_221412.log.json) | +| Mask R-CNN | FPG | 50e | 23.2 | - | 42.7 | 37.8 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg_crop640_50e_coco/mask_rcnn_r50_fpg_crop640_50e_coco-c5860453.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg_crop640_50e_coco/20210222_205447.log.json) | +| Mask R-CNN | FPG-chn128 | 50e | 15.3 | - | 41.7 | 36.9 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco/mask_rcnn_r50_fpg-chn128_crop640_50e_coco-5c6ea10d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco/20210223_025039.log.json) | +| RetinaNet | FPG | 50e | 20.8 | - | 40.5 | - |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/retinanet_r50_fpg_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg_crop640_50e_coco/retinanet_r50_fpg_crop640_50e_coco-46fdd1c6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg_crop640_50e_coco/20210225_143957.log.json) | +| RetinaNet | FPG-chn128 | 50e | 19.9 | - | 40.3 | - |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco/retinanet_r50_fpg-chn128_crop640_50e_coco-5cf33c76.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco/20210225_184328.log.json) | + +**Note**: Chn128 means to decrease the number of channels of features and convs from 256 (default) to 128 in +Neck and BBox Head, which can greatly decrease memory consumption 
without sacrificing much precision. diff --git a/object_detection/configs/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco.py b/object_detection/configs/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4535034efa3f4c4a09064a753a2bbde68b6cd2f2 --- /dev/null +++ b/object_detection/configs/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco.py @@ -0,0 +1,9 @@ +_base_ = 'faster_rcnn_r50_fpg_crop640_50e_coco.py' + +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + neck=dict(out_channels=128, inter_channels=128), + rpn_head=dict(in_channels=128), + roi_head=dict( + bbox_roi_extractor=dict(out_channels=128), + bbox_head=dict(in_channels=128))) diff --git a/object_detection/configs/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py b/object_detection/configs/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3ab2a2c5ef04fc38a686065167df62eb3d67266d --- /dev/null +++ b/object_detection/configs/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py @@ -0,0 +1,48 @@ +_base_ = 'faster_rcnn_r50_fpn_crop640_50e_coco.py' + +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + neck=dict( + type='FPG', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + inter_channels=256, + num_outs=5, + stack_times=9, + paths=['bu'] * 9, + same_down_trans=None, + same_up_trans=dict( + type='conv', + kernel_size=3, + stride=2, + padding=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_lateral_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_down_trans=dict( + type='interpolation_conv', + mode='nearest', + kernel_size=3, + norm_cfg=norm_cfg, + order=('act', 'conv', 'norm'), + inplace=False), + across_up_trans=None, + across_skip_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + output_trans=dict( + type='last_conv', + kernel_size=3, + order=('act', 'conv', 'norm'), + inplace=False), + norm_cfg=norm_cfg, + skip_inds=[(0, 1, 2, 3), (0, 1, 2), (0, 1), (0, ), ()])) diff --git a/object_detection/configs/fpg/faster_rcnn_r50_fpn_crop640_50e_coco.py b/object_detection/configs/fpg/faster_rcnn_r50_fpn_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..95f4e91f203bad8367942fc24b838da9fbf62947 --- /dev/null +++ b/object_detection/configs/fpg/faster_rcnn_r50_fpn_crop640_50e_coco.py @@ -0,0 +1,68 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + backbone=dict(norm_cfg=norm_cfg, norm_eval=False), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict(bbox_head=dict(norm_cfg=norm_cfg))) +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=(640, 640), + ratio_range=(0.8, 1.2), + keep_ratio=True), + dict(type='RandomCrop', crop_size=(640, 640)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=(640, 640)), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 
'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(640, 640), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +optimizer = dict( + type='SGD', + lr=0.08, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict(norm_decay_mult=0, bypass_duplicate=True)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[30, 40]) +# runtime settings +runner = dict(max_epochs=50) +evaluation = dict(interval=2) diff --git a/object_detection/configs/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco.py b/object_detection/configs/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..baa4a5affc9b3ead0080d993b14f0d00392c2de5 --- /dev/null +++ b/object_detection/configs/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco.py @@ -0,0 +1,10 @@ +_base_ = 'mask_rcnn_r50_fpg_crop640_50e_coco.py' + +model = dict( + neck=dict(out_channels=128, inter_channels=128), + rpn_head=dict(in_channels=128), + roi_head=dict( + bbox_roi_extractor=dict(out_channels=128), + bbox_head=dict(in_channels=128), + mask_roi_extractor=dict(out_channels=128), + mask_head=dict(in_channels=128))) diff --git a/object_detection/configs/fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py b/object_detection/configs/fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3c9ea27617c85c54309ac454fff253a6d0462735 --- /dev/null +++ b/object_detection/configs/fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py @@ -0,0 +1,48 @@ +_base_ = 'mask_rcnn_r50_fpn_crop640_50e_coco.py' + +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + neck=dict( + type='FPG', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + inter_channels=256, + num_outs=5, + stack_times=9, + paths=['bu'] * 9, + same_down_trans=None, + same_up_trans=dict( + type='conv', + kernel_size=3, + stride=2, + padding=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_lateral_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_down_trans=dict( + type='interpolation_conv', + mode='nearest', + kernel_size=3, + norm_cfg=norm_cfg, + order=('act', 'conv', 'norm'), + inplace=False), + across_up_trans=None, + across_skip_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + output_trans=dict( + type='last_conv', + kernel_size=3, + order=('act', 'conv', 'norm'), + inplace=False), + norm_cfg=norm_cfg, + skip_inds=[(0, 1, 2, 3), (0, 1, 2), (0, 1), (0, ), ()])) diff --git a/object_detection/configs/fpg/mask_rcnn_r50_fpn_crop640_50e_coco.py b/object_detection/configs/fpg/mask_rcnn_r50_fpn_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8dfdbb44a49bf75e7460c49285046b7f38cdfc75 --- /dev/null +++ b/object_detection/configs/fpg/mask_rcnn_r50_fpn_crop640_50e_coco.py @@ -0,0 +1,74 @@ +_base_ = [ + 
'../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + backbone=dict(norm_cfg=norm_cfg, norm_eval=False), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + norm_cfg=norm_cfg, + num_outs=5), + roi_head=dict( + bbox_head=dict(norm_cfg=norm_cfg), mask_head=dict(norm_cfg=norm_cfg))) +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=(640, 640), + ratio_range=(0.8, 1.2), + keep_ratio=True), + dict(type='RandomCrop', crop_size=(640, 640)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=(640, 640)), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(640, 640), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +optimizer = dict( + type='SGD', + lr=0.08, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict(norm_decay_mult=0, bypass_duplicate=True)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[30, 40]) +# runtime settings +runner = dict(max_epochs=50) +evaluation = dict(interval=2) diff --git a/object_detection/configs/fpg/metafile.yml b/object_detection/configs/fpg/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..885d8573631c8a64ef61a9dbefff828f92aa8ce6 --- /dev/null +++ b/object_detection/configs/fpg/metafile.yml @@ -0,0 +1,104 @@ +Collections: + - Name: Feature Pyramid Grids + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Feature Pyramid Grids + Paper: + URL: https://arxiv.org/abs/2004.03580 + Title: 'Feature Pyramid Grids' + README: configs/fpg/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.10.0/mmdet/models/necks/fpg.py#L101 + Version: v2.10.0 + +Models: + - Name: faster_rcnn_r50_fpg_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py + Metadata: + Training Memory (GB): 20.0 + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg_crop640_50e_coco/faster_rcnn_r50_fpg_crop640_50e_coco-76220505.pth + + - Name: faster_rcnn_r50_fpg-chn128_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco.py + Metadata: + Training Memory (GB): 11.9 + Epochs: 50 + Results: + - Task: Object Detection + Dataset: 
COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco/faster_rcnn_r50_fpg-chn128_crop640_50e_coco-24257de9.pth + + - Name: mask_rcnn_r50_fpg_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py + Metadata: + Training Memory (GB): 23.2 + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg_crop640_50e_coco/mask_rcnn_r50_fpg_crop640_50e_coco-c5860453.pth + + - Name: mask_rcnn_r50_fpg-chn128_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco.py + Metadata: + Training Memory (GB): 15.3 + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco/mask_rcnn_r50_fpg-chn128_crop640_50e_coco-5c6ea10d.pth + + - Name: retinanet_r50_fpg_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/retinanet_r50_fpg_crop640_50e_coco.py + Metadata: + Training Memory (GB): 20.8 + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg_crop640_50e_coco/retinanet_r50_fpg_crop640_50e_coco-46fdd1c6.pth + + - Name: retinanet_r50_fpg-chn128_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco.py + Metadata: + Training Memory (GB): 19.9 + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco/retinanet_r50_fpg-chn128_crop640_50e_coco-5cf33c76.pth diff --git a/object_detection/configs/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco.py b/object_detection/configs/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9a6cf7e56a4f23a42d3905560a9b8035d6d935ff --- /dev/null +++ b/object_detection/configs/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco.py @@ -0,0 +1,5 @@ +_base_ = 'retinanet_r50_fpg_crop640_50e_coco.py' + +model = dict( + neck=dict(out_channels=128, inter_channels=128), + bbox_head=dict(in_channels=128)) diff --git a/object_detection/configs/fpg/retinanet_r50_fpg_crop640_50e_coco.py b/object_detection/configs/fpg/retinanet_r50_fpg_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..504ed5ec5040559b3d10f7caf8a970005a1a92d7 --- /dev/null +++ b/object_detection/configs/fpg/retinanet_r50_fpg_crop640_50e_coco.py @@ -0,0 +1,53 @@ +_base_ = '../nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py' + +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + neck=dict( + _delete_=True, + type='FPG', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + inter_channels=256, + num_outs=5, + add_extra_convs=True, + start_level=1, + stack_times=9, + paths=['bu'] * 9, + same_down_trans=None, + same_up_trans=dict( + type='conv', + kernel_size=3, + stride=2, + padding=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_lateral_trans=dict( + 
type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_down_trans=dict( + type='interpolation_conv', + mode='nearest', + kernel_size=3, + norm_cfg=norm_cfg, + order=('act', 'conv', 'norm'), + inplace=False), + across_up_trans=None, + across_skip_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + output_trans=dict( + type='last_conv', + kernel_size=3, + order=('act', 'conv', 'norm'), + inplace=False), + norm_cfg=norm_cfg, + skip_inds=[(0, 1, 2, 3), (0, 1, 2), (0, 1), (0, ), ()])) + +evaluation = dict(interval=2) diff --git a/object_detection/configs/free_anchor/README.md b/object_detection/configs/free_anchor/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a1dcb17f1fece8fa2067be47b518064cb16ea159 --- /dev/null +++ b/object_detection/configs/free_anchor/README.md @@ -0,0 +1,41 @@ +# FreeAnchor: Learning to Match Anchors for Visual Object Detection + +## Abstract + + + +Modern CNN-based object detectors assign anchors for ground-truth objects under the restriction of object-anchor Intersection-over-Unit (IoU). In this study, we propose a learning-to-match approach to break IoU restriction, allowing objects to match anchors in a flexible manner. Our approach, referred to as FreeAnchor, updates hand-crafted anchor assignment to "free" anchor matching by formulating detector training as a maximum likelihood estimation (MLE) procedure. FreeAnchor targets at learning features which best explain a class of objects in terms of both classification and localization. FreeAnchor is implemented by optimizing detection customized likelihood and can be fused with CNN-based detectors in a plug-and-play manner. Experiments on COCO demonstrate that FreeAnchor consistently outperforms their counterparts with significant margins. + + +
+ +
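To make the learning-to-match idea from the abstract concrete, here is a small conceptual sketch (not the mmdet implementation, and a hard-max simplification of the paper's likelihood-based soft matching): each object keeps a bag of candidate anchors and is matched to whichever anchor in the bag currently scores best on the joint classification-localization confidence. All tensor names below are illustrative.

```python
import torch

def match_objects_to_anchors(cls_conf, loc_conf, bag_idx):
    """cls_conf, loc_conf: (num_objects, num_anchors) per-anchor confidences.
    bag_idx: (num_objects, bag_size) indices of each object's candidate-anchor bag.
    Returns the index of the best-matching anchor for every object."""
    joint = cls_conf * loc_conf                     # detection-customized confidence
    bag_scores = joint.gather(1, bag_idx)           # restrict scoring to each object's bag
    best_in_bag = bag_scores.argmax(dim=1)          # hard selection, for illustration only
    return bag_idx.gather(1, best_in_bag[:, None]).squeeze(1)

# toy example: 4 objects, 100 anchors, bags of 16 candidate anchors each
anchors = match_objects_to_anchors(
    torch.rand(4, 100), torch.rand(4, 100), torch.randint(0, 100, (4, 16)))
```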
+ + + + +## Citation + + + +```latex +@inproceedings{zhang2019freeanchor, + title = {{FreeAnchor}: Learning to Match Anchors for Visual Object Detection}, + author = {Zhang, Xiaosong and Wan, Fang and Liu, Chang and Ji, Rongrong and Ye, Qixiang}, + booktitle = {Neural Information Processing Systems}, + year = {2019} +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:--------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | 1x | 4.9 | 18.4 | 38.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco/retinanet_free_anchor_r50_fpn_1x_coco_20200130-0f67375f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco/retinanet_free_anchor_r50_fpn_1x_coco_20200130_095625.log.json) | +| R-101 | pytorch | 1x | 6.8 | 14.9 | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco/retinanet_free_anchor_r101_fpn_1x_coco_20200130-358324e6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco/retinanet_free_anchor_r101_fpn_1x_coco_20200130_100723.log.json) | +| X-101-32x4d | pytorch | 1x | 8.1 | 11.1 | 41.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco/retinanet_free_anchor_x101_32x4d_fpn_1x_coco_20200130-d4846968.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco/retinanet_free_anchor_x101_32x4d_fpn_1x_coco_20200130_095627.log.json) | + +**Notes:** + +- We use 8 GPUs with 2 images/GPU. +- For more settings and models, please refer to the [official repo](https://github.com/zhangxiaosong18/FreeAnchor). 
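As a usage note (not part of the original README): one of the checkpoints listed in the table above can be loaded for quick inference with the standard mmdet 2.x Python API. The local file paths and the demo image below are placeholders.

```python
from mmdet.apis import init_detector, inference_detector

config_file = 'object_detection/configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py'
checkpoint_file = 'checkpoints/retinanet_free_anchor_r50_fpn_1x_coco_20200130-0f67375f.pth'  # downloaded from the table above

model = init_detector(config_file, checkpoint_file, device='cuda:0')  # build the detector and load weights
result = inference_detector(model, 'demo.jpg')  # placeholder image; returns per-class arrays of detected boxes
```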
diff --git a/object_detection/configs/free_anchor/metafile.yml b/object_detection/configs/free_anchor/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..170fb5c07a7277f5bf4f0a563284b6504dacebfe --- /dev/null +++ b/object_detection/configs/free_anchor/metafile.yml @@ -0,0 +1,79 @@ +Collections: + - Name: FreeAnchor + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FreeAnchor + - ResNet + Paper: + URL: https://arxiv.org/abs/1909.02466 + Title: 'FreeAnchor: Learning to Match Anchors for Visual Object Detection' + README: configs/free_anchor/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/dense_heads/free_anchor_retina_head.py#L10 + Version: v2.0.0 + +Models: + - Name: retinanet_free_anchor_r50_fpn_1x_coco + In Collection: FreeAnchor + Config: configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.9 + inference time (ms/im): + - value: 54.35 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco/retinanet_free_anchor_r50_fpn_1x_coco_20200130-0f67375f.pth + + - Name: retinanet_free_anchor_r101_fpn_1x_coco + In Collection: FreeAnchor + Config: configs/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.8 + inference time (ms/im): + - value: 67.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco/retinanet_free_anchor_r101_fpn_1x_coco_20200130-358324e6.pth + + - Name: retinanet_free_anchor_x101_32x4d_fpn_1x_coco + In Collection: FreeAnchor + Config: configs/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 8.1 + inference time (ms/im): + - value: 90.09 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco/retinanet_free_anchor_x101_32x4d_fpn_1x_coco_20200130-d4846968.pth diff --git a/object_detection/configs/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py b/object_detection/configs/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f4aea53cc39f4fd441ae9c9f3a6f541b2fa36929 --- /dev/null +++ b/object_detection/configs/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './retinanet_free_anchor_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py b/object_detection/configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..28f983c29edd071b32a50f18ac7b3f5c1bfdda88 --- /dev/null +++ 
b/object_detection/configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py @@ -0,0 +1,22 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +model = dict( + bbox_head=dict( + _delete_=True, + type='FreeAnchorRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=0.75))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py b/object_detection/configs/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..65f8a9e2a4d221732dcf55a4a4d4b07041271668 --- /dev/null +++ b/object_detection/configs/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './retinanet_free_anchor_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/fsaf/README.md b/object_detection/configs/fsaf/README.md new file mode 100644 index 0000000000000000000000000000000000000000..ba4d409b0b5a8593a7fb469824c705bcdc4957c8 --- /dev/null +++ b/object_detection/configs/fsaf/README.md @@ -0,0 +1,61 @@ +# Feature Selective Anchor-Free Module for Single-Shot Object Detection + +## Abstract + + + +We motivate and present feature selective anchor-free (FSAF) module, a simple and effective building block for single-shot object detectors. It can be plugged into single-shot detectors with feature pyramid structure. The FSAF module addresses two limitations brought up by the conventional anchor-based detection: 1) heuristic-guided feature selection; 2) overlap-based anchor sampling. The general concept of the FSAF module is online feature selection applied to the training of multi-level anchor-free branches. Specifically, an anchor-free branch is attached to each level of the feature pyramid, allowing box encoding and decoding in the anchor-free manner at an arbitrary level. During training, we dynamically assign each instance to the most suitable feature level. At the time of inference, the FSAF module can work jointly with anchor-based branches by outputting predictions in parallel. We instantiate this concept with simple implementations of anchor-free branches and online feature selection strategy. Experimental results on the COCO detection track show that our FSAF module performs better than anchor-based counterparts while being faster. When working jointly with anchor-based branches, the FSAF module robustly improves the baseline RetinaNet by a large margin under various settings, while introducing nearly free inference overhead. And the resulting best model can achieve a state-of-the-art 44.6% mAP, outperforming all existing single-shot detectors on COCO. + + +
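The online feature selection described above can be summarized with a short conceptual sketch (illustrative only, not the mmdet implementation): during training, each instance's anchor-free loss is evaluated on every pyramid level and the instance is assigned to the level with the smallest loss.

```python
import torch

def select_feature_levels(cls_losses, reg_losses):
    """cls_losses, reg_losses: (num_instances, num_levels) losses of each instance
    evaluated on every FPN level; returns the chosen level per instance."""
    return (cls_losses + reg_losses).argmin(dim=1)

# toy example: 3 instances scored over 5 pyramid levels (P3-P7)
levels = select_feature_levels(torch.rand(3, 5), torch.rand(3, 5))
```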
+ +
+ + + + +## Introduction + + + +FSAF is an anchor-free method published in CVPR2019 ([https://arxiv.org/pdf/1903.00621.pdf](https://arxiv.org/pdf/1903.00621.pdf)). +Actually it is equivalent to the anchor-based method with only one anchor at each feature map position in each FPN level. +And this is how we implemented it. +Only the anchor-free branch is released for its better compatibility with the current framework and less computational budget. + +In the original paper, feature maps within the central 0.2-0.5 area of a gt box are tagged as ignored. However, +it is empirically found that a hard threshold (0.2-0.2) gives a further gain on the performance. (see the table below) + +## Main Results + +### Results on R50/R101/X101-FPN + +| Backbone | ignore range | ms-train| Lr schd |Train Mem (GB)| Train time (s/iter) | Inf time (fps) | box AP | Config | Download | +|:----------:| :-------: |:-------:|:-------:|:------------:|:---------------:|:--------------:|:-------------:|:------:|:--------:| +| R-50 | 0.2-0.5 | N | 1x | 3.15 | 0.43 | 12.3 | 36.0 (35.9) | | [model](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_pscale0.2_nscale0.5_r50_fpn_1x_coco/fsaf_pscale0.2_nscale0.5_r50_fpn_1x_coco_20200715-b555b0e0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_pscale0.2_nscale0.5_r50_fpn_1x_coco/fsaf_pscale0.2_nscale0.5_r50_fpn_1x_coco_20200715_094657.log.json) | +| R-50 | 0.2-0.2 | N | 1x | 3.15 | 0.43 | 13.0 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fsaf/fsaf_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r50_fpn_1x_coco/fsaf_r50_fpn_1x_coco-94ccc51f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r50_fpn_1x_coco/fsaf_r50_fpn_1x_coco_20200428_072327.log.json)| +| R-101 | 0.2-0.2 | N | 1x | 5.08 | 0.58 | 10.8 | 39.3 (37.9) | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fsaf/fsaf_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r101_fpn_1x_coco/fsaf_r101_fpn_1x_coco-9e71098f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r101_fpn_1x_coco/fsaf_r101_fpn_1x_coco_20200428_160348.log.json)| +| X-101 | 0.2-0.2 | N | 1x | 9.38 | 1.23 | 5.6 | 42.4 (41.0) | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_x101_64x4d_fpn_1x_coco/fsaf_x101_64x4d_fpn_1x_coco-e3f6e6fd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_x101_64x4d_fpn_1x_coco/fsaf_x101_64x4d_fpn_1x_coco_20200428_160424.log.json)| + +**Notes:** + +- *1x means the model is trained for 12 epochs.* +- *AP values in the brackets represent those reported in the original paper.* +- *All results are obtained with a single model and single-scale test.* +- *X-101 backbone represents ResNext-101-64x4d.* +- *All pretrained backbones use pytorch style.* +- *All models are trained on 8 Titan-XP gpus and tested on a single gpu.* + +## Citations + +BibTeX reference is as follows. 
+ +```latex +@inproceedings{zhu2019feature, + title={Feature Selective Anchor-Free Module for Single-Shot Object Detection}, + author={Zhu, Chenchen and He, Yihui and Savvides, Marios}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + pages={840--849}, + year={2019} +} +``` diff --git a/object_detection/configs/fsaf/fsaf_r101_fpn_1x_coco.py b/object_detection/configs/fsaf/fsaf_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..12b49fed5b6cd617aa9c05d76ed737d755992a34 --- /dev/null +++ b/object_detection/configs/fsaf/fsaf_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './fsaf_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/fsaf/fsaf_r50_fpn_1x_coco.py b/object_detection/configs/fsaf/fsaf_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..67f3ec1c4c16fb9bd041dbb3a24d269a83145f26 --- /dev/null +++ b/object_detection/configs/fsaf/fsaf_r50_fpn_1x_coco.py @@ -0,0 +1,48 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +# model settings +model = dict( + type='FSAF', + bbox_head=dict( + type='FSAFHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + reg_decoded_bbox=True, + # Only anchor-free branch is implemented. The anchor generator only + # generates 1 anchor at each feature point, as a substitute of the + # grid of features. + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=1, + scales_per_octave=1, + ratios=[1.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict(_delete_=True, type='TBLRBBoxCoder', normalizer=4.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0, + reduction='none'), + loss_bbox=dict( + _delete_=True, + type='IoULoss', + eps=1e-6, + loss_weight=1.0, + reduction='none')), + # training and testing settings + train_cfg=dict( + assigner=dict( + _delete_=True, + type='CenterRegionAssigner', + pos_scale=0.2, + neg_scale=0.2, + min_pos_iof=0.01), + allowed_border=-1, + pos_weight=-1, + debug=False)) +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=10, norm_type=2)) diff --git a/object_detection/configs/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py b/object_detection/configs/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..89c0c6344aba6e6eae5657eff60745645dd1e8dc --- /dev/null +++ b/object_detection/configs/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './fsaf_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/fsaf/metafile.yml b/object_detection/configs/fsaf/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..5434e9adfa620598c5454de1874371d9d2545981 --- /dev/null +++ b/object_detection/configs/fsaf/metafile.yml @@ -0,0 +1,80 @@ +Collections: + - Name: FSAF + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x Titan-XP GPUs + Architecture: + - FPN + - FSAF + - ResNet 
+ Paper: + URL: https://arxiv.org/abs/1903.00621 + Title: 'Feature Selective Anchor-Free Module for Single-Shot Object Detection' + README: configs/fsaf/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/detectors/fsaf.py#L6 + Version: v2.1.0 + +Models: + - Name: fsaf_r50_fpn_1x_coco + In Collection: FSAF + Config: configs/fsaf/fsaf_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.15 + inference time (ms/im): + - value: 76.92 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r50_fpn_1x_coco/fsaf_r50_fpn_1x_coco-94ccc51f.pth + + - Name: fsaf_r101_fpn_1x_coco + In Collection: FSAF + Config: configs/fsaf/fsaf_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.08 + inference time (ms/im): + - value: 92.59 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.3 (37.9) + Weights: https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r101_fpn_1x_coco/fsaf_r101_fpn_1x_coco-9e71098f.pth + + - Name: fsaf_x101_64x4d_fpn_1x_coco + In Collection: FSAF + Config: configs/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 9.38 + inference time (ms/im): + - value: 178.57 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.4 (41.0) + Weights: https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_x101_64x4d_fpn_1x_coco/fsaf_x101_64x4d_fpn_1x_coco-e3f6e6fd.pth diff --git a/object_detection/configs/gcnet/README.md b/object_detection/configs/gcnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..c5c436afd85324cc69a4086a1c7ceb41f9791e8b --- /dev/null +++ b/object_detection/configs/gcnet/README.md @@ -0,0 +1,73 @@ +# GCNet: Non-local Networks Meet Squeeze-Excitation Networks and Beyond + +## Abstract + + + +The Non-Local Network (NLNet) presents a pioneering approach for capturing long-range dependencies, via aggregating query-specific global context to each query position. However, through a rigorous empirical analysis, we have found that the global contexts modeled by non-local network are almost the same for different query positions within an image. In this paper, we take advantage of this finding to create a simplified network based on a query-independent formulation, which maintains the accuracy of NLNet but with significantly less computation. We further observe that this simplified design shares similar structure with Squeeze-Excitation Network (SENet). Hence we unify them into a three-step general framework for global context modeling. Within the general framework, we design a better instantiation, called the global context (GC) block, which is lightweight and can effectively model the global context. The lightweight property allows us to apply it for multiple layers in a backbone network to construct a global context network (GCNet), which generally outperforms both simplified NLNet and SENet on major benchmarks for various recognition tasks. + + +
+ +
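Below is a simplified PyTorch sketch of the three-step framework described in the abstract (query-independent context modeling, bottleneck transform, broadcast fusion). It mirrors the idea only; the configs in this folder rely on mmcv's `ContextBlock` implementation rather than this illustration.

```python
import torch
import torch.nn as nn

class SimpleGCBlock(nn.Module):
    """Minimal global-context block: one attention map shared by all query positions."""

    def __init__(self, channels, ratio=1. / 16):
        super().__init__()
        hidden = max(1, int(channels * ratio))          # bottleneck width (r4 / r16 in the tables)
        self.attn = nn.Conv2d(channels, 1, kernel_size=1)   # context modeling
        self.transform = nn.Sequential(                      # bottleneck transform
            nn.Conv2d(channels, hidden, 1),
            nn.LayerNorm([hidden, 1, 1]),
            nn.ReLU(inplace=True),
            nn.Conv2d(hidden, channels, 1))

    def forward(self, x):
        n, c, h, w = x.shape
        weights = self.attn(x).view(n, 1, h * w).softmax(dim=-1)             # (N, 1, HW)
        context = torch.bmm(x.view(n, c, h * w), weights.transpose(1, 2))    # (N, C, 1)
        context = context.view(n, c, 1, 1)
        return x + self.transform(context)               # broadcast fusion over all positions

out = SimpleGCBlock(256)(torch.rand(2, 256, 32, 32))     # output keeps the input shape
```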
+ + + + +## Introduction + +By [Yue Cao](http://yue-cao.me), [Jiarui Xu](http://jerryxu.net), [Stephen Lin](https://scholar.google.com/citations?user=c3PYmxUAAAAJ&hl=en), Fangyun Wei, [Han Hu](https://sites.google.com/site/hanhushomepage/). + +We provide config files to reproduce the results in the paper for +["GCNet: Non-local Networks Meet Squeeze-Excitation Networks and Beyond"](https://arxiv.org/abs/1904.11492) on COCO object detection. + + + +**GCNet** is initially described in [arxiv](https://arxiv.org/abs/1904.11492). Via absorbing advantages of Non-Local Networks (NLNet) and Squeeze-Excitation Networks (SENet), GCNet provides a simple, fast and effective approach for global context modeling, which generally outperforms both NLNet and SENet on major benchmarks for various recognition tasks. + +## Citation + +```latex +@article{cao2019GCNet, + title={GCNet: Non-local Networks Meet Squeeze-Excitation Networks and Beyond}, + author={Cao, Yue and Xu, Jiarui and Lin, Stephen and Wei, Fangyun and Hu, Han}, + journal={arXiv preprint arXiv:1904.11492}, + year={2019} +} +``` + +## Results and models + +The results on COCO 2017val are shown in the below table. + +| Backbone | Model | Context | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------: | :--------------: | :------------: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | Mask | GC(c3-c5, r16) | 1x | 5.0 | | 39.7 | 35.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco_20200515_211915-187da160.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco_20200515_211915.log.json) | +| R-50-FPN | Mask | GC(c3-c5, r4) | 1x | 5.1 | 15.0 | 39.9 | 36.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco_20200204-17235656.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco_20200204_024626.log.json) | +| R-101-FPN | Mask | GC(c3-c5, r16) | 1x | 7.6 | 11.4 | 41.3 | 37.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco_20200205-e58ae947.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco_20200205_192835.log.json) | +| R-101-FPN | Mask | GC(c3-c5, r4) | 1x | 7.8 | 11.6 | 42.2 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco_20200206-af22dc9d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco_20200206_112128.log.json) | + +| Backbone | Model 
| Context | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------: | :--------------: | :------------: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :-------: | +| R-50-FPN | Mask | - | 1x | 4.4 | 16.6 | 38.4 | 34.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco_20200202-bb3eb55c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco_20200202_214122.log.json) | +| R-50-FPN | Mask | GC(c3-c5, r16) | 1x | 5.0 | 15.5 | 40.4 | 36.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200202-587b99aa.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200202_174907.log.json) | +| R-50-FPN | Mask | GC(c3-c5, r4) | 1x | 5.1 | 15.1 | 40.7 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202-50b90e5c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202_085547.log.json) | +| R-101-FPN | Mask | - | 1x | 6.4 | 13.3 | 40.5 | 36.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco_20200210-81658c8a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco_20200210_220422.log.json) | +| R-101-FPN | Mask | GC(c3-c5, r16) | 1x | 7.6 | 12.0 | 42.2 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200207-945e77ca.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200207_015330.log.json) | +| R-101-FPN | Mask | GC(c3-c5, r4) | 1x | 7.8 | 11.8 | 42.2 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206-8407a3f0.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206_142508.log.json) | +| X-101-FPN | Mask | - | 1x | 7.6 | 11.3 | 42.4 | 37.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200211-7584841c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200211_054326.log.json) | +| X-101-FPN | Mask | GC(c3-c5, r16) | 1x | 8.8 | 9.8 | 43.5 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211-cbed3d2c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211_164715.log.json) | +| X-101-FPN | Mask | GC(c3-c5, r4) | 1x | 9.0 | 9.7 | 43.9 | 39.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200212-68164964.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200212_070942.log.json) | +| X-101-FPN | Cascade Mask | - | 1x | 9.2 | 8.4 | 44.7 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200310-d5ad2a5e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200310_115217.log.json) | +| X-101-FPN | Cascade Mask | GC(c3-c5, r16) | 1x | 10.3 | 7.7 | 46.2 | 39.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211-10bf2463.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211_184154.log.json) | +| X-101-FPN | Cascade Mask | GC(c3-c5, r4) | 1x | 10.6 | | 46.4 | 40.1 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200703_180653-ed035291.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200703_180653.log.json) | +| X-101-FPN | DCN Cascade Mask | - | 1x | | | 47.5 | 40.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco_20210615_211019-abbc39ea.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco_20210615_211019.log.json)| +| X-101-FPN | DCN Cascade Mask | GC(c3-c5, r16) | 1x | | | 48.0 | 41.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco_20210615_215648-44aa598a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco_20210615_215648.log.json) | +| X-101-FPN | DCN Cascade Mask | GC(c3-c5, r4) | 1x | | | 47.9 | 41.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco_20210615_161851-720338ec.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco_20210615_161851.log.json) | + +**Notes:** + +- The `SyncBN` is added in the backbone for all models in **Table 2**. +- `GC` denotes Global Context (GC) block is inserted after 1x1 conv of backbone. +- `DCN` denotes replace 3x3 conv with 3x3 Deformable Convolution in `c3-c5` stages of backbone. +- `r4` and `r16` denote ratio 4 and ratio 16 in GC block respectively. 
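As a hypothetical usage sketch (the file name, checkpoint path, and choice of pretrained weights are placeholders, not part of the original configs): one of the GCNet Mask R-CNN configs above can be re-based onto a locally stored ResNet-50 checkpoint, for example a self-supervised backbone such as the LVM-Med ResNet-50, using the same `init_cfg` convention as the other configs in this directory.

```python
# my_gcnet_custom_backbone.py -- hypothetical override; paths are placeholders
_base_ = './mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py'

model = dict(
    backbone=dict(
        init_cfg=dict(
            type='Pretrained',
            checkpoint='checkpoints/lvmmed_resnet50.pth')))  # placeholder checkpoint path
```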
diff --git a/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py b/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5118895f00345a42fdbc6d2edba084ccd3f1a3c8 --- /dev/null +++ b/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py b/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..413499dd6d3fe88e91e357a62461f47f037fcedf --- /dev/null +++ b/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py b/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..50689aadf6cab9414aab1a7a9e72ef8231355e4f --- /dev/null +++ b/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py b/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..13672312a8f5c57c5799ca6df4d52fed103287b4 --- /dev/null +++ b/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py b/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..50883ffeb16369ea6210f2ece8fc2d7e084b0134 --- /dev/null +++ b/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py b/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..31fdd070595ac0512a39075bb045dd18035d3f14 --- /dev/null +++ b/object_detection/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/object_detection/configs/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py b/object_detection/configs/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ad6ad47696e6aeb2b3505abab0bd2d49d3b7aa83 --- /dev/null +++ b/object_detection/configs/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/object_detection/configs/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py b/object_detection/configs/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..29f91674c6d54bfa6fdcfcb5b7e2ec2a2bbf81fa --- /dev/null +++ b/object_detection/configs/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/object_detection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py b/object_detection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6e1c5d0cadfb9fb3a4f8645e28a8e67fc499e900 --- /dev/null +++ b/object_detection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/object_detection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py b/object_detection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..781dba78d68e77fa7eee15f5bbcc539731f8378d --- /dev/null +++ b/object_detection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/object_detection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py b/object_detection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..32972de857b3c4f43170dcd3e7fbce76425f094d --- /dev/null +++ b/object_detection/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/object_detection/configs/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py b/object_detection/configs/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d299b69f576a2547de1f7d9edd171d56ab002d0a --- /dev/null +++ b/object_detection/configs/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/object_detection/configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py b/object_detection/configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5ac908e60c1f964bdd6c3e61933a37c04d487bfb --- /dev/null +++ b/object_detection/configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/object_detection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py b/object_detection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0308a567c147413688c9da679d06f93b0e154d88 --- /dev/null +++ b/object_detection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/object_detection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py b/object_detection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e04780c50f96929997c279b23fe5fa427657039b --- /dev/null +++ b/object_detection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/object_detection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py b/object_detection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..980f8191d4c07eb35e338bd87e3b73b06b3214ad --- /dev/null +++ b/object_detection/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/object_detection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py b/object_detection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f0c96e58b6131f2958f28c56b9d8384d5b4746f7 --- /dev/null +++ b/object_detection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/object_detection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py b/object_detection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7fb8e82ece225ab6f88f1f4f83bea56a42cf1a57 --- /dev/null +++ b/object_detection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/object_detection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py b/object_detection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b1ddbee3b4b79e79bb2a3faf30604f2465612728 --- /dev/null +++ b/object_detection/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/object_detection/configs/gcnet/metafile.yml b/object_detection/configs/gcnet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..1281122a776e56c8bfc93aad3efc44df60996ec0 --- /dev/null +++ b/object_detection/configs/gcnet/metafile.yml @@ -0,0 +1,440 @@ +Collections: + - Name: GCNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Global Context Block + - FPN + - RPN + - ResNet + - ResNeXt + Paper: + URL: https://arxiv.org/abs/1904.11492 + Title: 'GCNet: Non-local Networks Meet Squeeze-Excitation Networks and Beyond' + README: configs/gcnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/ops/context_block.py#L13 + Version: v2.0.0 + +Models: + - Name: mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 5.0 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 35.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco_20200515_211915-187da160.pth + + - Name: mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 5.1 + inference time (ms/im): + - value: 66.67 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco_20200204-17235656.pth + + - Name: mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 87.72 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco_20200205-e58ae947.pth + + - Name: mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + 
Config: configs/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 7.8 + inference time (ms/im): + - value: 86.21 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco_20200206-af22dc9d.pth + + - Name: mask_rcnn_r50_fpn_syncbn-backbone_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 60.24 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 34.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco_20200202-bb3eb55c.pth + + - Name: mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 5.0 + inference time (ms/im): + - value: 64.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200202-587b99aa.pth + + - Name: mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 5.1 + inference time (ms/im): + - value: 66.23 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202-50b90e5c.pth + + - Name: mask_rcnn_r101_fpn_syncbn-backbone_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 75.19 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco_20200210-81658c8a.pth + + - Name: mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time 
(ms/im): + - value: 83.33 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200207-945e77ca.pth + + - Name: mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 7.8 + inference time (ms/im): + - value: 84.75 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206-8407a3f0.pth + + - Name: mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 88.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200211-7584841c.pth + + - Name: mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 8.8 + inference time (ms/im): + - value: 102.04 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211-cbed3d2c.pth + + - Name: mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 9.0 + inference time (ms/im): + - value: 103.09 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200212-68164964.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py + 
Metadata: + Training Memory (GB): 9.2 + inference time (ms/im): + - value: 119.05 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200310-d5ad2a5e.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 10.3 + inference time (ms/im): + - value: 129.87 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211-10bf2463.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 10.6 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200703_180653-ed035291.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco_20210615_211019-abbc39ea.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 48.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco_20210615_215648-44aa598a.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + 
Metrics: + box AP: 47.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco_20210615_161851-720338ec.pth diff --git a/object_detection/configs/gfl/README.md b/object_detection/configs/gfl/README.md new file mode 100644 index 0000000000000000000000000000000000000000..46a7a08596ae1231b25fed433424f15c6f7d3dd6 --- /dev/null +++ b/object_detection/configs/gfl/README.md @@ -0,0 +1,46 @@ +# Generalized Focal Loss: Learning Qualified and Distributed Bounding Boxes for Dense Object Detection + +## Abstract + + + +One-stage detector basically formulates object detection as dense classification and localization. The classification is usually optimized by Focal Loss and the box location is commonly learned under Dirac delta distribution. A recent trend for one-stage detectors is to introduce an individual prediction branch to estimate the quality of localization, where the predicted quality facilitates the classification to improve detection performance. This paper delves into the representations of the above three fundamental elements: quality estimation, classification and localization. Two problems are discovered in existing practices, including (1) the inconsistent usage of the quality estimation and classification between training and inference and (2) the inflexible Dirac delta distribution for localization when there is ambiguity and uncertainty in complex scenes. To address the problems, we design new representations for these elements. Specifically, we merge the quality estimation into the class prediction vector to form a joint representation of localization quality and classification, and use a vector to represent arbitrary distribution of box locations. The improved representations eliminate the inconsistency risk and accurately depict the flexible distribution in real data, but contain continuous labels, which is beyond the scope of Focal Loss. We then propose Generalized Focal Loss (GFL) that generalizes Focal Loss from its discrete form to the continuous version for successful optimization. On COCO test-dev, GFL achieves 45.0\% AP using ResNet-101 backbone, surpassing state-of-the-art SAPD (43.5\%) and ATSS (43.6\%) with higher or comparable inference speed, under the same backbone and training settings. Notably, our best model can achieve a single-model single-scale AP of 48.2\%, at 10 FPS on a single 2080Ti GPU. + + +
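The Quality Focal Loss referred to in the abstract above (and selected further down in these configs as `loss_cls=dict(type='QualityFocalLoss', use_sigmoid=True, beta=2.0, ...)`) replaces the hard 0/1 focal-loss target with the localization quality (IoU) of the matched box. As a reading aid, here is a small plain-PyTorch sketch of that idea; the tensor shapes, the normalization, and the example values are assumptions for illustration, and this is not mmdetection's own `QualityFocalLoss` implementation.

```python
# Illustrative sketch of Quality Focal Loss: binary cross-entropy against a
# soft quality target in [0, 1], modulated by |y - sigmoid(x)|^beta so that
# already well-predicted examples are down-weighted.
import torch
import torch.nn.functional as F


def quality_focal_loss(logits: torch.Tensor,
                       quality_targets: torch.Tensor,
                       beta: float = 2.0) -> torch.Tensor:
    """logits: (N, num_classes) raw scores; quality_targets: same shape,
    holding the matched box's IoU for the ground-truth class and 0 elsewhere."""
    prob = logits.sigmoid()
    bce = F.binary_cross_entropy_with_logits(
        logits, quality_targets, reduction='none')
    modulating = (quality_targets - prob).abs().pow(beta)
    return (modulating * bce).sum(dim=1).mean()


if __name__ == '__main__':
    scores = torch.randn(4, 80, requires_grad=True)
    targets = torch.zeros(4, 80)
    targets[0, 3] = 0.83          # positive sample: target is its IoU, not 1.0
    quality_focal_loss(scores, targets).backward()
```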
+ +
+ + + + +## Citation + + + +We provide config files to reproduce the object detection results in the paper [Generalized Focal Loss: Learning Qualified and Distributed Bounding Boxes for Dense Object Detection](https://arxiv.org/abs/2006.04388) + +```latex +@article{li2020generalized, + title={Generalized Focal Loss: Learning Qualified and Distributed Bounding Boxes for Dense Object Detection}, + author={Li, Xiang and Wang, Wenhai and Wu, Lijun and Chen, Shuo and Hu, Xiaolin and Li, Jun and Tang, Jinhui and Yang, Jian}, + journal={arXiv preprint arXiv:2006.04388}, + year={2020} +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Multi-scale Training| Inf time (fps) | box AP | Config | Download | +|:-----------------:|:-------:|:-------:|:-------------------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | 1x | No | 19.5 | 40.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_1x_coco/gfl_r50_fpn_1x_coco_20200629_121244-25944287.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_1x_coco/gfl_r50_fpn_1x_coco_20200629_121244.log.json) | +| R-50 | pytorch | 2x | Yes | 19.5 | 42.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_r50_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_mstrain_2x_coco/gfl_r50_fpn_mstrain_2x_coco_20200629_213802-37bb1edc.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_mstrain_2x_coco/gfl_r50_fpn_mstrain_2x_coco_20200629_213802.log.json) | +| R-101 | pytorch | 2x | Yes | 14.7 | 44.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_mstrain_2x_coco/gfl_r101_fpn_mstrain_2x_coco_20200629_200126-dd12f847.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_mstrain_2x_coco/gfl_r101_fpn_mstrain_2x_coco_20200629_200126.log.json) | +| R-101-dcnv2 | pytorch | 2x | Yes | 12.9 | 47.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20200630_102002-134b07df.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20200630_102002.log.json) | +| X-101-32x4d | pytorch | 2x | Yes | 12.1 | 45.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco/gfl_x101_32x4d_fpn_mstrain_2x_coco_20200630_102002-50c1ffdb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco/gfl_x101_32x4d_fpn_mstrain_2x_coco_20200630_102002.log.json) | +| X-101-32x4d-dcnv2 | pytorch | 2x | Yes | 10.7 | 48.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco_20200630_102002-14a2bf25.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco_20200630_102002.log.json) | + +[1] *1x and 2x mean the model is trained for 90K and 180K iterations, respectively.* \ +[2] *All results are obtained with a single model and without any test time data augmentation such as multi-scale, flipping and etc..* \ +[3] *`dcnv2` denotes deformable convolutional networks v2.* \ +[4] *FPS is tested with a single GeForce RTX 2080Ti GPU, using a batch size of 1.* diff --git a/object_detection/configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py b/object_detection/configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b72c2b6eddfb51a0a61610826e00296e2b76f827 --- /dev/null +++ b/object_detection/configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,15 @@ +_base_ = './gfl_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py b/object_detection/configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e33b5c0d27883d5b495c4dae88f550ffbb26a318 --- /dev/null +++ b/object_detection/configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py @@ -0,0 +1,13 @@ +_base_ = './gfl_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/gfl/gfl_r50_fpn_1x_coco.py b/object_detection/configs/gfl/gfl_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cfd4b02391a3d4cae0c060990be1f99b3edebabe --- /dev/null +++ b/object_detection/configs/gfl/gfl_r50_fpn_1x_coco.py @@ -0,0 +1,57 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='GFL', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='GFLHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + loss_cls=dict( + type='QualityFocalLoss', + use_sigmoid=True, + beta=2.0, + loss_weight=1.0), + loss_dfl=dict(type='DistributionFocalLoss', loss_weight=0.25), + reg_max=16, + loss_bbox=dict(type='GIoULoss', loss_weight=2.0)), + # training and testing settings + train_cfg=dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + 
pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/gfl/gfl_r50_fpn_mstrain_2x_coco.py b/object_detection/configs/gfl/gfl_r50_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b8be60145758c191543ef0683234e63f02d8fe60 --- /dev/null +++ b/object_detection/configs/gfl/gfl_r50_fpn_mstrain_2x_coco.py @@ -0,0 +1,22 @@ +_base_ = './gfl_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) +# multi-scale training +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/object_detection/configs/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py b/object_detection/configs/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..25398075cb866db8dd49d0bbd48cad19566e77e5 --- /dev/null +++ b/object_detection/configs/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py @@ -0,0 +1,18 @@ +_base_ = './gfl_r50_fpn_mstrain_2x_coco.py' +model = dict( + type='GFL', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, False, True, True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py b/object_detection/configs/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..effda195cb0f18b3137c2b923d59f8cba025ba8e --- /dev/null +++ b/object_detection/configs/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py @@ -0,0 +1,16 @@ +_base_ = './gfl_r50_fpn_mstrain_2x_coco.py' +model = dict( + type='GFL', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/gfl/metafile.yml b/object_detection/configs/gfl/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..8f049c6bc9209120c5b9526552b3d7b6f157cc93 --- /dev/null +++ b/object_detection/configs/gfl/metafile.yml @@ -0,0 +1,134 @@ +Collections: + - Name: Generalized Focal Loss + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Generalized Focal Loss + - FPN + - ResNet + Paper: + URL: 
https://arxiv.org/abs/2006.04388 + Title: 'Generalized Focal Loss: Learning Qualified and Distributed Bounding Boxes for Dense Object Detection' + README: configs/gfl/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.2.0/mmdet/models/detectors/gfl.py#L6 + Version: v2.2.0 + +Models: + - Name: gfl_r50_fpn_1x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_r50_fpn_1x_coco.py + Metadata: + inference time (ms/im): + - value: 51.28 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_1x_coco/gfl_r50_fpn_1x_coco_20200629_121244-25944287.pth + + - Name: gfl_r50_fpn_mstrain_2x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_r50_fpn_mstrain_2x_coco.py + Metadata: + inference time (ms/im): + - value: 51.28 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_mstrain_2x_coco/gfl_r50_fpn_mstrain_2x_coco_20200629_213802-37bb1edc.pth + + - Name: gfl_r101_fpn_mstrain_2x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py + Metadata: + inference time (ms/im): + - value: 68.03 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_mstrain_2x_coco/gfl_r101_fpn_mstrain_2x_coco_20200629_200126-dd12f847.pth + + - Name: gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py + Metadata: + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20200630_102002-134b07df.pth + + - Name: gfl_x101_32x4d_fpn_mstrain_2x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py + Metadata: + inference time (ms/im): + - value: 82.64 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco/gfl_x101_32x4d_fpn_mstrain_2x_coco_20200630_102002-50c1ffdb.pth + + - Name: gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py + Metadata: + inference time (ms/im): + - value: 93.46 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 48.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco_20200630_102002-14a2bf25.pth 
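For readers who want to poke at the GFL configs added in this folder, a minimal sketch of loading one and building the detector is shown below. It assumes an mmdetection/mmcv 2.x environment compatible with these configs (the style where `train_cfg`/`test_cfg` live inside the model dict) and that the referenced `_base_` files sit next to this folder as they do in this repository; it is an optional inspection step, not part of the repo's own workflow.

```python
# Sketch: inspect a GFL config and instantiate the detector from it.
# Assumes mmdetection v2.x with mmcv installed; the path is relative to the
# repository root, where object_detection/configs/_base_/ also exists.
from mmcv import Config
from mmdet.models import build_detector

cfg = Config.fromfile('object_detection/configs/gfl/gfl_r50_fpn_1x_coco.py')
print(cfg.model.bbox_head.loss_cls)   # QualityFocalLoss, beta=2.0
print(cfg.model.bbox_head.loss_dfl)   # DistributionFocalLoss, loss_weight=0.25

model = build_detector(cfg.model)     # GFL detector with ResNet-50 + FPN
print(type(model).__name__)
```

Training would then go through mmdetection's usual `tools/train.py` entry point (if it is kept in this repository) with the same config path as its argument.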
diff --git a/object_detection/configs/ghm/README.md b/object_detection/configs/ghm/README.md new file mode 100644 index 0000000000000000000000000000000000000000..97b6090a67dbfffd723b44a20a6d5e81983fa593 --- /dev/null +++ b/object_detection/configs/ghm/README.md @@ -0,0 +1,37 @@ +# Gradient Harmonized Single-stage Detector + +## Abstract + + + +Despite the great success of two-stage detectors, single-stage detector is still a more elegant and efficient way, yet suffers from the two well-known disharmonies during training, i.e. the huge difference in quantity between positive and negative examples as well as between easy and hard examples. In this work, we first point out that the essential effect of the two disharmonies can be summarized in term of the gradient. Further, we propose a novel gradient harmonizing mechanism (GHM) to be a hedging for the disharmonies. The philosophy behind GHM can be easily embedded into both classification loss function like cross-entropy (CE) and regression loss function like smooth-L1 (SL1) loss. To this end, two novel loss functions called GHM-C and GHM-R are designed to balancing the gradient flow for anchor classification and bounding box refinement, respectively. Ablation study on MS COCO demonstrates that without laborious hyper-parameter tuning, both GHM-C and GHM-R can bring substantial improvement for single-stage detector. Without any whistles and bells, our model achieves 41.6 mAP on COCO test-dev set which surpasses the state-of-the-art method, Focal Loss (FL) + SL1, by 0.8. + + +
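To make the GHM-C idea above concrete: each example's "gradient norm" `|sigmoid(x) - y|` is binned, and examples falling into densely populated bins (the very easy negatives and the extreme outliers) are down-weighted by the inverse density. The following is a simplified, self-contained PyTorch sketch of that re-weighting for a binary classification loss; it is an illustration only, not the `GHMC` loss class these configs select, which additionally keeps an exponential moving average of the bin counts (the `momentum=0.75` seen below) and supports per-sample label weights.

```python
# Simplified sketch of GHM-C: re-weight BCE by the inverse gradient density.
import torch
import torch.nn.functional as F


def ghm_c_loss(logits: torch.Tensor, targets: torch.Tensor,
               bins: int = 30) -> torch.Tensor:
    """logits, targets: (N,) with hard 0/1 targets (e.g. anchor labels)."""
    edges = torch.linspace(0, 1, bins + 1)
    edges[-1] += 1e-6                                  # include g == 1
    g = (logits.sigmoid().detach() - targets).abs()    # per-example gradient norm

    n = logits.numel()
    weights = torch.zeros_like(logits)
    valid_bins = 0
    for i in range(bins):
        in_bin = (g >= edges[i]) & (g < edges[i + 1])
        count = int(in_bin.sum())
        if count > 0:
            weights[in_bin] = n / count                # sparse bins -> larger weight
            valid_bins += 1
    if valid_bins > 0:
        weights = weights / valid_bins

    bce = F.binary_cross_entropy_with_logits(logits, targets, reduction='none')
    return (bce * weights).sum() / n


if __name__ == '__main__':
    x = torch.randn(1000, requires_grad=True)
    y = (torch.rand(1000) < 0.01).float()              # heavy class imbalance
    ghm_c_loss(x, y).backward()
```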
+ +
+ + + + +## Citation + + + +``` +@inproceedings{li2019gradient, + title={Gradient Harmonized Single-stage Detector}, + author={Li, Buyu and Liu, Yu and Wang, Xiaogang}, + booktitle={AAAI Conference on Artificial Intelligence}, + year={2019} +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | pytorch | 1x | 4.0 | 3.3 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r50_fpn_1x_coco/retinanet_ghm_r50_fpn_1x_coco_20200130-a437fda3.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r50_fpn_1x_coco/retinanet_ghm_r50_fpn_1x_coco_20200130_004213.log.json) | +| R-101-FPN | pytorch | 1x | 6.0 | 4.4 | 39.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ghm/retinanet_ghm_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r101_fpn_1x_coco/retinanet_ghm_r101_fpn_1x_coco_20200130-c148ee8f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r101_fpn_1x_coco/retinanet_ghm_r101_fpn_1x_coco_20200130_145259.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.2 | 5.1 | 40.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco/retinanet_ghm_x101_32x4d_fpn_1x_coco_20200131-e4333bd0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco/retinanet_ghm_x101_32x4d_fpn_1x_coco_20200131_113653.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.3 | 5.2 | 41.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco/retinanet_ghm_x101_64x4d_fpn_1x_coco_20200131-dd381cef.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco/retinanet_ghm_x101_64x4d_fpn_1x_coco_20200131_113723.log.json) | diff --git a/object_detection/configs/ghm/metafile.yml b/object_detection/configs/ghm/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..b4f488c43659eb25f81ea0e573524ffff3738b4c --- /dev/null +++ b/object_detection/configs/ghm/metafile.yml @@ -0,0 +1,101 @@ +Collections: + - Name: GHM + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - GHM-C + - GHM-R + - FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/1811.05181 + Title: 'Gradient Harmonized Single-stage Detector' + README: configs/ghm/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/losses/ghm_loss.py#L21 + Version: v2.0.0 + +Models: + - Name: retinanet_ghm_r50_fpn_1x_coco + In Collection: GHM + Config: configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.0 + inference time (ms/im): + - value: 303.03 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.0 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r50_fpn_1x_coco/retinanet_ghm_r50_fpn_1x_coco_20200130-a437fda3.pth + + - Name: retinanet_ghm_r101_fpn_1x_coco + In Collection: GHM + Config: configs/ghm/retinanet_ghm_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 227.27 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r101_fpn_1x_coco/retinanet_ghm_r101_fpn_1x_coco_20200130-c148ee8f.pth + + - Name: retinanet_ghm_x101_32x4d_fpn_1x_coco + In Collection: GHM + Config: configs/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.2 + inference time (ms/im): + - value: 196.08 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco/retinanet_ghm_x101_32x4d_fpn_1x_coco_20200131-e4333bd0.pth + + - Name: retinanet_ghm_x101_64x4d_fpn_1x_coco + In Collection: GHM + Config: configs/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.3 + inference time (ms/im): + - value: 192.31 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco/retinanet_ghm_x101_64x4d_fpn_1x_coco_20200131-dd381cef.pth diff --git a/object_detection/configs/ghm/retinanet_ghm_r101_fpn_1x_coco.py b/object_detection/configs/ghm/retinanet_ghm_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..aaf6fc26d323a99a92b0ce266c7c7dc8a919d6f3 --- /dev/null +++ b/object_detection/configs/ghm/retinanet_ghm_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './retinanet_ghm_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py b/object_detection/configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..61b9751057f10f2173b8e7edde12cca53ebbd2d0 --- /dev/null +++ b/object_detection/configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py @@ -0,0 +1,19 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +model = dict( + bbox_head=dict( + loss_cls=dict( + _delete_=True, + type='GHMC', + bins=30, + momentum=0.75, + use_sigmoid=True, + loss_weight=1.0), + loss_bbox=dict( + _delete_=True, + type='GHMR', + mu=0.02, + bins=10, + momentum=0.7, + loss_weight=10.0))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py b/object_detection/configs/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cd2e4cc34b4526ff32d193c30d5884b16c6adf5c --- /dev/null +++ b/object_detection/configs/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './retinanet_ghm_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + 
base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py b/object_detection/configs/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b6107d8c31bd64dee3a70a1ea5e0167247af6b73 --- /dev/null +++ b/object_detection/configs/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './retinanet_ghm_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/gn+ws/README.md b/object_detection/configs/gn+ws/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5cde531c4de39ceb629e99b55f2cb0904370e752 --- /dev/null +++ b/object_detection/configs/gn+ws/README.md @@ -0,0 +1,58 @@ +# Weight Standardization + +## Abstract + + + +Batch Normalization (BN) has become an out-of-box technique to improve deep network training. However, its effectiveness is limited for micro-batch training, i.e., each GPU typically has only 1-2 images for training, which is inevitable for many computer vision tasks, e.g., object detection and semantic segmentation, constrained by memory consumption. To address this issue, we propose Weight Standardization (WS) and Batch-Channel Normalization (BCN) to bring two success factors of BN into micro-batch training: 1) the smoothing effects on the loss landscape and 2) the ability to avoid harmful elimination singularities along the training trajectory. WS standardizes the weights in convolutional layers to smooth the loss landscape by reducing the Lipschitz constants of the loss and the gradients; BCN combines batch and channel normalizations and leverages estimated statistics of the activations in convolutional layers to keep networks away from elimination singularities. We validate WS and BCN on comprehensive computer vision tasks, including image classification, object detection, instance segmentation, video recognition and semantic segmentation. All experimental results consistently show that WS and BCN improve micro-batch training significantly. Moreover, using WS and BCN with micro-batch training is even able to match or outperform the performances of BN with large-batch training. + + +
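As a quick illustration of the Weight Standardization operation summarized above (the `type='ConvWS'` convolutions used throughout the configs that follow), here is a minimal PyTorch sketch: each convolution filter is standardized over its input-channel and spatial dimensions before being applied. The 1e-5 epsilon is an assumed default, and the class is a conceptual re-implementation, not the mmcv layer behind `type='ConvWS'`.

```python
# Minimal sketch of a weight-standardized convolution paired with GroupNorm.
import torch
import torch.nn as nn
import torch.nn.functional as F


class WSConv2d(nn.Conv2d):
    """Conv2d whose kernel is standardized per output channel on the fly."""

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        w = self.weight
        mean = w.mean(dim=(1, 2, 3), keepdim=True)     # over C_in, kH, kW
        std = w.std(dim=(1, 2, 3), keepdim=True) + 1e-5
        w = (w - mean) / std
        return F.conv2d(x, w, self.bias, self.stride,
                        self.padding, self.dilation, self.groups)


if __name__ == '__main__':
    # WS targets micro-batch training, so it is normally combined with GN
    # (num_groups=32 in these configs) rather than BatchNorm.
    block = nn.Sequential(WSConv2d(3, 64, 3, padding=1),
                          nn.GroupNorm(32, 64),
                          nn.ReLU(inplace=True))
    print(block(torch.randn(2, 3, 32, 32)).shape)      # torch.Size([2, 64, 32, 32])
```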
+ +
+ + + + +## Citation + + + +``` +@article{weightstandardization, + author = {Siyuan Qiao and Huiyu Wang and Chenxi Liu and Wei Shen and Alan Yuille}, + title = {Weight Standardization}, + journal = {arXiv preprint arXiv:1903.10520}, + year = {2019}, +} +``` + +## Results and Models + +Faster R-CNN + +| Backbone | Style | Normalization | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:---------:|:-------:|:-------------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | pytorch | GN+WS | 1x | 5.9 | 11.7 | 39.7 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco/faster_rcnn_r50_fpn_gn_ws-all_1x_coco_20200130-613d9fe2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco/faster_rcnn_r50_fpn_gn_ws-all_1x_coco_20200130_210936.log.json) | +| R-101-FPN | pytorch | GN+WS | 1x | 8.9 | 9.0 | 41.7 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco/faster_rcnn_r101_fpn_gn_ws-all_1x_coco_20200205-a93b0d75.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco/faster_rcnn_r101_fpn_gn_ws-all_1x_coco_20200205_232146.log.json) | +| X-50-32x4d-FPN | pytorch | GN+WS | 1x | 7.0 | 10.3 | 40.7 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco_20200203-839c5d9d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco_20200203_220113.log.json) | +| X-101-32x4d-FPN | pytorch | GN+WS | 1x | 10.8 | 7.6 | 42.1 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco_20200212-27da1bc2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco_20200212_195302.log.json) | + +Mask R-CNN + +| Backbone | Style | Normalization | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:---------:|:-------:|:-------------:|:---------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | pytorch | GN+WS | 2x | 7.3 | 10.5 | 40.6 | 36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco/mask_rcnn_r50_fpn_gn_ws-all_2x_coco_20200226-16acb762.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco/mask_rcnn_r50_fpn_gn_ws-all_2x_coco_20200226_062128.log.json) | +| R-101-FPN | pytorch | GN+WS | 2x | 10.3 | 8.6 | 42.0 | 37.7 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco/mask_rcnn_r101_fpn_gn_ws-all_2x_coco_20200212-ea357cd9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco/mask_rcnn_r101_fpn_gn_ws-all_2x_coco_20200212_213627.log.json) | +| X-50-32x4d-FPN | pytorch | GN+WS | 2x | 8.4 | 9.3 | 41.1 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco_20200216-649fdb6f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco_20200216_201500.log.json) | +| X-101-32x4d-FPN | pytorch | GN+WS | 2x | 12.2 | 7.1 | 42.1 | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco_20200319-33fb95b5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco_20200319_104101.log.json) | +| R-50-FPN | pytorch | GN+WS | 20-23-24e | 7.3 | - | 41.1 | 37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco_20200213-487d1283.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco_20200213_035123.log.json) | +| R-101-FPN | pytorch | GN+WS | 20-23-24e | 10.3 | - | 43.1 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco_20200213-57b5a50f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco_20200213_130142.log.json) | +| X-50-32x4d-FPN | pytorch | GN+WS | 20-23-24e | 8.4 | - | 42.1 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200226-969bcb2c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200226_093732.log.json) | +| X-101-32x4d-FPN | pytorch | GN+WS | 20-23-24e | 12.2 | - | 42.7 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200316-e6cd35ef.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200316_013741.log.json) | + +Note: + +- GN+WS requires about 5% more memory than GN, and it is only 5% slower than GN. +- In the paper, a 20-23-24e lr schedule is used instead of 2x. +- The X-50-GN and X-101-GN pretrained models are also shared by the authors. diff --git a/object_detection/configs/gn+ws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py b/object_detection/configs/gn+ws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cd2cb2b6348a9555b8c80c3f1398d8989ef3f7a0 --- /dev/null +++ b/object_detection/configs/gn+ws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://jhu/resnet101_gn_ws'))) diff --git a/object_detection/configs/gn+ws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py b/object_detection/configs/gn+ws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1b326b88e7309ee217646b5550a23a6796ad5c0b --- /dev/null +++ b/object_detection/configs/gn+ws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://jhu/resnet50_gn_ws')), + neck=dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg))) diff --git a/object_detection/configs/gn+ws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py b/object_detection/configs/gn+ws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f64ae89178ed351dbe4be80318b9a1da385853c2 --- /dev/null +++ b/object_detection/configs/gn+ws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py @@ -0,0 +1,18 @@ +_base_ = './faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py' +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://jhu/resnext101_32x4d_gn_ws'))) diff --git a/object_detection/configs/gn+ws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py b/object_detection/configs/gn+ws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..246851b9f2be4d0e0f129d20692d22acf194308a --- /dev/null +++ b/object_detection/configs/gn+ws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py @@ -0,0 +1,18 @@ +_base_ = './faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py' +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + type='ResNeXt', + depth=50, + groups=32, + 
base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://jhu/resnext50_32x4d_gn_ws'))) diff --git a/object_detection/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py b/object_detection/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a790d932152420f5be0a05b21ac122087d315398 --- /dev/null +++ b/object_detection/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py' +# learning policy +lr_config = dict(step=[20, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py b/object_detection/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a9fa6a2445020979a217ee3b648d49e5577d2357 --- /dev/null +++ b/object_detection/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://jhu/resnet101_gn_ws'))) diff --git a/object_detection/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py b/object_detection/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..55168085cd085c241bfbb85a76bb230241378faa --- /dev/null +++ b/object_detection/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' +# learning policy +lr_config = dict(step=[20, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py b/object_detection/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..63be60ff8c117402aa46811ef86ba16aebc76a45 --- /dev/null +++ b/object_detection/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py @@ -0,0 +1,20 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://jhu/resnet50_gn_ws')), + neck=dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg), + mask_head=dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg))) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py b/object_detection/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cfa14c99543382328b2cb4ac7c2d0dbb2a562017 --- /dev/null +++ b/object_detection/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py' +# learning policy +lr_config = dict(step=[20, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git 
a/object_detection/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py b/object_detection/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6498b03fb4fda52a995b5b76da8b02385697ebc1 --- /dev/null +++ b/object_detection/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py @@ -0,0 +1,19 @@ +_base_ = './mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' +# model settings +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://jhu/resnext101_32x4d_gn_ws'))) diff --git a/object_detection/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py b/object_detection/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..79ce0adf1bf760c371bd1a1c3a9b028cef51c4b4 --- /dev/null +++ b/object_detection/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py' +# learning policy +lr_config = dict(step=[20, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py b/object_detection/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7fac3175e3a4e900f5051bd0385a6dd828cef9c7 --- /dev/null +++ b/object_detection/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py @@ -0,0 +1,19 @@ +_base_ = './mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' +# model settings +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + type='ResNeXt', + depth=50, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://jhu/resnext50_32x4d_gn_ws'))) diff --git a/object_detection/configs/gn+ws/metafile.yml b/object_detection/configs/gn+ws/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..bc89359cec36e124ff3f31b21c981968f2e21206 --- /dev/null +++ b/object_detection/configs/gn+ws/metafile.yml @@ -0,0 +1,263 @@ +Collections: + - Name: Weight Standardization + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Group Normalization + - Weight Standardization + Paper: + URL: https://arxiv.org/abs/1903.10520 + Title: 'Weight Standardization' + README: configs/gn+ws/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/configs/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py + Version: v2.0.0 + +Models: + - Name: faster_rcnn_r50_fpn_gn_ws-all_1x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py + Metadata: + Training Memory (GB): 5.9 + inference time (ms/im): + - value: 85.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.7 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco/faster_rcnn_r50_fpn_gn_ws-all_1x_coco_20200130-613d9fe2.pth + + - Name: faster_rcnn_r101_fpn_gn_ws-all_1x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py + Metadata: + Training Memory (GB): 8.9 + inference time (ms/im): + - value: 111.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco/faster_rcnn_r101_fpn_gn_ws-all_1x_coco_20200205-a93b0d75.pth + + - Name: faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 97.09 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco_20200203-839c5d9d.pth + + - Name: faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py + Metadata: + Training Memory (GB): 10.8 + inference time (ms/im): + - value: 131.58 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco_20200212-27da1bc2.pth + + - Name: mask_rcnn_r50_fpn_gn_ws-all_2x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py + Metadata: + Training Memory (GB): 7.3 + inference time (ms/im): + - value: 95.24 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco/mask_rcnn_r50_fpn_gn_ws-all_2x_coco_20200226-16acb762.pth + + - Name: mask_rcnn_r101_fpn_gn_ws-all_2x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py + Metadata: + Training Memory (GB): 10.3 + inference time (ms/im): + - value: 116.28 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco/mask_rcnn_r101_fpn_gn_ws-all_2x_coco_20200212-ea357cd9.pth + + - Name: mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py + Metadata: + Training Memory (GB): 8.4 + inference time (ms/im): + - value: 107.53 + 
hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco_20200216-649fdb6f.pth + + - Name: mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py + Metadata: + Training Memory (GB): 12.2 + inference time (ms/im): + - value: 140.85 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco_20200319-33fb95b5.pth + + - Name: mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py + Metadata: + Training Memory (GB): 7.3 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco_20200213-487d1283.pth + + - Name: mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py + Metadata: + Training Memory (GB): 10.3 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco_20200213-57b5a50f.pth + + - Name: mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py + Metadata: + Training Memory (GB): 8.4 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200226-969bcb2c.pth + + - Name: mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py + Metadata: + Training Memory (GB): 12.2 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200316-e6cd35ef.pth diff --git a/object_detection/configs/gn/README.md b/object_detection/configs/gn/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..4a9d9b5e734686745f4b86a970406d87e5ce7fa9 --- /dev/null +++ b/object_detection/configs/gn/README.md @@ -0,0 +1,45 @@ +# Group Normalization + +## Abstract + + + +Batch Normalization (BN) is a milestone technique in the development of deep learning, enabling various networks to train. However, normalizing along the batch dimension introduces problems --- BN's error increases rapidly when the batch size becomes smaller, caused by inaccurate batch statistics estimation. This limits BN's usage for training larger models and transferring features to computer vision tasks including detection, segmentation, and video, which require small batches constrained by memory consumption. In this paper, we present Group Normalization (GN) as a simple alternative to BN. GN divides the channels into groups and computes within each group the mean and variance for normalization. GN's computation is independent of batch sizes, and its accuracy is stable in a wide range of batch sizes. On ResNet-50 trained in ImageNet, GN has 10.6% lower error than its BN counterpart when using a batch size of 2; when using typical batch sizes, GN is comparably good with BN and outperforms other normalization variants. Moreover, GN can be naturally transferred from pre-training to fine-tuning. GN can outperform its BN-based counterparts for object detection and segmentation in COCO, and for video classification in Kinetics, showing that GN can effectively replace the powerful BN in a variety of tasks. GN can be easily implemented by a few lines of code in modern libraries. + + +
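As the abstract says, GN is only a few lines of tensor arithmetic. The snippet below is a minimal, illustrative version of that computation (assuming PyTorch; it omits the learnable per-channel scale and shift that `torch.nn.GroupNorm` adds). It mirrors the `norm_cfg=dict(type='GN', num_groups=32, requires_grad=True)` setting used throughout these configs; in practice MMDetection builds the GN layers from `norm_cfg`, so none of this is written by hand.

```python
# Minimal sketch of Group Normalization (illustrative; the configs rely on
# torch.nn.GroupNorm via norm_cfg, which also adds learnable affine parameters).
import torch

def group_norm(x, num_groups=32, eps=1e-5):
    """Normalize an (N, C, H, W) tensor within channel groups, independently of batch size."""
    n, c, h, w = x.shape
    assert c % num_groups == 0, 'channels must be divisible by num_groups'
    x = x.reshape(n, num_groups, c // num_groups, h, w)
    # statistics are per sample and per group, so they do not depend on the batch size N
    mean = x.mean(dim=(2, 3, 4), keepdim=True)
    var = ((x - mean) ** 2).mean(dim=(2, 3, 4), keepdim=True)
    x = (x - mean) / torch.sqrt(var + eps)
    return x.reshape(n, c, h, w)

feats = torch.randn(2, 256, 50, 76)        # e.g. one FPN level
normed = group_norm(feats, num_groups=32)  # same grouping as GN(32) in these configs
```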
+ +
+ + + + +## Citation + + + +```latex +@inproceedings{wu2018group, + title={Group Normalization}, + author={Wu, Yuxin and He, Kaiming}, + booktitle={Proceedings of the European Conference on Computer Vision (ECCV)}, + year={2018} +} +``` + +## Results and Models + +| Backbone | model | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:-------------:|:----------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN (d) | Mask R-CNN | 2x | 7.1 | 11.0 | 40.2 | 36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_2x_coco/mask_rcnn_r50_fpn_gn-all_2x_coco_20200206-8eee02a6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_2x_coco/mask_rcnn_r50_fpn_gn-all_2x_coco_20200206_050355.log.json) | +| R-50-FPN (d) | Mask R-CNN | 3x | 7.1 | - | 40.5 | 36.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_3x_coco/mask_rcnn_r50_fpn_gn-all_3x_coco_20200214-8b23b1e5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_3x_coco/mask_rcnn_r50_fpn_gn-all_3x_coco_20200214_063512.log.json) | +| R-101-FPN (d) | Mask R-CNN | 2x | 9.9 | 9.0 | 41.9 | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_2x_coco/mask_rcnn_r101_fpn_gn-all_2x_coco_20200205-d96b1b50.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_2x_coco/mask_rcnn_r101_fpn_gn-all_2x_coco_20200205_234402.log.json) | +| R-101-FPN (d) | Mask R-CNN | 3x | 9.9 | | 42.1 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_3x_coco/mask_rcnn_r101_fpn_gn-all_3x_coco_20200513_181609-0df864f4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_3x_coco/mask_rcnn_r101_fpn_gn-all_3x_coco_20200513_181609.log.json) | +| R-50-FPN (c) | Mask R-CNN | 2x | 7.1 | 10.9 | 40.0 | 36.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco_20200207-20d3e849.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco_20200207_225832.log.json) | +| R-50-FPN (c) | Mask R-CNN | 3x | 7.1 | - | 40.1 | 36.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco_20200225-542aefbc.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco_20200225_235135.log.json) | + +**Notes:** + +- (d) means pretrained model converted from Detectron, and (c) means the contributed model pretrained by 
[@thangvubk](https://github.com/thangvubk). +- The `3x` schedule is epoch [28, 34, 36]. +- **Memory, Train/Inf time is outdated.** diff --git a/object_detection/configs/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py b/object_detection/configs/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a505ba0e26246772c9d18874a5552831e2efe33f --- /dev/null +++ b/object_detection/configs/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py @@ -0,0 +1,7 @@ +_base_ = './mask_rcnn_r50_fpn_gn-all_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet101_gn'))) diff --git a/object_detection/configs/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py b/object_detection/configs/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..12a9d17e5592ade405605e3ffb2d4d2fa632d03e --- /dev/null +++ b/object_detection/configs/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py @@ -0,0 +1,5 @@ +_base_ = './mask_rcnn_r101_fpn_gn-all_2x_coco.py' + +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/object_detection/configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py b/object_detection/configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1de7d98e1034f7330552958cae5ef3ad402caed7 --- /dev/null +++ b/object_detection/configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py @@ -0,0 +1,49 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet50_gn')), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg), + mask_head=dict(norm_cfg=norm_cfg))) +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py b/object_detection/configs/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f9177196cb91c6bbc6dd4383837819f053b334bb --- /dev/null +++ b/object_detection/configs/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py @@ -0,0 +1,5 @@ +_base_ = './mask_rcnn_r50_fpn_gn-all_2x_coco.py' + +# learning policy +lr_config = 
dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/object_detection/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py b/object_detection/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2f430fdab1a825211582b48b0eacab98b55c2167 --- /dev/null +++ b/object_detection/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py @@ -0,0 +1,17 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://contrib/resnet50_gn')), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg), + mask_head=dict(norm_cfg=norm_cfg))) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py b/object_detection/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..66834f08ba398e7621aa8c5a3bfe12a646aecde2 --- /dev/null +++ b/object_detection/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py @@ -0,0 +1,5 @@ +_base_ = './mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py' + +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/object_detection/configs/gn/metafile.yml b/object_detection/configs/gn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..4a1ecae09a7fd5b4ff51dcde677632ad10a2e0d7 --- /dev/null +++ b/object_detection/configs/gn/metafile.yml @@ -0,0 +1,162 @@ +Collections: + - Name: Group Normalization + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Group Normalization + Paper: + URL: https://arxiv.org/abs/1803.08494 + Title: 'Group Normalization' + README: configs/gn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py + Version: v2.0.0 + +Models: + - Name: mask_rcnn_r50_fpn_gn-all_2x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_2x_coco/mask_rcnn_r50_fpn_gn-all_2x_coco_20200206-8eee02a6.pth + + - Name: mask_rcnn_r50_fpn_gn-all_3x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.7 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_3x_coco/mask_rcnn_r50_fpn_gn-all_3x_coco_20200214-8b23b1e5.pth + + - Name: mask_rcnn_r101_fpn_gn-all_2x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py + Metadata: + Training Memory (GB): 9.9 + inference time (ms/im): + - value: 111.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_2x_coco/mask_rcnn_r101_fpn_gn-all_2x_coco_20200205-d96b1b50.pth + + - Name: mask_rcnn_r101_fpn_gn-all_3x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py + Metadata: + Training Memory (GB): 9.9 + inference time (ms/im): + - value: 111.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_3x_coco/mask_rcnn_r101_fpn_gn-all_3x_coco_20200513_181609-0df864f4.pth + + - Name: mask_rcnn_r50_fpn_gn-all_contrib_2x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 91.74 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco_20200207-20d3e849.pth + + - Name: mask_rcnn_r50_fpn_gn-all_contrib_3x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 91.74 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco_20200225-542aefbc.pth diff --git a/object_detection/configs/grid_rcnn/README.md b/object_detection/configs/grid_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b53cc9ea9627d100642bd26f2835a4aa7b18efdd --- /dev/null +++ b/object_detection/configs/grid_rcnn/README.md @@ -0,0 +1,51 @@ +# Grid R-CNN + +## Abstract + + + +This paper proposes a novel object detection framework named Grid R-CNN, which adopts a grid guided localization mechanism for accurate object detection. Different from the traditional regression based methods, the Grid R-CNN captures the spatial information explicitly and enjoys the position sensitive property of fully convolutional architecture. 
Instead of using only two independent points, we design a multi-point supervision formulation to encode more clues in order to reduce the impact of inaccurate prediction of specific points. To take full advantage of the correlation of points in a grid, we propose a two-stage information fusion strategy to fuse feature maps of neighbor grid points. The grid guided localization approach is easy to extend to different state-of-the-art detection frameworks. Grid R-CNN leads to high-quality object localization, and experiments demonstrate that it achieves a 4.1% AP gain at IoU=0.8 and a 10.0% AP gain at IoU=0.9 on the COCO benchmark compared to Faster R-CNN with a Res50 backbone and FPN architecture. + +Grid R-CNN is a well-performing object detection framework. It transforms the traditional box offset regression problem into a grid point estimation problem. With the guidance of the grid points, it can obtain high-quality localization results. However, the speed of Grid R-CNN is not satisfactory. In this technical report we present Grid R-CNN Plus, a better and faster version of Grid R-CNN. We have made several updates that significantly speed up the framework and simultaneously improve the accuracy. On the COCO dataset, the Res50-FPN based Grid R-CNN Plus detector achieves an mAP of 40.4%, outperforming the baseline on the same model by 3.0 points with similar inference time. + + 
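To make the "grid point estimation" idea concrete, here is a deliberately simplified decoding sketch (the function name is made up and this is not the actual `GridHead` logic used by the configs below): each of the 9 grid points predicts a heatmap over the RoI, the argmax of each heatmap is mapped back to image coordinates, and the extreme points recover the box. The real head additionally fuses the heatmaps of neighbouring grid points, as described in the abstract.

```python
# Simplified illustration of grid-based box decoding (assumes PyTorch; not the
# exact GridHead implementation used by the configs below).
import torch

def decode_grid_points(heatmaps, roi):
    """heatmaps: (9, S, S) score maps, one per grid point; roi: (x1, y1, x2, y2) in image coords."""
    x1, y1, x2, y2 = roi
    s = heatmaps.shape[-1]
    xs, ys = [], []
    for hm in heatmaps:
        idx = int(torch.argmax(hm))                   # best cell of this point's heatmap
        py, px = divmod(idx, s)
        xs.append(x1 + (px + 0.5) / s * (x2 - x1))    # cell centre -> image x
        ys.append(y1 + (py + 0.5) / s * (y2 - y1))    # cell centre -> image y
    # the grid points lie on the box border, so their extremes recover the box
    return [min(xs), min(ys), max(xs), max(ys)]

box = decode_grid_points(torch.rand(9, 56, 56), (10.0, 20.0, 110.0, 180.0))
```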
+ +
+ + + + +## Citation + + + +```latex +@inproceedings{lu2019grid, + title={Grid r-cnn}, + author={Lu, Xin and Li, Buyu and Yue, Yuxin and Li, Quanquan and Yan, Junjie}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019} +} + +@article{lu2019grid, + title={Grid R-CNN Plus: Faster and Better}, + author={Lu, Xin and Li, Buyu and Yue, Yuxin and Li, Quanquan and Yan, Junjie}, + journal={arXiv preprint arXiv:1906.05688}, + year={2019} +} +``` + +## Results and Models + +| Backbone | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:-----------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | 2x | 5.1 | 15.0 | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco/grid_rcnn_r50_fpn_gn-head_2x_coco_20200130-6cca8223.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco/grid_rcnn_r50_fpn_gn-head_2x_coco_20200130_221140.log.json) | +| R-101 | 2x | 7.0 | 12.6 | 41.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco/grid_rcnn_r101_fpn_gn-head_2x_coco_20200309-d6eca030.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco/grid_rcnn_r101_fpn_gn-head_2x_coco_20200309_164224.log.json) | +| X-101-32x4d | 2x | 8.3 | 10.8 | 42.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco_20200130-d8f0e3ff.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco_20200130_215413.log.json) | +| X-101-64x4d | 2x | 11.3 | 7.7 | 43.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco_20200204-ec76a754.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco_20200204_080641.log.json) | + +**Notes:** + +- All models are trained with 8 GPUs instead of 32 GPUs in the original paper. +- The warming up lasts for 1 epoch and `2x` here indicates 25 epochs. 
diff --git a/object_detection/configs/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py b/object_detection/configs/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1bb5889bc0ce4013ae3e6bf87d04f94417e84ff5 --- /dev/null +++ b/object_detection/configs/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py @@ -0,0 +1,7 @@ +_base_ = './grid_rcnn_r50_fpn_gn-head_2x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py b/object_detection/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4aa00ece55280697fc67bd727077a8c9a58cfa44 --- /dev/null +++ b/object_detection/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = ['grid_rcnn_r50_fpn_gn-head_2x_coco.py'] +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[8, 11]) +checkpoint_config = dict(interval=1) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/object_detection/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py b/object_detection/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..df63cd5d82a3c622ffad6d044e80ebe5f7c8c122 --- /dev/null +++ b/object_detection/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py @@ -0,0 +1,131 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='GridRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='GridRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + with_reg=False, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False), + grid_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + grid_head=dict( + type='GridHead', + grid_points=9, + num_convs=8, + in_channels=256, + point_feat_channels=64, + norm_cfg=dict(type='GN', num_groups=36), + loss_grid=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=15))), + # model training and testing settings + 
train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_radius=1, + pos_weight=-1, + max_num_grid=192, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.03, + nms=dict(type='nms', iou_threshold=0.3), + max_per_img=100))) +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=3665, + warmup_ratio=1.0 / 80, + step=[17, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=25) diff --git a/object_detection/configs/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py b/object_detection/configs/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3bc8516e223e3f74b003b5566876706ee8398fb1 --- /dev/null +++ b/object_detection/configs/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py @@ -0,0 +1,24 @@ +_base_ = './grid_rcnn_r50_fpn_gn-head_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=3665, + warmup_ratio=1.0 / 80, + step=[17, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=25) diff --git a/object_detection/configs/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py b/object_detection/configs/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c78f8f6501130a3e4f76269030b92f7f9e29fe07 --- /dev/null +++ b/object_detection/configs/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py @@ -0,0 +1,13 @@ +_base_ = './grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/grid_rcnn/metafile.yml b/object_detection/configs/grid_rcnn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..d1aa85137df3c2d03ad98a5ea6f2990b4c78e15d --- /dev/null +++ b/object_detection/configs/grid_rcnn/metafile.yml @@ -0,0 +1,101 @@ +Collections: + - Name: Grid R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RPN + - 
Dilated Convolution + - ResNet + - RoIAlign + Paper: + URL: https://arxiv.org/abs/1906.05688 + Title: 'Grid R-CNN' + README: configs/grid_rcnn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/grid_rcnn.py#L6 + Version: v2.0.0 + +Models: + - Name: grid_rcnn_r50_fpn_gn-head_2x_coco + In Collection: Grid R-CNN + Config: configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py + Metadata: + Training Memory (GB): 5.1 + inference time (ms/im): + - value: 66.67 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco/grid_rcnn_r50_fpn_gn-head_2x_coco_20200130-6cca8223.pth + + - Name: grid_rcnn_r101_fpn_gn-head_2x_coco + In Collection: Grid R-CNN + Config: configs/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 79.37 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco/grid_rcnn_r101_fpn_gn-head_2x_coco_20200309-d6eca030.pth + + - Name: grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco + In Collection: Grid R-CNN + Config: configs/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py + Metadata: + Training Memory (GB): 8.3 + inference time (ms/im): + - value: 92.59 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco_20200130-d8f0e3ff.pth + + - Name: grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco + In Collection: Grid R-CNN + Config: configs/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py + Metadata: + Training Memory (GB): 11.3 + inference time (ms/im): + - value: 129.87 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco_20200204-ec76a754.pth diff --git a/object_detection/configs/groie/README.md b/object_detection/configs/groie/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5a45be734aaa48a1c34db6165475853e62e5611b --- /dev/null +++ b/object_detection/configs/groie/README.md @@ -0,0 +1,77 @@ +# A novel Region of Interest Extraction Layer for Instance Segmentation + +## Abstract + + + +Given the wide diffusion of deep neural network architectures for computer vision tasks, several new applications are nowadays more and more feasible. Among them, a particular attention has been recently given to instance segmentation, by exploiting the results achievable by two-stage networks (such as Mask R-CNN or Faster R-CNN), derived from R-CNN. 
In these complex architectures, a crucial role is played by the Region of Interest (RoI) extraction layer, devoted to extracting a coherent subset of features from a single Feature Pyramid Network (FPN) layer attached on top of a backbone. +This paper is motivated by the need to overcome the limitations of existing RoI extractors which select only one (the best) layer from FPN. Our intuition is that all the layers of FPN retain useful information. Therefore, the proposed layer (called Generic RoI Extractor - GRoIE) introduces non-local building blocks and attention mechanisms to boost the performance. +A comprehensive ablation study at component level is conducted to find the best set of algorithms and parameters for the GRoIE layer. Moreover, GRoIE can be integrated seamlessly with any two-stage architecture for both object detection and instance segmentation tasks. Therefore, the improvements brought about by the use of GRoIE in different state-of-the-art architectures are also evaluated. The proposed layer yields up to a 1.1% AP improvement on bounding box detection and a 1.7% AP improvement on instance segmentation. + + 
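The configs in this folder express that idea as a `GenericRoIExtractor` with `aggregation='sum'`, a `pre_cfg` module applied per level, and a `post_cfg` module applied after the sum. The sketch below is a rough, illustrative stand-in (assuming PyTorch/torchvision; `TinyGRoIE` and its plain-conv `pre`/`post` modules are simplifications, not the repo's API): the key point is that every FPN level is pooled and contributes to each RoI.

```python
# Rough sketch of multi-level RoI aggregation in the spirit of GRoIE
# (illustrative only; the real extractor lives in mmdet's roi_extractors).
import torch
import torch.nn as nn
from torchvision.ops import roi_align

class TinyGRoIE(nn.Module):
    def __init__(self, channels=256, strides=(4, 8, 16, 32), out_size=7):
        super().__init__()
        self.strides = strides
        self.out_size = out_size
        # plain convs as stand-ins for the configs' pre_cfg / post_cfg modules
        self.pre = nn.Conv2d(channels, channels, 5, padding=2)
        self.post = nn.Conv2d(channels, channels, 3, padding=1)

    def forward(self, fpn_feats, rois):
        # rois: (K, 5) rows of (batch_idx, x1, y1, x2, y2) in image coordinates
        acc = 0
        for feat, stride in zip(fpn_feats, self.strides):
            pooled = roi_align(feat, rois, self.out_size,
                               spatial_scale=1.0 / stride, sampling_ratio=2)
            acc = acc + self.pre(pooled)   # every FPN level contributes (aggregation='sum')
        return self.post(acc)

fpn_feats = [torch.randn(1, 256, 800 // s, 1216 // s) for s in (4, 8, 16, 32)]
rois = torch.tensor([[0.0, 32.0, 48.0, 256.0, 320.0]])
out = TinyGRoIE()(fpn_feats, rois)         # -> (1, 256, 7, 7)
```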
+ +
+ + + + +## Introduction + +By Leonardo Rossi, Akbar Karimi and Andrea Prati from +[IMPLab](http://implab.ce.unipr.it/). + +We provide configs to reproduce the results in the paper for +"*A novel Region of Interest Extraction Layer for Instance Segmentation*" +on COCO object detection. + + + + +This paper is motivated by the need to overcome to the limitations of existing +RoI extractors which select only one (the best) layer from FPN. + +Our intuition is that all the layers of FPN retain useful information. + +Therefore, the proposed layer (called Generic RoI Extractor - **GRoIE**) +introduces non-local building blocks and attention mechanisms to boost the +performance. + +## Results and models + +The results on COCO 2017 minival (5k images) are shown in the below table. + +### Application of GRoIE to different architectures + +| Backbone | Method | Lr schd | box AP | mask AP | Config | Download| +| :-------: | :--------------: | :-----: | :----: | :-----: | :-------:| :--------:| +| R-50-FPN | Faster Original | 1x | 37.4 | | [config](../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +| R-50-FPN | + GRoIE | 1x | 38.3 | | [config](./faster_rcnn_r50_fpn_groie_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/groie/faster_rcnn_r50_fpn_groie_1x_coco/faster_rcnn_r50_fpn_groie_1x_coco_20200604_211715-66ee9516.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/groie/faster_rcnn_r50_fpn_groie_1x_coco/faster_rcnn_r50_fpn_groie_1x_coco_20200604_211715.log.json) | +| R-50-FPN | Grid R-CNN | 1x | 39.1 | | [config](./grid_rcnn_r50_fpn_gn-head_1x_coco.py)| [model](https://download.openmmlab.com/mmdetection/v2.0/groie/grid_rcnn_r50_fpn_gn-head_1x_coco/grid_rcnn_r50_fpn_gn-head_1x_coco_20200605_202059-64f00ee8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/groie/grid_rcnn_r50_fpn_gn-head_1x_coco/grid_rcnn_r50_fpn_gn-head_1x_coco_20200605_202059.log.json) | +| R-50-FPN | + GRoIE | 1x | | | [config](./grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py)|| +| R-50-FPN | Mask R-CNN | 1x | 38.2 | 34.7 | [config](../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py)| [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205_050542.log.json) | +| R-50-FPN | + GRoIE | 1x | 39.0 | 36.0 | [config](./mask_rcnn_r50_fpn_groie_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_groie_1x_coco/mask_rcnn_r50_fpn_groie_1x_coco_20200604_211715-50d90c74.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_groie_1x_coco/mask_rcnn_r50_fpn_groie_1x_coco_20200604_211715.log.json) | +| R-50-FPN | GC-Net | 1x | 40.7 | 36.5 | [config](../gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202-50b90e5c.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202_085547.log.json) | +| R-50-FPN | + GRoIE | 1x | 41.0 | 37.8 | [config](./mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200604_211715-42eb79e1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200604_211715-42eb79e1.pth) | +| R-101-FPN | GC-Net | 1x | 42.2 | 37.8 | [config](../gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206-8407a3f0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206_142508.log.json) | +| R-101-FPN | + GRoIE | 1x | 42.6 | 38.7 | [config](./mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py)| [model](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200607_224507-8daae01c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200607_224507.log.json) | + +## Citation + +If you use this work or benchmark in your research, please cite this project. + +```latex +@inproceedings{rossi2021novel, + title={A novel region of interest extraction layer for instance segmentation}, + author={Rossi, Leonardo and Karimi, Akbar and Prati, Andrea}, + booktitle={2020 25th International Conference on Pattern Recognition (ICPR)}, + pages={2203--2209}, + year={2021}, + organization={IEEE} +} +``` + +## Contact + +The implementation of GRoIE is currently maintained by +[Leonardo Rossi](https://github.com/hachreak/). 
diff --git a/object_detection/configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py b/object_detection/configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0fc528bfd49bfc9a262692db78a5f94b46c285af --- /dev/null +++ b/object_detection/configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py @@ -0,0 +1,25 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/object_detection/configs/groie/grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py b/object_detection/configs/groie/grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8e4b4ab23513a97adf4471ab3b33ca8abdb6dbe5 --- /dev/null +++ b/object_detection/configs/groie/grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = '../grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)), + grid_roi_extractor=dict( + type='GenericRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/object_detection/configs/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py b/object_detection/configs/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8b83722197c69a51907f43bcb05883deedc37f0c --- /dev/null +++ b/object_detection/configs/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = '../gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)), + mask_roi_extractor=dict( + type='GenericRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + 
out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/object_detection/configs/groie/mask_rcnn_r50_fpn_groie_1x_coco.py b/object_detection/configs/groie/mask_rcnn_r50_fpn_groie_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..81dfb4873bdb587626200a3007dc4d57a92c0fd9 --- /dev/null +++ b/object_detection/configs/groie/mask_rcnn_r50_fpn_groie_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)), + mask_roi_extractor=dict( + type='GenericRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/object_detection/configs/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py b/object_detection/configs/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..852c5ca7c5c4ba04f6a5f7dd6dbaf6b2c357a2fa --- /dev/null +++ b/object_detection/configs/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = '../gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)), + mask_roi_extractor=dict( + type='GenericRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/object_detection/configs/groie/metafile.yml b/object_detection/configs/groie/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..269cb39367e6345ae5362cfe90f7f3b9a0855170 --- /dev/null +++ b/object_detection/configs/groie/metafile.yml @@ -0,0 +1,93 @@ +Collections: + - Name: GRoIE + Metadata: + 
Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Generic RoI Extractor + - FPN + - RPN + - ResNet + - RoIAlign + Paper: + URL: https://arxiv.org/abs/2004.13665 + Title: 'A novel Region of Interest Extraction Layer for Instance Segmentation' + README: configs/groie/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/roi_heads/roi_extractors/groie.py#L15 + Version: v2.1.0 + +Models: + - Name: faster_rcnn_r50_fpn_groie_1x_coco + In Collection: GRoIE + Config: configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/groie/faster_rcnn_r50_fpn_groie_1x_coco/faster_rcnn_r50_fpn_groie_1x_coco_20200604_211715-66ee9516.pth + + - Name: grid_rcnn_r50_fpn_gn-head_groie_1x_coco + In Collection: GRoIE + Config: configs/groie/grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + + - Name: mask_rcnn_r50_fpn_groie_1x_coco + In Collection: GRoIE + Config: configs/groie/mask_rcnn_r50_fpn_groie_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_groie_1x_coco/mask_rcnn_r50_fpn_groie_1x_coco_20200604_211715-50d90c74.pth + + - Name: mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco + In Collection: GRoIE + Config: configs/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200604_211715-42eb79e1.pth + + - Name: mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco + In Collection: GRoIE + Config: configs/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200607_224507-8daae01c.pth diff --git a/object_detection/configs/guided_anchoring/README.md b/object_detection/configs/guided_anchoring/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d99ced9eb83ec5ce416d1fb331a974c0c11ce173 --- /dev/null +++ b/object_detection/configs/guided_anchoring/README.md @@ -0,0 +1,63 @@ +# Region Proposal by Guided Anchoring + +## Abstract + + + +Region anchors are the cornerstone of modern object detection techniques. State-of-the-art detectors mostly rely on a dense anchoring scheme, where anchors are sampled uniformly over the spatial domain with a predefined set of scales and aspect ratios. In this paper, we revisit this foundational stage. Our study shows that it can be done much more effectively and efficiently. 
Specifically, we present an alternative scheme, named Guided Anchoring, which leverages semantic features to guide the anchoring. The proposed method jointly predicts the locations where the center of objects of interest are likely to exist as well as the scales and aspect ratios at different locations. On top of predicted anchor shapes, we mitigate the feature inconsistency with a feature adaption module. We also study the use of high-quality proposals to improve detection performance. The anchoring scheme can be seamlessly integrated into proposal methods and detectors. With Guided Anchoring, we achieve 9.1% higher recall on MS COCO with 90% fewer anchors than the RPN baseline. We also adopt Guided Anchoring in Fast R-CNN, Faster R-CNN and RetinaNet, respectively improving the detection mAP by 2.2%, 2.7% and 1.2%. + + +
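Concretely, Guided Anchoring replaces the dense anchor grid with two small prediction branches. The sketch below illustrates them under simplified assumptions (single image, hypothetical class name; the real `GARPNHead` additionally applies the deformable-conv feature adaption mentioned above and handles anchor target assignment): a location branch scores where object centres are likely, and a shape branch regresses an anchor width and height at each kept location.

```python
# Illustrative sketch of the two Guided Anchoring branches (not the repo's GARPNHead).
import torch
import torch.nn as nn

class TinyGuidedAnchorHead(nn.Module):
    def __init__(self, in_channels=256, stride=8, sigma=8.0):
        super().__init__()
        self.loc = nn.Conv2d(in_channels, 1, 1)    # per-location objectness logit
        self.shape = nn.Conv2d(in_channels, 2, 1)  # (dw, dh) anchor-shape offsets
        self.base = stride * sigma                 # assumed base anchor size for this level

    def forward(self, feat, loc_thr=0.01):
        loc_prob = torch.sigmoid(self.loc(feat))[0, 0]   # (H, W), single image for brevity
        dw, dh = self.shape(feat)[0]                      # each (H, W)
        w = self.base * torch.exp(dw)                     # predicted anchor width
        h = self.base * torch.exp(dh)                     # predicted anchor height
        keep = loc_prob > loc_thr                         # keep only likely centres -> sparse anchors
        return loc_prob[keep], w[keep], h[keep]

head = TinyGuidedAnchorHead()
probs, ws, hs = head(torch.randn(1, 256, 100, 152))       # one FPN level
```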
+ +
+ + + + +## Citation + + + +We provide config files to reproduce the results in the CVPR 2019 paper for [Region Proposal by Guided Anchoring](https://arxiv.org/abs/1901.03278). + +```latex +@inproceedings{wang2019region, + title={Region Proposal by Guided Anchoring}, + author={Jiaqi Wang and Kai Chen and Shuo Yang and Chen Change Loy and Dahua Lin}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019} +} +``` + +## Results and Models + +The results on COCO 2017 val is shown in the below table. (results on test-dev are usually slightly higher than val). + +| Method | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | AR 1000 | Config | Download | +| :----: | :-------------: | :-----: | :-----: | :------: | :------------: | :-----: | :------: | :--------: | +| GA-RPN | R-50-FPN | caffe | 1x | 5.3 | 15.8 | 68.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco/ga_rpn_r50_caffe_fpn_1x_coco_20200531-899008a6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco/ga_rpn_r50_caffe_fpn_1x_coco_20200531_011819.log.json) | +| GA-RPN | R-101-FPN | caffe | 1x | 7.3 | 13.0 | 69.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco/ga_rpn_r101_caffe_fpn_1x_coco_20200531-ca9ba8fb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco/ga_rpn_r101_caffe_fpn_1x_coco_20200531_011812.log.json) | +| GA-RPN | X-101-32x4d-FPN | pytorch | 1x | 8.5 | 10.0 | 70.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco/ga_rpn_x101_32x4d_fpn_1x_coco_20200220-c28d1b18.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco/ga_rpn_x101_32x4d_fpn_1x_coco_20200220_221326.log.json) | +| GA-RPN | X-101-64x4d-FPN | pytorch | 1x | 7.1 | 7.5 | 71.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco/ga_rpn_x101_64x4d_fpn_1x_coco_20200225-3c6e1aa2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco/ga_rpn_x101_64x4d_fpn_1x_coco_20200225_152704.log.json) | + +| Method | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :------------: | :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| GA-Faster RCNN | R-50-FPN | caffe | 1x | 5.5 | | 39.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco/ga_faster_r50_caffe_fpn_1x_coco_20200702_000718-a11ccfe6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco/ga_faster_r50_caffe_fpn_1x_coco_20200702_000718.log.json) | +| 
GA-Faster RCNN | R-101-FPN | caffe | 1x | 7.5 | | 41.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco/ga_faster_r101_caffe_fpn_1x_coco_bbox_mAP-0.415_20200505_115528-fb82e499.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco/ga_faster_r101_caffe_fpn_1x_coco_20200505_115528.log.json) | +| GA-Faster RCNN | X-101-32x4d-FPN | pytorch | 1x | 8.7 | 9.7 | 43.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco/ga_faster_x101_32x4d_fpn_1x_coco_20200215-1ded9da3.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco/ga_faster_x101_32x4d_fpn_1x_coco_20200215_184547.log.json) | +| GA-Faster RCNN | X-101-64x4d-FPN | pytorch | 1x | 11.8 | 7.3 | 43.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco/ga_faster_x101_64x4d_fpn_1x_coco_20200215-0fa7bde7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco/ga_faster_x101_64x4d_fpn_1x_coco_20200215_104455.log.json) | +| GA-RetinaNet | R-50-FPN | caffe | 1x | 3.5 | 16.8 | 36.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco/ga_retinanet_r50_caffe_fpn_1x_coco_20201020-39581c6f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco/ga_retinanet_r50_caffe_fpn_1x_coco_20201020_225450.log.json) | +| GA-RetinaNet | R-101-FPN | caffe | 1x | 5.5 | 12.9 | 39.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco/ga_retinanet_r101_caffe_fpn_1x_coco_20200531-6266453c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco/ga_retinanet_r101_caffe_fpn_1x_coco_20200531_012847.log.json) | +| GA-RetinaNet | X-101-32x4d-FPN | pytorch | 1x | 6.9 | 10.6 | 40.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco/ga_retinanet_x101_32x4d_fpn_1x_coco_20200219-40c56caa.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco/ga_retinanet_x101_32x4d_fpn_1x_coco_20200219_223025.log.json) | +| GA-RetinaNet | X-101-64x4d-FPN | pytorch | 1x | 9.9 | 7.7 | 41.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco/ga_retinanet_x101_64x4d_fpn_1x_coco_20200226-ef9f7f1f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco/ga_retinanet_x101_64x4d_fpn_1x_coco_20200226_221123.log.json) | + +- In the Guided Anchoring paper, `score_thr` is set to 0.001 in Fast/Faster RCNN and 0.05 in RetinaNet for both baselines and Guided Anchoring. + +- Performance on COCO test-dev benchmark are shown as follows. + +| Method | Backbone | Style | Lr schd | Aug Train | Score thr | AP | AP_50 | AP_75 | AP_small | AP_medium | AP_large | Download | +| :------------: | :-------: | :---: | :-----: | :-------: | :-------: | :---: | :---: | :---: | :------: | :-------: | :------: | :------: | +| GA-Faster RCNN | R-101-FPN | caffe | 1x | F | 0.05 | | | | | | | | +| GA-Faster RCNN | R-101-FPN | caffe | 1x | F | 0.001 | | | | | | | | +| GA-RetinaNet | R-101-FPN | caffe | 1x | F | 0.05 | | | | | | | | +| GA-RetinaNet | R-101-FPN | caffe | 2x | T | 0.05 | | | | | | | | diff --git a/object_detection/configs/guided_anchoring/ga_fast_r50_caffe_fpn_1x_coco.py b/object_detection/configs/guided_anchoring/ga_fast_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8fc203c6ed2b31b4672ae4525c65afbcdc6579ed --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_fast_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,65 @@ +_base_ = '../fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + roi_head=dict( + bbox_head=dict(bbox_coder=dict(target_stds=[0.05, 0.05, 0.1, 0.1]))), + # model training and testing settings + train_cfg=dict( + rcnn=dict( + assigner=dict(pos_iou_thr=0.6, neg_iou_thr=0.6, min_pos_iou=0.6), + sampler=dict(num=256))), + test_cfg=dict(rcnn=dict(score_thr=1e-3))) +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=300), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=None), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img', 'proposals']), + ]) +] +data = dict( + train=dict( + proposal_file=data_root + 'proposals/ga_rpn_r50_fpn_1x_train2017.pkl', + pipeline=train_pipeline), + val=dict( + proposal_file=data_root + 'proposals/ga_rpn_r50_fpn_1x_val2017.pkl', + pipeline=test_pipeline), + test=dict( + proposal_file=data_root + 'proposals/ga_rpn_r50_fpn_1x_val2017.pkl', + pipeline=test_pipeline)) 
+optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py b/object_detection/configs/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a40e7c6fd7e2355081e7a31b40a893314e4eb303 --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './ga_faster_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/object_detection/configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py b/object_detection/configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b0add92c398b62aa8fd2141f595cf0941f55d421 --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,65 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + type='GARPNHead', + in_channels=256, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.14, 0.14]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.11, 0.11]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + roi_head=dict( + bbox_head=dict(bbox_coder=dict(target_stds=[0.05, 0.05, 0.1, 0.1]))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5), + rpn_proposal=dict(nms_post=1000, max_per_img=300), + rcnn=dict( + assigner=dict(pos_iou_thr=0.6, neg_iou_thr=0.6, min_pos_iou=0.6), + sampler=dict(type='RandomSampler', num=256))), + test_cfg=dict( + rpn=dict(nms_post=1000, max_per_img=300), rcnn=dict(score_thr=1e-3))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/guided_anchoring/ga_faster_r50_fpn_1x_coco.py b/object_detection/configs/guided_anchoring/ga_faster_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e3d8238956f4d4874de1fde662a1a3ded1918189 --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_faster_r50_fpn_1x_coco.py @@ -0,0 +1,65 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + type='GARPNHead', + in_channels=256, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + 
octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.14, 0.14]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.11, 0.11]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + roi_head=dict( + bbox_head=dict(bbox_coder=dict(target_stds=[0.05, 0.05, 0.1, 0.1]))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5), + rpn_proposal=dict(nms_post=1000, max_per_img=300), + rcnn=dict( + assigner=dict(pos_iou_thr=0.6, neg_iou_thr=0.6, min_pos_iou=0.6), + sampler=dict(type='RandomSampler', num=256))), + test_cfg=dict( + rpn=dict(nms_post=1000, max_per_img=300), rcnn=dict(score_thr=1e-3))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py b/object_detection/configs/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f1dda9495c2595b2743e3056abf65a1795ea5971 --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_faster_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py b/object_detection/configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fb9e2afc9cff8c8c94b2ace544785a026a61f45e --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_faster_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py b/object_detection/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1b1cccd0dd15123c35044367001e465b691f6f24 --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = 
'./ga_retinanet_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/object_detection/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_mstrain_2x.py b/object_detection/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_mstrain_2x.py new file mode 100644 index 0000000000000000000000000000000000000000..260895b401106c91a6133a054260ab94e92c75c5 --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_mstrain_2x.py @@ -0,0 +1,169 @@ +_base_ = '../_base_/default_runtime.py' + +# model settings +model = dict( + type='RetinaNet', + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + num_outs=5), + bbox_head=dict( + type='GARetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=0.04, loss_weight=1.0))) +# training and testing settings +train_cfg = dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.4, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + center_ratio=0.2, + ignore_ratio=0.5, + debug=False) +test_cfg = dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 960)], + keep_ratio=True, + multiscale_mode='range'), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + 
dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='bbox') +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[16, 22]) +checkpoint_config = dict(interval=1) +# yapf:disable +log_config = dict( + interval=50, + hooks=[ + dict(type='TextLoggerHook'), + # dict(type='TensorboardLoggerHook') + ]) +# yapf:enable +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py b/object_detection/configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..33512011abb612ff5c762e75ee4492b382902fa4 --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,62 @@ +_base_ = '../retinanet/retinanet_r50_caffe_fpn_1x_coco.py' +model = dict( + bbox_head=dict( + _delete_=True, + type='GARetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=0.04, loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.4, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + assigner=dict(neg_iou_thr=0.5, min_pos_iou=0.0), + center_ratio=0.2, + ignore_ratio=0.5)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py b/object_detection/configs/guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..769472352d06a8f2c30d73ae1f57c393f77adfa2 --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,62 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +model = dict( + bbox_head=dict( + _delete_=True, + type='GARetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=0.04, loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.4, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + assigner=dict(neg_iou_thr=0.5, min_pos_iou=0.0), + center_ratio=0.2, + ignore_ratio=0.5)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py b/object_detection/configs/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c5eb34f5fa2d1061c7eb4a3adfb8b7e1ede51b55 --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_retinanet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py b/object_detection/configs/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5c69a6f848f278b0b81082a8f38b01e154db0e84 --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_retinanet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py b/object_detection/configs/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..039703ec6635f6665be16919baf157511c7b3431 --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = './ga_rpn_r50_caffe_fpn_1x_coco.py' +# model settings +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/object_detection/configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py b/object_detection/configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7830894af1b5824d9ff442f6aa90f6e68c9ef29c --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,58 @@ +_base_ = '../rpn/rpn_r50_caffe_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + type='GARPNHead', + in_channels=256, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.14, 0.14]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.11, 0.11]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + # model training and testing settings + train_cfg=dict( + rpn=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5)), + test_cfg=dict(rpn=dict(nms_post=1000))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/guided_anchoring/ga_rpn_r50_fpn_1x_coco.py b/object_detection/configs/guided_anchoring/ga_rpn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..27ab3e733bda1fb1c7c50cbd0f26597650b4c2e7 --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_rpn_r50_fpn_1x_coco.py @@ -0,0 +1,58 @@ +_base_ = '../rpn/rpn_r50_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + type='GARPNHead', + in_channels=256, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.14, 0.14]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.11, 0.11]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, 
loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + # model training and testing settings + train_cfg=dict( + rpn=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5)), + test_cfg=dict(rpn=dict(nms_post=1000))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py b/object_detection/configs/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cccc985f9eb2c3e9c06f91af6107ec909aefd9d1 --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_rpn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py b/object_detection/configs/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4e134d23ad428eaca19bc8069325a9545683cd8e --- /dev/null +++ b/object_detection/configs/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_rpn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/guided_anchoring/metafile.yml b/object_detection/configs/guided_anchoring/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..3019d4a1d47d96b527a24dd4e4ccfa26afd2add2 --- /dev/null +++ b/object_detection/configs/guided_anchoring/metafile.yml @@ -0,0 +1,246 @@ +Collections: + - Name: Guided Anchoring + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - Guided Anchoring + - ResNet + Paper: + URL: https://arxiv.org/abs/1901.03278 + Title: 'Region Proposal by Guided Anchoring' + README: configs/guided_anchoring/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/dense_heads/ga_retina_head.py#L10 + Version: v2.0.0 + +Models: + - Name: ga_rpn_r50_caffe_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.3 + inference time (ms/im): + - value: 63.29 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Region Proposal + Dataset: COCO + Metrics: + AR@1000: 68.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco/ga_rpn_r50_caffe_fpn_1x_coco_20200531-899008a6.pth + + - 
Name: ga_rpn_r101_caffe_fpn_1x_coco.py + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py.py + Metadata: + Training Memory (GB): 7.3 + inference time (ms/im): + - value: 76.92 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Region Proposal + Dataset: COCO + Metrics: + AR@1000: 69.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco/ga_rpn_r101_caffe_fpn_1x_coco_20200531-ca9ba8fb.pth + + - Name: ga_rpn_x101_32x4d_fpn_1x_coco.py + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py.py + Metadata: + Training Memory (GB): 8.5 + inference time (ms/im): + - value: 100 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Region Proposal + Dataset: COCO + Metrics: + AR@1000: 70.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco/ga_rpn_x101_32x4d_fpn_1x_coco_20200220-c28d1b18.pth + + - Name: ga_rpn_x101_64x4d_fpn_1x_coco.py.py + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py.py.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 133.33 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Region Proposal + Dataset: COCO + Metrics: + AR@1000: 70.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco/ga_rpn_x101_64x4d_fpn_1x_coco_20200225-3c6e1aa2.pth + + - Name: ga_faster_r50_caffe_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco/ga_faster_r50_caffe_fpn_1x_coco_20200702_000718-a11ccfe6.pth + + - Name: ga_faster_r101_caffe_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco/ga_faster_r101_caffe_fpn_1x_coco_bbox_mAP-0.415_20200505_115528-fb82e499.pth + + - Name: ga_faster_x101_32x4d_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 8.7 + inference time (ms/im): + - value: 103.09 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco/ga_faster_x101_32x4d_fpn_1x_coco_20200215-1ded9da3.pth + + - Name: ga_faster_x101_64x4d_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 11.8 + inference time (ms/im): + - value: 136.99 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + 
Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco/ga_faster_x101_64x4d_fpn_1x_coco_20200215-0fa7bde7.pth + + - Name: ga_retinanet_r50_caffe_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.5 + inference time (ms/im): + - value: 59.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco/ga_retinanet_r50_caffe_fpn_1x_coco_20201020-39581c6f.pth + + - Name: ga_retinanet_r101_caffe_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.5 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco/ga_retinanet_r101_caffe_fpn_1x_coco_20200531-6266453c.pth + + - Name: ga_retinanet_x101_32x4d_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.9 + inference time (ms/im): + - value: 94.34 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco/ga_retinanet_x101_32x4d_fpn_1x_coco_20200219-40c56caa.pth + + - Name: ga_retinanet_x101_64x4d_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 9.9 + inference time (ms/im): + - value: 129.87 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco/ga_retinanet_x101_64x4d_fpn_1x_coco_20200226-ef9f7f1f.pth diff --git a/object_detection/configs/hrnet/README.md b/object_detection/configs/hrnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..faeb673fa3f221beeccd1dbdf839f8712bcb36e1 --- /dev/null +++ b/object_detection/configs/hrnet/README.md @@ -0,0 +1,105 @@ +# High-resolution networks (HRNets) for object detection + +## Abstract + + + +This is an official pytorch implementation of Deep High-Resolution Representation Learning for Human Pose Estimation. In this work, we are interested in the human pose estimation problem with a focus on learning reliable high-resolution representations. Most existing methods recover high-resolution representations from low-resolution representations produced by a high-to-low resolution network. Instead, our proposed network maintains high-resolution representations through the whole process. 
We start from a high-resolution subnetwork as the first stage, gradually add high-to-low resolution subnetworks one by one to form more stages, and connect the multi-resolution subnetworks in parallel. We conduct repeated multi-scale fusions such that each of the high-to-low resolution representations receives information from other parallel representations over and over, leading to rich high-resolution representations. As a result, the predicted keypoint heatmap is potentially more accurate and spatially more precise. We empirically demonstrate the effectiveness of our network through the superior pose estimation results over two benchmark datasets: the COCO keypoint detection dataset and the MPII Human Pose dataset. + +High-resolution representation learning plays an essential role in many vision problems, e.g., pose estimation and semantic segmentation. The high-resolution network (HRNet), recently developed for human pose estimation, maintains high-resolution representations through the whole process by connecting high-to-low resolution convolutions in parallel and produces strong high-resolution representations by repeatedly conducting fusions across parallel convolutions. +In this paper, we conduct a further study on high-resolution representations by introducing a simple yet effective modification and apply it to a wide range of vision tasks. We augment the high-resolution representation by aggregating the (upsampled) representations from all the parallel convolutions rather than only the representation from the high-resolution convolution as done in HRNet. This simple modification leads to stronger representations, evidenced by superior results. We show top results in semantic segmentation on Cityscapes, LIP, and PASCAL Context, and facial landmark detection on AFLW, COFW, 300W, and WFLW. In addition, we build a multi-level representation from the high-resolution representation and apply it to the Faster R-CNN object detection framework and the extended frameworks. The proposed approach achieves superior results to existing single-model networks on COCO object detection.
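The aggregation described in the second paragraph (upsampling every parallel branch to the highest resolution and combining them, instead of keeping only the high-resolution branch) is, roughly, what the `HRFPN` necks in the configs below build on. A minimal sketch of that fusion step, assuming the HRNetV2p-W32 channel widths (32/64/128/256) used in this folder; the code is illustrative, not the HRNet implementation:

```python
import torch
import torch.nn.functional as F

# Feature maps from the four parallel branches of one forward pass
# (dummy tensors standing in for real HRNet outputs).
branches = [
    torch.randn(1, 32, 128, 128),   # high-resolution branch
    torch.randn(1, 64, 64, 64),
    torch.randn(1, 128, 32, 32),
    torch.randn(1, 256, 16, 16),
]

target_hw = branches[0].shape[-2:]  # fuse at the highest resolution
upsampled = [
    b if b.shape[-2:] == target_hw
    else F.interpolate(b, size=target_hw, mode='bilinear', align_corners=False)
    for b in branches
]
fused = torch.cat(upsampled, dim=1)  # 32 + 64 + 128 + 256 = 480 channels
print(fused.shape)                   # torch.Size([1, 480, 128, 128])
```

`HRFPN` then redistributes such a fused representation back into a multi-level pyramid so the detection heads below can consume it like a standard FPN output.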
+ + + + +## Citation + + + +```latex +@inproceedings{SunXLW19, + title={Deep High-Resolution Representation Learning for Human Pose Estimation}, + author={Ke Sun and Bin Xiao and Dong Liu and Jingdong Wang}, + booktitle={CVPR}, + year={2019} +} + +@article{SunZJCXLMWLW19, + title={High-Resolution Representations for Labeling Pixels and Regions}, + author={Ke Sun and Yang Zhao and Borui Jiang and Tianheng Cheng and Bin Xiao + and Dong Liu and Yadong Mu and Xinggang Wang and Wenyu Liu and Jingdong Wang}, + journal = {CoRR}, + volume = {abs/1904.04514}, + year={2019} +} +``` + +## Results and Models + +### Faster R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------:| :--------:| +| HRNetV2p-W18 | pytorch | 1x | 6.6 | 13.4 | 36.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco/faster_rcnn_hrnetv2p_w18_1x_coco_20200130-56651a6d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco/faster_rcnn_hrnetv2p_w18_1x_coco_20200130_211246.log.json) | +| HRNetV2p-W18 | pytorch | 2x | 6.6 | - | 38.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco/faster_rcnn_hrnetv2p_w18_2x_coco_20200702_085731-a4ec0611.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco/faster_rcnn_hrnetv2p_w18_2x_coco_20200702_085731.log.json) | +| HRNetV2p-W32 | pytorch | 1x | 9.0 | 12.4 | 40.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco/faster_rcnn_hrnetv2p_w32_1x_coco_20200130-6e286425.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco/faster_rcnn_hrnetv2p_w32_1x_coco_20200130_204442.log.json) | +| HRNetV2p-W32 | pytorch | 2x | 9.0 | - | 41.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco/faster_rcnn_hrnetv2p_w32_2x_coco_20200529_015927-976a9c15.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco/faster_rcnn_hrnetv2p_w32_2x_coco_20200529_015927.log.json) | +| HRNetV2p-W40 | pytorch | 1x | 10.4 | 10.5 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco/faster_rcnn_hrnetv2p_w40_1x_coco_20200210-95c1f5ce.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco/faster_rcnn_hrnetv2p_w40_1x_coco_20200210_125315.log.json) | +| HRNetV2p-W40 | pytorch | 2x | 10.4 | - | 42.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco/faster_rcnn_hrnetv2p_w40_2x_coco_20200512_161033-0f236ef4.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco/faster_rcnn_hrnetv2p_w40_2x_coco_20200512_161033.log.json) | + +### Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------:|:------:|:--------:| +| HRNetV2p-W18 | pytorch | 1x | 7.0 | 11.7 | 37.7 | 34.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco/mask_rcnn_hrnetv2p_w18_1x_coco_20200205-1c3d78ed.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco/mask_rcnn_hrnetv2p_w18_1x_coco_20200205_232523.log.json) | +| HRNetV2p-W18 | pytorch | 2x | 7.0 | - | 39.8 | 36.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco/mask_rcnn_hrnetv2p_w18_2x_coco_20200212-b3c825b1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco/mask_rcnn_hrnetv2p_w18_2x_coco_20200212_134222.log.json) | +| HRNetV2p-W32 | pytorch | 1x | 9.4 | 11.3 | 41.2 | 37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco/mask_rcnn_hrnetv2p_w32_1x_coco_20200207-b29f616e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco/mask_rcnn_hrnetv2p_w32_1x_coco_20200207_055017.log.json) | +| HRNetV2p-W32 | pytorch | 2x | 9.4 | - | 42.5 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco/mask_rcnn_hrnetv2p_w32_2x_coco_20200213-45b75b4d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco/mask_rcnn_hrnetv2p_w32_2x_coco_20200213_150518.log.json) | +| HRNetV2p-W40 | pytorch | 1x | 10.9 | | 42.1 | 37.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco/mask_rcnn_hrnetv2p_w40_1x_coco_20200511_015646-66738b35.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco/mask_rcnn_hrnetv2p_w40_1x_coco_20200511_015646.log.json) | +| HRNetV2p-W40 | pytorch | 2x | 10.9 | | 42.8 | 38.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco/mask_rcnn_hrnetv2p_w40_2x_coco_20200512_163732-aed5e4ab.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco/mask_rcnn_hrnetv2p_w40_2x_coco_20200512_163732.log.json) | + +### Cascade R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------: | :--------: | +| HRNetV2p-W18 | pytorch | 20e | 7.0 | 11.0 | 41.2 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco/cascade_rcnn_hrnetv2p_w18_20e_coco_20200210-434be9d7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco/cascade_rcnn_hrnetv2p_w18_20e_coco_20200210_105632.log.json) | +| HRNetV2p-W32 | pytorch | 20e | 9.4 | 11.0 | 43.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco/cascade_rcnn_hrnetv2p_w32_20e_coco_20200208-928455a4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco/cascade_rcnn_hrnetv2p_w32_20e_coco_20200208_160511.log.json) | +| HRNetV2p-W40 | pytorch | 20e | 10.8 | | 43.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco/cascade_rcnn_hrnetv2p_w40_20e_coco_20200512_161112-75e47b04.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco/cascade_rcnn_hrnetv2p_w40_20e_coco_20200512_161112.log.json) | + +### Cascade Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------:|:------:|:--------:| +| HRNetV2p-W18 | pytorch | 20e | 8.5 | 8.5 |41.6 |36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco/cascade_mask_rcnn_hrnetv2p_w18_20e_coco_20200210-b543cd2b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco/cascade_mask_rcnn_hrnetv2p_w18_20e_coco_20200210_093149.log.json) | +| HRNetV2p-W32 | pytorch | 20e | | 8.3 |44.3 |38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco/cascade_mask_rcnn_hrnetv2p_w32_20e_coco_20200512_154043-39d9cf7b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco/cascade_mask_rcnn_hrnetv2p_w32_20e_coco_20200512_154043.log.json) | +| HRNetV2p-W40 | pytorch | 20e | 12.5 | |45.1 |39.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco/cascade_mask_rcnn_hrnetv2p_w40_20e_coco_20200527_204922-969c4610.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco/cascade_mask_rcnn_hrnetv2p_w40_20e_coco_20200527_204922.log.json) | + +### Hybrid Task Cascade (HTC) + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------:|:------:|:--------:| +| HRNetV2p-W18 | pytorch | 20e | 10.8 | 4.7 | 42.8 | 37.9 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/htc_hrnetv2p_w18_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w18_20e_coco/htc_hrnetv2p_w18_20e_coco_20200210-b266988c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w18_20e_coco/htc_hrnetv2p_w18_20e_coco_20200210_182735.log.json) | +| HRNetV2p-W32 | pytorch | 20e | 13.1 | 4.9 | 45.4 | 39.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/htc_hrnetv2p_w32_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w32_20e_coco/htc_hrnetv2p_w32_20e_coco_20200207-7639fa12.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w32_20e_coco/htc_hrnetv2p_w32_20e_coco_20200207_193153.log.json) | +| HRNetV2p-W40 | pytorch | 20e | 14.6 | | 46.4 | 40.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/htc_hrnetv2p_w40_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w40_20e_coco/htc_hrnetv2p_w40_20e_coco_20200529_183411-417c4d5b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w40_20e_coco/htc_hrnetv2p_w40_20e_coco_20200529_183411.log.json) | + +### FCOS + +| Backbone | Style | GN | MS train | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:-------:|:------:|:------:|:------:|:------:|:--------:| +|HRNetV2p-W18| pytorch | Y | N | 1x | 13.0 | 12.9 | 35.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco_20201212_100710-4ad151de.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco_20201212_100710.log.json) | +|HRNetV2p-W18| pytorch | Y | N | 2x | 13.0 | - | 38.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco_20201212_101110-5c575fa5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco_20201212_101110.log.json) | +|HRNetV2p-W32| pytorch | Y | N | 1x | 17.5 | 12.9 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco_20201211_134730-cb8055c0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco_20201211_134730.log.json) | +|HRNetV2p-W32| pytorch | Y | N | 2x | 17.5 | - | 40.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco_20201212_112133-77b6b9bb.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco_20201212_112133.log.json) | +|HRNetV2p-W18| pytorch | Y | Y | 2x | 13.0 | 12.9 | 38.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco_20201212_111651-441e9d9f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco_20201212_111651.log.json) | +|HRNetV2p-W32| pytorch | Y | Y | 2x | 17.5 | 12.4 | 41.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco_20201212_090846-b6f2b49f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco_20201212_090846.log.json) | +|HRNetV2p-W48| pytorch | Y | Y | 2x | 20.3 | 10.8 | 42.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco_20201212_124752-f22d2ce5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco_20201212_124752.log.json) | + +**Note:** + +- The `28e` schedule in HTC indicates decreasing the lr at 24 and 27 epochs, with a total of 28 epochs. +- HRNetV2 ImageNet pretrained models are in [HRNets for Image Classification](https://github.com/HRNet/HRNet-Image-Classification). 
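Every config file added below (like the guided-anchoring ones above) is a thin override of a `_base_` config: child dicts are merged into the base recursively, and `_delete_=True` tells the loader to replace the base entry wholesale rather than merge into it, which is how a ResNet backbone is swapped for an HRNet one. A rough, stand-alone sketch of that merge rule under those assumptions (the helper is illustrative; the actual behaviour lives in mmcv's `Config`):

```python
def merge_cfg(base, child):
    """Recursively merge a child config dict into a base config dict."""
    if not isinstance(base, dict) or not isinstance(child, dict):
        return child                      # leaf values simply override the base
    child = dict(child)                   # work on a shallow copy
    if child.pop('_delete_', False):
        return child                      # replace the base dict wholesale
    merged = dict(base)
    for key, value in child.items():
        merged[key] = merge_cfg(base[key], value) if key in base else value
    return merged


# Toy base/override pair mirroring the pattern in the configs below.
base = dict(
    backbone=dict(type='ResNet', depth=50, norm_eval=True),
    neck=dict(type='FPN', in_channels=[256, 512, 1024, 2048],
              out_channels=256, num_outs=5),
)
override = dict(
    backbone=dict(_delete_=True, type='HRNet',
                  extra=dict(stage2=dict(num_channels=(32, 64)))),
    neck=dict(type='HRFPN', in_channels=[32, 64, 128, 256]),
)
print(merge_cfg(base, override))
# backbone is replaced outright (no ResNet keys survive); neck keeps the
# base keys it did not override (out_channels, num_outs).
```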
diff --git a/object_detection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py b/object_detection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..839cf3eb62590368ab0e99efdadcbdd4ad81eeb5 --- /dev/null +++ b/object_detection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py @@ -0,0 +1,11 @@ +_base_ = './cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py' +# model settings +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/object_detection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py b/object_detection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9942602762d8eba5d4c3ad20f2190fdb9f1df906 --- /dev/null +++ b/object_detection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py @@ -0,0 +1,40 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/object_detection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py b/object_detection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..10d5e83c67ebfb7f3017abc164d9559681185268 --- /dev/null +++ b/object_detection/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py @@ -0,0 +1,12 @@ +_base_ = './cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py' +# model settings +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/object_detection/configs/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py b/object_detection/configs/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ebd5e202d955e87870b3cf8efd94683668dd5929 --- /dev/null +++ b/object_detection/configs/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py @@ -0,0 +1,11 @@ +_base_ = './cascade_rcnn_hrnetv2p_w32_20e_coco.py' +# model settings +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + 
type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/object_detection/configs/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py b/object_detection/configs/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e7f89a9edae81d02a2229229b1c66cf50a9282e0 --- /dev/null +++ b/object_detection/configs/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py @@ -0,0 +1,40 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/object_detection/configs/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py b/object_detection/configs/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..265e8d636f53f448f59372074c9bbe590cb26d9a --- /dev/null +++ b/object_detection/configs/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py @@ -0,0 +1,12 @@ +_base_ = './cascade_rcnn_hrnetv2p_w32_20e_coco.py' +# model settings +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py b/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1df2c3db1c00a6c0c34f96bc71cf35bfc0e0fbe6 --- /dev/null +++ b/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = './faster_rcnn_hrnetv2p_w32_1x_coco.py' +# model settings +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py b/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a4b987a19ae32453d524fc2f7a4fb6b6b87f1f32 --- /dev/null +++ b/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = './faster_rcnn_hrnetv2p_w18_1x_coco.py' + +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git 
a/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py b/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..be058099a4c59b06ec5598ea25d194163e45601a --- /dev/null +++ b/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py @@ -0,0 +1,37 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) diff --git a/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py b/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..63c8717182f2284ff1062be31bae43b4360c6887 --- /dev/null +++ b/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_hrnetv2p_w32_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py b/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..886a7c90a453e684b3c0646b2eb3dea903671358 --- /dev/null +++ b/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = './faster_rcnn_hrnetv2p_w32_1x_coco.py' +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py b/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..585cc2c332fd88a9f0164b14084d45d7a3783b11 --- /dev/null +++ b/object_detection/configs/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_hrnetv2p_w40_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py b/object_detection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fd662bd10e3eb84fccbda080d9c902084f2fb490 --- /dev/null +++ b/object_detection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py @@ -0,0 +1,10 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py' +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 
36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/object_detection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py b/object_detection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..34975959f27f0ef8b985ab7d2857c7f2d70e47ae --- /dev/null +++ b/object_detection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py b/object_detection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..37bfdae98f177914cbaa99d5b117c7928b6f84dd --- /dev/null +++ b/object_detection/configs/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,10 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py' +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/object_detection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py b/object_detection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..10617f24c46f8dee164f06babecb00ae5d289466 --- /dev/null +++ b/object_detection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py @@ -0,0 +1,70 @@ +_base_ = '../fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256, + stride=2, + num_outs=5)) +img_norm_cfg = dict( + mean=[103.53, 116.28, 123.675], std=[57.375, 57.12, 58.395], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + 
dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py b/object_detection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7b3813071c7591caa72412e5622e4101f7c05920 --- /dev/null +++ b/object_detection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py b/object_detection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..482f88729ff6c08e482a5ca5c6d48b75f14f7ca8 --- /dev/null +++ b/object_detection/configs/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,39 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py' +img_norm_cfg = dict( + mean=[103.53, 116.28, 123.675], std=[57.375, 57.12, 58.395], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py b/object_detection/configs/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0ae9dbe3aca8d9d6e0af785dd60131909f420a89 --- /dev/null +++ b/object_detection/configs/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,11 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py' +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/object_detection/configs/hrnet/htc_hrnetv2p_w18_20e_coco.py b/object_detection/configs/hrnet/htc_hrnetv2p_w18_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3c2eb1dd4e08830d0e57ecfe321f0353c8bf6cb1 --- /dev/null +++ 
b/object_detection/configs/hrnet/htc_hrnetv2p_w18_20e_coco.py @@ -0,0 +1,10 @@ +_base_ = './htc_hrnetv2p_w32_20e_coco.py' +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/object_detection/configs/hrnet/htc_hrnetv2p_w32_20e_coco.py b/object_detection/configs/hrnet/htc_hrnetv2p_w32_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..545cb83eaca50f9d5de1fa6b3f3e569faab7d5f2 --- /dev/null +++ b/object_detection/configs/hrnet/htc_hrnetv2p_w32_20e_coco.py @@ -0,0 +1,37 @@ +_base_ = '../htc/htc_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) diff --git a/object_detection/configs/hrnet/htc_hrnetv2p_w40_20e_coco.py b/object_detection/configs/hrnet/htc_hrnetv2p_w40_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..94bff1bc01c09a98579f469dcac19df27cfc60b9 --- /dev/null +++ b/object_detection/configs/hrnet/htc_hrnetv2p_w40_20e_coco.py @@ -0,0 +1,11 @@ +_base_ = './htc_hrnetv2p_w32_20e_coco.py' +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/object_detection/configs/hrnet/htc_hrnetv2p_w40_28e_coco.py b/object_detection/configs/hrnet/htc_hrnetv2p_w40_28e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7067e8b602efb4f61549d376ec393e89deee8c3e --- /dev/null +++ b/object_detection/configs/hrnet/htc_hrnetv2p_w40_28e_coco.py @@ -0,0 +1,4 @@ +_base_ = './htc_hrnetv2p_w40_20e_coco.py' +# learning policy +lr_config = dict(step=[24, 27]) +runner = dict(type='EpochBasedRunner', max_epochs=28) diff --git a/object_detection/configs/hrnet/htc_x101_64x4d_fpn_16x1_28e_coco.py b/object_detection/configs/hrnet/htc_x101_64x4d_fpn_16x1_28e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..815f2857f99791232664ecc9e82ea860fdcaa268 --- /dev/null +++ b/object_detection/configs/hrnet/htc_x101_64x4d_fpn_16x1_28e_coco.py @@ -0,0 +1,4 @@ +_base_ = '../htc/htc_x101_64x4d_fpn_16x1_20e_coco.py' +# learning policy +lr_config = dict(step=[24, 27]) +runner = dict(type='EpochBasedRunner', max_epochs=28) diff --git a/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py b/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..cb12200edb5fe0a31b0cba8966e858ad06024b7c --- /dev/null +++ b/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py @@ -0,0 +1,10 @@ +_base_ = './mask_rcnn_hrnetv2p_w32_1x_coco.py' +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py b/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ca62682a3b2d328cc9a8fd08887bcc1bac53104d --- /dev/null +++ b/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_hrnetv2p_w18_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py b/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d5f0eb56b7e97bc764b98a2b88a277a69633caa6 --- /dev/null +++ b/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py @@ -0,0 +1,37 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) diff --git a/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py b/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..63d5d139e7b56843f5dcc85bda48945d56cfc49e --- /dev/null +++ b/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_hrnetv2p_w32_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py b/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5a76f4b056367f0cc69b5fc601ae5cdb1ac98cf8 --- /dev/null +++ b/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = './mask_rcnn_hrnetv2p_w18_1x_coco.py' +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git 
a/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py b/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3a2a510689308e556af803968a641dcf2594fe77 --- /dev/null +++ b/object_detection/configs/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_hrnetv2p_w40_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/hrnet/metafile.yml b/object_detection/configs/hrnet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..37703aaa5940de96c9f6c825f5af2e278e9ac429 --- /dev/null +++ b/object_detection/configs/hrnet/metafile.yml @@ -0,0 +1,604 @@ +Collections: + - Name: HRNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + +Models: + - Name: faster_rcnn_hrnetv2p_w18_1x_coco + In Collection: HRNet + Config: configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py + Metadata: + Training Memory (GB): 6.6 + inference time (ms/im): + - value: 74.63 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco/faster_rcnn_hrnetv2p_w18_1x_coco_20200130-56651a6d.pth + + - Name: faster_rcnn_hrnetv2p_w18_2x_coco + In Collection: HRNet + Config: configs/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py + Metadata: + Training Memory (GB): 6.6 + inference time (ms/im): + - value: 74.63 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco/faster_rcnn_hrnetv2p_w18_2x_coco_20200702_085731-a4ec0611.pth + + - Name: faster_rcnn_hrnetv2p_w32_1x_coco + In Collection: HRNet + Config: configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py + Metadata: + Training Memory (GB): 9.0 + inference time (ms/im): + - value: 80.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco/faster_rcnn_hrnetv2p_w32_1x_coco_20200130-6e286425.pth + + - Name: faster_rcnn_hrnetv2p_w32_2x_coco + In Collection: HRNet + Config: configs/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py + Metadata: + Training Memory (GB): 9.0 + inference time (ms/im): + - value: 80.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco/faster_rcnn_hrnetv2p_w32_2x_coco_20200529_015927-976a9c15.pth + + - Name: faster_rcnn_hrnetv2p_w40_1x_coco + In Collection: HRNet + 
Config: configs/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py + Metadata: + Training Memory (GB): 10.4 + inference time (ms/im): + - value: 95.24 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco/faster_rcnn_hrnetv2p_w40_1x_coco_20200210-95c1f5ce.pth + + - Name: faster_rcnn_hrnetv2p_w40_2x_coco + In Collection: HRNet + Config: configs/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py + Metadata: + Training Memory (GB): 10.4 + inference time (ms/im): + - value: 95.24 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco/faster_rcnn_hrnetv2p_w40_2x_coco_20200512_161033-0f236ef4.pth + + - Name: mask_rcnn_hrnetv2p_w18_1x_coco + In Collection: HRNet + Config: configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 85.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 34.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco/mask_rcnn_hrnetv2p_w18_1x_coco_20200205-1c3d78ed.pth + + - Name: mask_rcnn_hrnetv2p_w18_2x_coco + In Collection: HRNet + Config: configs/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 85.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco/mask_rcnn_hrnetv2p_w18_2x_coco_20200212-b3c825b1.pth + + - Name: mask_rcnn_hrnetv2p_w32_1x_coco + In Collection: HRNet + Config: configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py + Metadata: + Training Memory (GB): 9.4 + inference time (ms/im): + - value: 88.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco/mask_rcnn_hrnetv2p_w32_1x_coco_20200207-b29f616e.pth + + - Name: mask_rcnn_hrnetv2p_w32_2x_coco + In Collection: HRNet + Config: configs/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py + Metadata: + Training Memory (GB): 9.4 + inference time (ms/im): + - value: 88.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco/mask_rcnn_hrnetv2p_w32_2x_coco_20200213-45b75b4d.pth + + - Name: mask_rcnn_hrnetv2p_w40_1x_coco + In Collection: 
HRNet + Config: configs/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py + Metadata: + Training Memory (GB): 10.9 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco/mask_rcnn_hrnetv2p_w40_1x_coco_20200511_015646-66738b35.pth + + - Name: mask_rcnn_hrnetv2p_w40_2x_coco + In Collection: HRNet + Config: configs/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py + Metadata: + Training Memory (GB): 10.9 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco/mask_rcnn_hrnetv2p_w40_2x_coco_20200512_163732-aed5e4ab.pth + + - Name: cascade_rcnn_hrnetv2p_w18_20e_coco + In Collection: HRNet + Config: configs/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco/cascade_rcnn_hrnetv2p_w18_20e_coco_20200210-434be9d7.pth + + - Name: cascade_rcnn_hrnetv2p_w32_20e_coco + In Collection: HRNet + Config: configs/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py + Metadata: + Training Memory (GB): 9.4 + inference time (ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco/cascade_rcnn_hrnetv2p_w32_20e_coco_20200208-928455a4.pth + + - Name: cascade_rcnn_hrnetv2p_w40_20e_coco + In Collection: HRNet + Config: configs/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py + Metadata: + Training Memory (GB): 10.8 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco/cascade_rcnn_hrnetv2p_w40_20e_coco_20200512_161112-75e47b04.pth + + - Name: cascade_mask_rcnn_hrnetv2p_w18_20e_coco + In Collection: HRNet + Config: configs/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py + Metadata: + Training Memory (GB): 8.5 + inference time (ms/im): + - value: 117.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco/cascade_mask_rcnn_hrnetv2p_w18_20e_coco_20200210-b543cd2b.pth + + - Name: cascade_mask_rcnn_hrnetv2p_w32_20e_coco + In Collection: HRNet + Config: configs/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py + Metadata: + inference time (ms/im): + - value: 120.48 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + 
mask AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco/cascade_mask_rcnn_hrnetv2p_w32_20e_coco_20200512_154043-39d9cf7b.pth + + - Name: cascade_mask_rcnn_hrnetv2p_w40_20e_coco + In Collection: HRNet + Config: configs/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py + Metadata: + Training Memory (GB): 12.5 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco/cascade_mask_rcnn_hrnetv2p_w40_20e_coco_20200527_204922-969c4610.pth + + - Name: htc_hrnetv2p_w18_20e_coco + In Collection: HRNet + Config: configs/hrnet/htc_hrnetv2p_w18_20e_coco.py + Metadata: + Training Memory (GB): 10.8 + inference time (ms/im): + - value: 212.77 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w18_20e_coco/htc_hrnetv2p_w18_20e_coco_20200210-b266988c.pth + + - Name: htc_hrnetv2p_w32_20e_coco + In Collection: HRNet + Config: configs/hrnet/htc_hrnetv2p_w32_20e_coco.py + Metadata: + Training Memory (GB): 13.1 + inference time (ms/im): + - value: 204.08 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w32_20e_coco/htc_hrnetv2p_w32_20e_coco_20200207-7639fa12.pth + + - Name: htc_hrnetv2p_w40_20e_coco + In Collection: HRNet + Config: configs/hrnet/htc_hrnetv2p_w40_20e_coco.py + Metadata: + Training Memory (GB): 14.6 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w40_20e_coco/htc_hrnetv2p_w40_20e_coco_20200529_183411-417c4d5b.pth + + - Name: fcos_hrnetv2p_w18_gn-head_4x4_1x_coco + In Collection: HRNet + Config: configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 13.0 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 35.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco_20201212_100710-4ad151de.pth + + - Name: fcos_hrnetv2p_w18_gn-head_4x4_2x_coco + In Collection: HRNet + Config: configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 13.0 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.2 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco_20201212_101110-5c575fa5.pth + + - Name: fcos_hrnetv2p_w32_gn-head_4x4_1x_coco + In Collection: HRNet + Config: configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 17.5 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco_20201211_134730-cb8055c0.pth + + - Name: fcos_hrnetv2p_w32_gn-head_4x4_2x_coco + In Collection: HRNet + Config: configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 17.5 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco_20201212_112133-77b6b9bb.pth + + - Name: fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco + In Collection: HRNet + Config: configs/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 13.0 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco_20201212_111651-441e9d9f.pth + + - Name: fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco + In Collection: HRNet + Config: configs/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 17.5 + inference time (ms/im): + - value: 80.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco_20201212_090846-b6f2b49f.pth + + - Name: fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco + In Collection: HRNet + Config: configs/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 20.3 + inference time (ms/im): + - value: 92.59 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco_20201212_124752-f22d2ce5.pth diff --git a/object_detection/configs/htc/README.md 
b/object_detection/configs/htc/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d722a80ca0a177ce552f7ec9fbe9574c5628e57a --- /dev/null +++ b/object_detection/configs/htc/README.md @@ -0,0 +1,71 @@ +# Hybrid Task Cascade for Instance Segmentation + +## Abstract + + + +Cascade is a classic yet powerful architecture that has boosted performance on various tasks. However, how to introduce cascade to instance segmentation remains an open question. A simple combination of Cascade R-CNN and Mask R-CNN only brings limited gain. In exploring a more effective approach, we find that the key to a successful instance segmentation cascade is to fully leverage the reciprocal relationship between detection and segmentation. In this work, we propose a new framework, Hybrid Task Cascade (HTC), which differs in two important aspects: (1) instead of performing cascaded refinement on these two tasks separately, it interweaves them for joint multi-stage processing; (2) it adopts a fully convolutional branch to provide spatial context, which helps distinguish hard foreground from cluttered background. Overall, this framework can learn more discriminative features progressively while integrating complementary features in each stage. Without bells and whistles, a single HTC obtains 38.4 mask AP on the MS COCO dataset, a 1.5-point improvement over a strong Cascade Mask R-CNN baseline. Moreover, our overall system achieves 48.6 mask AP on the test-challenge split, ranking 1st in the COCO 2018 Challenge Object Detection Task. + + 
+ +
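Both of these aspects map directly onto the config files added in this directory: `HybridTaskCascadeRoIHead` interleaves the box and mask branches across three cascade stages (see `htc_without_semantic_r50_fpn_1x_coco.py`), and `htc_r50_fpn_1x_coco.py` adds the fully convolutional semantic branch, which is why the COCO-stuff maps listed in the Dataset section are required for training. A condensed excerpt of those settings (not a complete, standalone config):

```python
# Condensed excerpt of the HTC settings added below in
# htc_without_semantic_r50_fpn_1x_coco.py and htc_r50_fpn_1x_coco.py;
# only the keys relevant to the two points above are shown.
model = dict(
    type='HybridTaskCascade',
    roi_head=dict(
        type='HybridTaskCascadeRoIHead',
        num_stages=3,
        interleaved=True,     # (1) box and mask refinement are interleaved per stage
        mask_info_flow=True,  # mask features flow from one stage to the next
        semantic_head=dict(   # (2) fully convolutional branch for spatial context
            type='FusedSemanticHead',
            num_classes=183,
            loss_seg=dict(
                type='CrossEntropyLoss', ignore_index=255, loss_weight=0.2))))
```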
+ + + + +## Citation + + + +We provide config files to reproduce the results in the CVPR 2019 paper for [Hybrid Task Cascade](https://arxiv.org/abs/1901.07518). + +```latex +@inproceedings{chen2019hybrid, + title={Hybrid task cascade for instance segmentation}, + author={Chen, Kai and Pang, Jiangmiao and Wang, Jiaqi and Xiong, Yu and Li, Xiaoxiao and Sun, Shuyang and Feng, Wansen and Liu, Ziwei and Shi, Jianping and Ouyang, Wanli and Loy, Chen Change and Lin, Dahua}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019} +} +``` + +## Dataset + +HTC requires the COCO and [COCO-stuff](http://calvin.inf.ed.ac.uk/wp-content/uploads/data/cocostuffdataset/stuffthingmaps_trainval2017.zip) datasets for training. You need to download the stuff-thing maps and extract them into the COCO dataset path. +The directory structure should look like this. + +```none +mmdetection +├── mmdet +├── tools +├── configs +├── data +│ ├── coco +│ │ ├── annotations +│ │ ├── train2017 +│ │ ├── val2017 +│ │ ├── test2017 +│ │ ├── stuffthingmaps +``` + +## Results and Models + +The results on COCO 2017val are shown in the table below. (Results on test-dev are usually slightly higher than on val.) + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | pytorch | 1x | 8.2 | 5.8 | 42.3 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_1x_coco/htc_r50_fpn_1x_coco_20200317-7332cf16.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_1x_coco/htc_r50_fpn_1x_coco_20200317_070435.log.json) | +| R-50-FPN | pytorch | 20e | 8.2 | - | 43.3 | 38.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_r50_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_20e_coco/htc_r50_fpn_20e_coco_20200319-fe28c577.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_20e_coco/htc_r50_fpn_20e_coco_20200319_070313.log.json) | +| R-101-FPN | pytorch | 20e | 10.2 | 5.5 | 44.8 | 39.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_r101_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r101_fpn_20e_coco/htc_r101_fpn_20e_coco_20200317-9b41b48f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r101_fpn_20e_coco/htc_r101_fpn_20e_coco_20200317_153107.log.json) | +| X-101-32x4d-FPN | pytorch | 20e | 11.4 | 5.0 | 46.1 | 40.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_32x4d_fpn_16x1_20e_coco/htc_x101_32x4d_fpn_16x1_20e_coco_20200318-de97ae01.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_32x4d_fpn_16x1_20e_coco/htc_x101_32x4d_fpn_16x1_20e_coco_20200318_034519.log.json) | +| X-101-64x4d-FPN | pytorch | 20e | 14.5 | 4.4 | 47.0 | 41.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_16x1_20e_coco/htc_x101_64x4d_fpn_16x1_20e_coco_20200318-b181fd7a.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_16x1_20e_coco/htc_x101_64x4d_fpn_16x1_20e_coco_20200318_081711.log.json) | + +- In the HTC paper and COCO 2018 Challenge, `score_thr` is set to 0.001 for both baselines and HTC. +- We use 8 GPUs with 2 images/GPU for R-50 and R-101 models, and 16 GPUs with 1 image/GPU for X-101 models. + If you would like to train X-101 HTC with 8 GPUs, you need to change the lr from 0.02 to 0.01. + +We also provide a powerful HTC with DCN and multi-scale training model. No testing augmentation is used. + +| Backbone | Style | DCN | training scales | Lr schd | box AP | mask AP | Config | Download | +|:----------------:|:-------:|:-----:|:---------------:|:-------:|:------:|:-------:|:------:|:--------:| +| X-101-64x4d-FPN | pytorch | c3-c5 | 400~1400 | 20e | 50.4 | 43.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco_20200312-946fd751.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco_20200312_203410.log.json) | diff --git a/object_detection/configs/htc/htc_r101_fpn_20e_coco.py b/object_detection/configs/htc/htc_r101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b42297bf14723f4068ebddaffdeb84a29d2fee44 --- /dev/null +++ b/object_detection/configs/htc/htc_r101_fpn_20e_coco.py @@ -0,0 +1,9 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/object_detection/configs/htc/htc_r50_fpn_1x_coco.py b/object_detection/configs/htc/htc_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1e8e18a0d6ea2f7572add5af1b2bfd1480fd70af --- /dev/null +++ b/object_detection/configs/htc/htc_r50_fpn_1x_coco.py @@ -0,0 +1,56 @@ +_base_ = './htc_without_semantic_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + semantic_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[8]), + semantic_head=dict( + type='FusedSemanticHead', + num_ins=5, + fusion_level=1, + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=183, + loss_seg=dict( + type='CrossEntropyLoss', ignore_index=255, loss_weight=0.2)))) +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', with_bbox=True, with_mask=True, with_seg=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 8), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 
800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict( + seg_prefix=data_root + 'stuffthingmaps/train2017/', + pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/htc/htc_r50_fpn_20e_coco.py b/object_detection/configs/htc/htc_r50_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7d2e0116e7d3533d3d6e9567f310a0d1d86cdb42 --- /dev/null +++ b/object_detection/configs/htc/htc_r50_fpn_20e_coco.py @@ -0,0 +1,4 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/object_detection/configs/htc/htc_without_semantic_r50_fpn_1x_coco.py b/object_detection/configs/htc/htc_without_semantic_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..565104f4aa984eb0685548e3bbdf2497cf72b5e9 --- /dev/null +++ b/object_detection/configs/htc/htc_without_semantic_r50_fpn_1x_coco.py @@ -0,0 +1,236 @@ +_base_ = [ + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='HybridTaskCascade', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='HybridTaskCascadeRoIHead', + interleaved=True, + mask_info_flow=True, + num_stages=3, + stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + 
in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=[ + dict( + type='HTCMaskHead', + with_conv_res=False, + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)), + dict( + type='HTCMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)), + dict( + type='HTCMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)) + ]), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=[ + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.6, + neg_iou_thr=0.6, + min_pos_iou=0.6, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.7, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False) + ]), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.001, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + val=dict(pipeline=test_pipeline), test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py b/object_detection/configs/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..0c834f28357a506cdf520b2c23cfe396b5c68709 --- /dev/null +++ b/object_detection/configs/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py @@ -0,0 +1,19 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) +data = dict(samples_per_gpu=1, workers_per_gpu=1) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/object_detection/configs/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py b/object_detection/configs/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8b0d962b2e920121f6c31df406e8fb6159cbe9f0 --- /dev/null +++ b/object_detection/configs/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py @@ -0,0 +1,19 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) +data = dict(samples_per_gpu=1, workers_per_gpu=1) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/object_detection/configs/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py b/object_detection/configs/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c8d870334c31fdbbe16a87b15b34d11b5b90fb81 --- /dev/null +++ b/object_detection/configs/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py @@ -0,0 +1,43 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) +# dataset settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', with_bbox=True, with_mask=True, with_seg=True), + dict( + type='Resize', + img_scale=[(1600, 400), (1600, 1400)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 8), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +data = dict( + samples_per_gpu=1, workers_per_gpu=1, train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/object_detection/configs/htc/metafile.yml b/object_detection/configs/htc/metafile.yml new file mode 100644 index 
0000000000000000000000000000000000000000..acd038c707b85ac107beb80b6addfc202e7650d9 --- /dev/null +++ b/object_detection/configs/htc/metafile.yml @@ -0,0 +1,165 @@ +Collections: + - Name: HTC + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - HTC + - RPN + - ResNet + - ResNeXt + - RoIAlign + Paper: + URL: https://arxiv.org/abs/1901.07518 + Title: 'Hybrid Task Cascade for Instance Segmentation' + README: configs/htc/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/htc.py#L6 + Version: v2.0.0 + +Models: + - Name: htc_r50_fpn_1x_coco + In Collection: HTC + Config: configs/htc/htc_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 8.2 + inference time (ms/im): + - value: 172.41 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_1x_coco/htc_r50_fpn_1x_coco_20200317-7332cf16.pth + + - Name: htc_r50_fpn_20e_coco + In Collection: HTC + Config: configs/htc/htc_r50_fpn_20e_coco.py + Metadata: + Training Memory (GB): 8.2 + inference time (ms/im): + - value: 172.41 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_20e_coco/htc_r50_fpn_20e_coco_20200319-fe28c577.pth + + - Name: htc_r101_fpn_20e_coco + In Collection: HTC + Config: configs/htc/htc_r101_fpn_20e_coco.py + Metadata: + Training Memory (GB): 10.2 + inference time (ms/im): + - value: 181.82 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r101_fpn_20e_coco/htc_r101_fpn_20e_coco_20200317-9b41b48f.pth + + - Name: htc_x101_32x4d_fpn_16x1_20e_coco + In Collection: HTC + Config: configs/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py + Metadata: + Training Resources: 16x V100 GPUs + Batch Size: 16 + Training Memory (GB): 11.4 + inference time (ms/im): + - value: 200 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_32x4d_fpn_16x1_20e_coco/htc_x101_32x4d_fpn_16x1_20e_coco_20200318-de97ae01.pth + + - Name: htc_x101_64x4d_fpn_16x1_20e_coco + In Collection: HTC + Config: configs/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py + Metadata: + Training Resources: 16x V100 GPUs + Batch Size: 16 + Training Memory (GB): 14.5 + inference time (ms/im): + - value: 227.27 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + 
mask AP: 41.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_16x1_20e_coco/htc_x101_64x4d_fpn_16x1_20e_coco_20200318-b181fd7a.pth + + - Name: htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco + In Collection: HTC + Config: configs/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py + Metadata: + Training Resources: 16x V100 GPUs + Batch Size: 16 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 50.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 43.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco_20200312-946fd751.pth diff --git a/object_detection/configs/instaboost/README.md b/object_detection/configs/instaboost/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e948ccbff034e6a14471739312785c2811d79236 --- /dev/null +++ b/object_detection/configs/instaboost/README.md @@ -0,0 +1,63 @@ +# Instaboost: Boosting instance segmentation via probability map guided copy-pasting + +## Abstract + + + +Instance segmentation requires a large number of training samples to achieve satisfactory performance and benefits from proper data augmentation. To enlarge the training set and increase the diversity, previous methods have investigated using data annotation from other domain (e.g. bbox, point) in a weakly supervised mechanism. In this paper, we present a simple, efficient and effective method to augment the training set using the existing instance mask annotations. Exploiting the pixel redundancy of the background, we are able to improve the performance of Mask R-CNN for 1.7 mAP on COCO dataset and 3.3 mAP on Pascal VOC dataset by simply introducing random jittering to objects. Furthermore, we propose a location probability map based approach to explore the feasible locations that objects can be placed based on local appearance similarity. With the guidance of such map, we boost the performance of R101-Mask R-CNN on instance segmentation from 35.7 mAP to 37.9 mAP without modifying the backbone or network structure. Our method is simple to implement and does not increase the computational complexity. It can be integrated into the training pipeline of any instance segmentation model without affecting the training and inference efficiency. + + +
+ + + + +## Introduction + + + +Configs in this directory are the implementation of the ICCV 2019 paper "InstaBoost: Boosting Instance Segmentation Via Probability Map Guided Copy-Pasting" and are provided by the authors of the paper. InstaBoost is a data augmentation method for object detection and instance segmentation. The paper has been released on [`arXiv`](https://arxiv.org/abs/1908.07801). + +## Citation + +```latex +@inproceedings{fang2019instaboost, + title={Instaboost: Boosting instance segmentation via probability map guided copy-pasting}, + author={Fang, Hao-Shu and Sun, Jianhua and Wang, Runzhong and Gou, Minghao and Li, Yong-Lu and Lu, Cewu}, + booktitle={Proceedings of the IEEE International Conference on Computer Vision}, + pages={682--691}, + year={2019} +} +``` + +## Usage + +### Requirements + +You need to install `instaboostfast` before using it. + +```shell +pip install instaboostfast +``` + +The code and more details can be found [here](https://github.com/GothicAi/Instaboost). + +### Integration with MMDetection + +InstaBoost has already been integrated into the data pipeline, so all you need to do is add or change the **InstaBoost** configuration after **LoadImageFromFile**. We have provided examples like [this](mask_rcnn_r50_fpn_instaboost_4x#L121); a minimal sketch of the pipeline fragment is also shown after the notes below. You can refer to [`InstaBoostConfig`](https://github.com/GothicAi/InstaBoost-pypi#instaboostconfig) for more details. + +## Results and Models + +- All models were trained on `coco_2017_train` and tested on `coco_2017_val` for convenience of evaluation and comparison. In the paper, the results are obtained from `test-dev`. +- To balance accuracy and training time when using InstaBoost, the models released on this page are all trained for 48 epochs. Other training and testing configs strictly follow the original framework. +- For results and models in MMDetection V1.x, please refer to [Instaboost](https://github.com/GothicAi/Instaboost).
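As referenced above, the fragment below is a minimal sketch of the integration. It condenses the `train_pipeline` from `mask_rcnn_r50_fpn_instaboost_4x_coco.py` added later in this diff; every value shown is copied from that config, and the trailing resize/flip/normalize steps are elided.

```python
# Condensed from mask_rcnn_r50_fpn_instaboost_4x_coco.py in this diff:
# the InstaBoost transform is placed right after image loading and before
# annotation loading; the rest of the pipeline is unchanged.
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(
        type='InstaBoost',
        # parameters are forwarded to InstaBoostConfig; see the linked
        # documentation for their exact semantics
        action_candidate=('normal', 'horizontal', 'skip'),
        action_prob=(1, 0, 0),
        scale=(0.8, 1.2),
        dx=15,
        dy=15,
        theta=(-1, 1),
        color_prob=0.5,
        hflag=False,
        aug_ratio=0.5),  # roughly half of the training images are augmented
    dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
    # ... Resize, RandomFlip, Normalize, Pad and formatting steps as usual ...
]
```

The Cascade Mask R-CNN variant added in this diff uses the identical fragment.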
+ +| Network | Backbone | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :--------: | :-----: | :------: | :------------: | :------:| :-----: | :------: | :-----------------: | +| Mask R-CNN | R-50-FPN | 4x | 4.4 | 17.5 | 40.6 | 36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco/mask_rcnn_r50_fpn_instaboost_4x_coco_20200307-d025f83a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco/mask_rcnn_r50_fpn_instaboost_4x_coco_20200307_223635.log.json) | +| Mask R-CNN | R-101-FPN | 4x | 6.4 | | 42.5 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco/mask_rcnn_r101_fpn_instaboost_4x_coco_20200703_235738-f23f3a5f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco/mask_rcnn_r101_fpn_instaboost_4x_coco_20200703_235738.log.json) | +| Mask R-CNN | X-101-64x4d-FPN | 4x | 10.7 | | 44.7 | 39.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco_20200515_080947-8ed58c1b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco_20200515_080947.log.json) | +| Cascade R-CNN | R-101-FPN | 4x | 6.0 | 12.0 | 43.7 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco_20200307-c19d98d9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco_20200307_223646.log.json) | diff --git a/object_detection/configs/instaboost/cascade_mask_rcnn_r101_fpn_instaboost_4x_coco.py b/object_detection/configs/instaboost/cascade_mask_rcnn_r101_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9d0515d73d4276883f495d8b30b793afd9fa2dc5 --- /dev/null +++ b/object_detection/configs/instaboost/cascade_mask_rcnn_r101_fpn_instaboost_4x_coco.py @@ -0,0 +1,7 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py b/object_detection/configs/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a89a81f5c76586d6d1b15abf74f3740e9f439762 --- /dev/null +++ b/object_detection/configs/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py @@ -0,0 +1,28 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 
57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='InstaBoost', + action_candidate=('normal', 'horizontal', 'skip'), + action_prob=(1, 0, 0), + scale=(0.8, 1.2), + dx=15, + dy=15, + theta=(-1, 1), + color_prob=0.5, + hflag=False, + aug_ratio=0.5), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[32, 44]) +runner = dict(type='EpochBasedRunner', max_epochs=48) diff --git a/object_detection/configs/instaboost/cascade_mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py b/object_detection/configs/instaboost/cascade_mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d67b7992ab1fa5d8190ff1a0d0c52a0e832c205d --- /dev/null +++ b/object_detection/configs/instaboost/cascade_mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py b/object_detection/configs/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ebbb43e918753e464a8e1e7f9ff1fed702c1b64d --- /dev/null +++ b/object_detection/configs/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_instaboost_4x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py b/object_detection/configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..55ca62b7bc6c9cdc97018bcfbe5b109038470dd3 --- /dev/null +++ b/object_detection/configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py @@ -0,0 +1,28 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='InstaBoost', + action_candidate=('normal', 'horizontal', 'skip'), + action_prob=(1, 0, 0), + scale=(0.8, 1.2), + dx=15, + dy=15, + theta=(-1, 1), + color_prob=0.5, + hflag=False, + aug_ratio=0.5), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[32, 44]) +runner = dict(type='EpochBasedRunner', max_epochs=48) diff --git 
a/object_detection/configs/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py b/object_detection/configs/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2010f44819f625f7da5196270f3721274a390881 --- /dev/null +++ b/object_detection/configs/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r50_fpn_instaboost_4x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/instaboost/metafile.yml b/object_detection/configs/instaboost/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..325283d369628ab51552dabe2ab05a8a9a5f2d13 --- /dev/null +++ b/object_detection/configs/instaboost/metafile.yml @@ -0,0 +1,99 @@ +Collections: + - Name: InstaBoost + Metadata: + Training Data: COCO + Training Techniques: + - InstaBoost + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Paper: + URL: https://arxiv.org/abs/1908.07801 + Title: 'Instaboost: Boosting instance segmentation via probability map guided copy-pasting' + README: configs/instaboost/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/datasets/pipelines/instaboost.py#L7 + Version: v2.0.0 + +Models: + - Name: mask_rcnn_r50_fpn_instaboost_4x_coco + In Collection: InstaBoost + Config: configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 57.14 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 48 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco/mask_rcnn_r50_fpn_instaboost_4x_coco_20200307-d025f83a.pth + + - Name: mask_rcnn_r101_fpn_instaboost_4x_coco + In Collection: InstaBoost + Config: configs/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py + Metadata: + Training Memory (GB): 6.4 + Epochs: 48 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco/mask_rcnn_r101_fpn_instaboost_4x_coco_20200703_235738-f23f3a5f.pth + + - Name: mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco + In Collection: InstaBoost + Config: configs/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py + Metadata: + Training Memory (GB): 10.7 + Epochs: 48 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco_20200515_080947-8ed58c1b.pth + + - Name: cascade_mask_rcnn_r50_fpn_instaboost_4x_coco + In Collection: InstaBoost + Config: configs/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 
83.33 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 48 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco_20200307-c19d98d9.pth diff --git a/object_detection/configs/lad/README.md b/object_detection/configs/lad/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b48b0632cae8573e3231a8b166351be8aabbd9a7 --- /dev/null +++ b/object_detection/configs/lad/README.md @@ -0,0 +1,50 @@ +# Improving Object Detection by Label Assignment Distillation + + +## Abstract + + + +Label assignment in object detection aims to assign targets, foreground or background, to sampled regions in an image. Unlike labeling for image classification, this problem is not well defined due to the object's bounding box. In this paper, we investigate the problem from a perspective of distillation, hence we call Label Assignment Distillation (LAD). Our initial motivation is very simple, we use a teacher network to generate labels for the student. This can be achieved in two ways: either using the teacher's prediction as the direct targets (soft label), or through the hard labels dynamically assigned by the teacher (LAD). Our experiments reveal that: (i) LAD is more effective than soft-label, but they are complementary. (ii) Using LAD, a smaller teacher can also improve a larger student significantly, while soft-label can't. We then introduce Co-learning LAD, in which two networks simultaneously learn from scratch and the role of teacher and student are dynamically interchanged. Using PAA-ResNet50 as a teacher, our LAD techniques can improve detectors PAA-ResNet101 and PAA-ResNeXt101 to 46AP and 47.5AP on the COCO test-dev set. With a stronger teacher PAA-SwinB, we improve the students PAA-ResNet50 to 43.7AP by only 1x schedule training and standard setting, and PAA-ResNet101 to 47.9AP, significantly surpassing the current methods. + + +
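For quick orientation before the full config files below, here is a skeleton condensed from `lad_r101_paa_r50_fpn_coco_1x.py` in this diff: the `LAD` detector holds a student branch that is trained and a pre-trained teacher branch (loaded from `teacher_ckpt`) whose label assignment supervises the student. All values are copied from that config; most head, loss, and train/test settings are elided for brevity.

```python
# Skeleton of lad_r101_paa_r50_fpn_coco_1x.py (R-50 teacher -> R-101 student);
# the full config follows later in this diff.
teacher_ckpt = 'https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1x_coco/paa_r50_fpn_1x_coco_20200821-936edec3.pth'  # noqa

model = dict(
    type='LAD',
    # student: trained from an ImageNet-initialised ResNet-101
    backbone=dict(
        type='ResNet',
        depth=101,
        init_cfg=dict(type='Pretrained',
                      checkpoint='torchvision://resnet101')),
    neck=dict(
        type='FPN',
        in_channels=[256, 512, 1024, 2048],
        out_channels=256,
        start_level=1,
        add_extra_convs='on_output',
        num_outs=5),
    bbox_head=dict(type='LADHead', num_classes=80, in_channels=256),
    # teacher: a PAA-style ResNet-50 whose weights are loaded from teacher_ckpt
    teacher_ckpt=teacher_ckpt,
    teacher_backbone=dict(type='ResNet', depth=50),
    teacher_neck=dict(
        type='FPN',
        in_channels=[256, 512, 1024, 2048],
        out_channels=256,
        start_level=1,
        add_extra_convs='on_output',
        num_outs=5),
    teacher_bbox_head=dict(type='LADHead', num_classes=80, in_channels=256))
```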
+ + + + +## Citation + + + + +```latex +@inproceedings{nguyen2021improving, + title={Improving Object Detection by Label Assignment Distillation}, + author={Chuong H. Nguyen and Thuy C. Nguyen and Tuan N. Tang and Nam L. H. Phan}, + booktitle = {WACV}, + year={2022} +} +``` + +## Results and Models + +We provide config files to reproduce the object detection results in the +WACV 2022 paper for Improving Object Detection by Label Assignment +Distillation. + +### PAA with LAD + +| Teacher | Student | Training schedule | AP (val) | Config | +| :-------: | :-----: | :---------------: | :------: | :----------------------------------------------------: | +| -- | R-50 | 1x | 40.4 | | +| -- | R-101 | 1x | 42.6 | | +| R-101 | R-50 | 1x | 41.6 | [config](configs/lad/lad_r50_paa_r101_fpn_coco_1x.py) | +| R-50 | R-101 | 1x | 43.2 | [config](configs/lad/lad_r101_paa_r50_fpn_coco_1x.py) | + +## Note + +- Meaning of Config name: lad_r50(student model)_paa(based on paa)_r101(teacher model)_fpn(neck)_coco(dataset)_1x(12 epoch).py +- Results may fluctuate by about 0.2 mAP. diff --git a/object_detection/configs/lad/lad_r101_paa_r50_fpn_coco_1x.py b/object_detection/configs/lad/lad_r101_paa_r50_fpn_coco_1x.py new file mode 100644 index 0000000000000000000000000000000000000000..c7e3fcf6d54d6c026fede1ee9c821069fe8c65fa --- /dev/null +++ b/object_detection/configs/lad/lad_r101_paa_r50_fpn_coco_1x.py @@ -0,0 +1,121 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +teacher_ckpt = 'https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1x_coco/paa_r50_fpn_1x_coco_20200821-936edec3.pth' # noqa +model = dict( + type='LAD', + # student + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='LADHead', + reg_decoded_bbox=True, + score_voting=True, + topk=9, + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.3), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=0.5)), + # teacher + teacher_ckpt=teacher_ckpt, + teacher_backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + teacher_neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + teacher_bbox_head=dict( + type='LADHead', + reg_decoded_bbox=True, + score_voting=True, + topk=9, + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + 
bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.3), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=0.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.1, + neg_iou_thr=0.1, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + score_voting=True, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +data = dict(samples_per_gpu=8, workers_per_gpu=4) +optimizer = dict(lr=0.01) +fp16 = dict(loss_scale=512.) diff --git a/object_detection/configs/lad/lad_r50_paa_r101_fpn_coco_1x.py b/object_detection/configs/lad/lad_r50_paa_r101_fpn_coco_1x.py new file mode 100644 index 0000000000000000000000000000000000000000..3c46f7db831ae50247b9300b57168849529005e8 --- /dev/null +++ b/object_detection/configs/lad/lad_r50_paa_r101_fpn_coco_1x.py @@ -0,0 +1,120 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +teacher_ckpt = 'http://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_1x_coco/paa_r101_fpn_1x_coco_20200821-0a1825a4.pth' # noqa +model = dict( + type='LAD', + # student + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='LADHead', + reg_decoded_bbox=True, + score_voting=True, + topk=9, + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.3), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=0.5)), + # teacher + teacher_ckpt=teacher_ckpt, + teacher_backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + teacher_neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + teacher_bbox_head=dict( + type='LADHead', + reg_decoded_bbox=True, + score_voting=True, + topk=9, + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + 
loss_bbox=dict(type='GIoULoss', loss_weight=1.3), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=0.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.1, + neg_iou_thr=0.1, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + score_voting=True, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +data = dict(samples_per_gpu=8, workers_per_gpu=4) +optimizer = dict(lr=0.01) +fp16 = dict(loss_scale=512.) diff --git a/object_detection/configs/ld/README.md b/object_detection/configs/ld/README.md new file mode 100644 index 0000000000000000000000000000000000000000..baa824753ba5b59cc03f620106e0b5a799504a82 --- /dev/null +++ b/object_detection/configs/ld/README.md @@ -0,0 +1,45 @@ +# Localization Distillation for Object Detection + +## Abstract + + + +Knowledge distillation (KD) has witnessed its powerful capability in learning compact models in object detection. Previous KD methods for object detection mostly focus on imitating deep features within the imitation regions instead of mimicking classification logits due to its inefficiency in distilling localization information. In this paper, by reformulating the knowledge distillation process on localization, we present a novel localization distillation (LD) method which can efficiently transfer the localization knowledge from the teacher to the student. Moreover, we also heuristically introduce the concept of valuable localization region that can aid to selectively distill the semantic and localization knowledge for a certain region. Combining these two new components, for the first time, we show that logit mimicking can outperform feature imitation and localization knowledge distillation is more important and efficient than semantic knowledge for distilling object detectors. Our distillation scheme is simple as well as effective and can be easily applied to different dense object detectors. Experiments show that our LD can boost the AP score of GFocal-ResNet-50 with a single-scale 1× training schedule from 40.1 to 42.1 on the COCO benchmark without any sacrifice on the inference speed. + + +
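Before the full config files below, the following skeleton, condensed from `ld_r18_gflv1_r101_fpn_coco_1x.py` in this diff, highlights how LD is wired in: a GFocal-style ResNet-18 student wrapped in `KnowledgeDistillationSingleStageDetector`, with an `LDHead` that adds a KL-divergence localization-distillation term against a pre-trained GFL R-101 teacher. All values are copied from that config; the remaining head, loss, and train/test settings are elided.

```python
# Skeleton of ld_r18_gflv1_r101_fpn_coco_1x.py; the full config follows later
# in this diff.
teacher_ckpt = 'https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_mstrain_2x_coco/gfl_r101_fpn_mstrain_2x_coco_20200629_200126-dd12f847.pth'  # noqa

model = dict(
    type='KnowledgeDistillationSingleStageDetector',
    # teacher: a released GFL R-101 model, defined by its own config file
    teacher_config='configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py',
    teacher_ckpt=teacher_ckpt,
    # student: ResNet-18 + FPN
    backbone=dict(
        type='ResNet',
        depth=18,
        init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet18')),
    neck=dict(
        type='FPN',
        in_channels=[64, 128, 256, 512],
        out_channels=256,
        start_level=1,
        add_extra_convs='on_output',
        num_outs=5),
    bbox_head=dict(
        type='LDHead',
        num_classes=80,
        in_channels=256,
        reg_max=16,
        # the localization-distillation term added on top of the usual losses
        loss_ld=dict(
            type='KnowledgeDistillationKLDivLoss', loss_weight=0.25, T=10)))
```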
+ + + + +## Citation + + + +```latex +@Article{zheng2021LD, + title={Localization Distillation for Object Detection}, + author= {Zhaohui Zheng, Rongguang Ye, Ping Wang, Jun Wang, Dongwei Ren, Wangmeng Zuo}, + journal={arXiv:2102.12252}, + year={2021} +} +``` + +### GFocalV1 with LD + +| Teacher | Student | Training schedule | Mini-batch size | AP (val) | AP50 (val) | AP75 (val) | Config | +| :-------: | :-----: | :---------------: | :-------------: | :------: | :--------: | :--------: | :--------------: | +| -- | R-18 | 1x | 6 | 35.8 | 53.1 | 38.2 | | +| R-101 | R-18 | 1x | 6 | 36.5 | 52.9 | 39.3 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/ld/ld_r18_gflv1_r101_fpn_coco_1x.py) | +| -- | R-34 | 1x | 6 | 38.9 | 56.6 | 42.2 | | +| R-101 | R-34 | 1x | 6 | 39.8 | 56.6 | 43.1 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/ld/ld_r34_gflv1_r101_fpn_coco_1x.py) | +| -- | R-50 | 1x | 6 | 40.1 | 58.2 | 43.1 | | +| R-101 | R-50 | 1x | 6 | 41.1 | 58.7 | 44.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/ld/ld_r50_gflv1_r101_fpn_coco_1x.py) | +| -- | R-101 | 2x | 6 | 44.6 | 62.9 | 48.4 | | +| R-101-DCN | R-101 | 2x | 6 | 45.4 | 63.1 | 49.5 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/ld/ld_r101_gflv1_r101dcn_fpn_coco_1x.py) | + +## Note + +- Meaning of Config name: ld_r18(student model)_gflv1(based on gflv1)_r101(teacher model)_fpn(neck)_coco(dataset)_1x(12 epoch).py diff --git a/object_detection/configs/ld/ld_r101_gflv1_r101dcn_fpn_coco_2x.py b/object_detection/configs/ld/ld_r101_gflv1_r101dcn_fpn_coco_2x.py new file mode 100644 index 0000000000000000000000000000000000000000..1cbdb4cf5a5d5afa60327d80b31475500d5f3c6c --- /dev/null +++ b/object_detection/configs/ld/ld_r101_gflv1_r101dcn_fpn_coco_2x.py @@ -0,0 +1,44 @@ +_base_ = ['./ld_r18_gflv1_r101_fpn_coco_1x.py'] +teacher_ckpt = 'https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20200630_102002-134b07df.pth' # noqa +model = dict( + teacher_config='configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py', + teacher_ckpt=teacher_ckpt, + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5)) + +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) +# multi-scale training +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/object_detection/configs/ld/ld_r18_gflv1_r101_fpn_coco_1x.py b/object_detection/configs/ld/ld_r18_gflv1_r101_fpn_coco_1x.py new file mode 100644 index 
0000000000000000000000000000000000000000..18dce814be9036e6af70389fc60a5b4e42bc8efe --- /dev/null +++ b/object_detection/configs/ld/ld_r18_gflv1_r101_fpn_coco_1x.py @@ -0,0 +1,62 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +teacher_ckpt = 'https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_mstrain_2x_coco/gfl_r101_fpn_mstrain_2x_coco_20200629_200126-dd12f847.pth' # noqa +model = dict( + type='KnowledgeDistillationSingleStageDetector', + teacher_config='configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py', + teacher_ckpt=teacher_ckpt, + backbone=dict( + type='ResNet', + depth=18, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet18')), + neck=dict( + type='FPN', + in_channels=[64, 128, 256, 512], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='LDHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + loss_cls=dict( + type='QualityFocalLoss', + use_sigmoid=True, + beta=2.0, + loss_weight=1.0), + loss_dfl=dict(type='DistributionFocalLoss', loss_weight=0.25), + loss_ld=dict( + type='KnowledgeDistillationKLDivLoss', loss_weight=0.25, T=10), + reg_max=16, + loss_bbox=dict(type='GIoULoss', loss_weight=2.0)), + # training and testing settings + train_cfg=dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) + +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/ld/ld_r34_gflv1_r101_fpn_coco_1x.py b/object_detection/configs/ld/ld_r34_gflv1_r101_fpn_coco_1x.py new file mode 100644 index 0000000000000000000000000000000000000000..3b6996d49b06ffcd0803e86cb33f8a35b02911dc --- /dev/null +++ b/object_detection/configs/ld/ld_r34_gflv1_r101_fpn_coco_1x.py @@ -0,0 +1,19 @@ +_base_ = ['./ld_r18_gflv1_r101_fpn_coco_1x.py'] +model = dict( + backbone=dict( + type='ResNet', + depth=34, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet34')), + neck=dict( + type='FPN', + in_channels=[64, 128, 256, 512], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5)) diff --git a/object_detection/configs/ld/ld_r50_gflv1_r101_fpn_coco_1x.py b/object_detection/configs/ld/ld_r50_gflv1_r101_fpn_coco_1x.py new file mode 100644 index 0000000000000000000000000000000000000000..2b18785ae41f6fd11a933ca046a34b967306f9b6 --- /dev/null +++ b/object_detection/configs/ld/ld_r50_gflv1_r101_fpn_coco_1x.py @@ -0,0 +1,19 @@ +_base_ = ['./ld_r18_gflv1_r101_fpn_coco_1x.py'] +model = dict( + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + 
out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5)) diff --git a/object_detection/configs/ld/metafile.yml b/object_detection/configs/ld/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..cd833bf02359cba2f4bd7bb18727100060270a49 --- /dev/null +++ b/object_detection/configs/ld/metafile.yml @@ -0,0 +1,72 @@ +Collections: + - Name: Localization Distillation + Metadata: + Training Data: COCO + Training Techniques: + - Localization Distillation + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/2102.12252 + Title: 'Localization Distillation for Object Detection' + README: configs/ld/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.11.0/mmdet/models/dense_heads/ld_head.py#L11 + Version: v2.11.0 + +Models: + - Name: ld_r18_gflv1_r101_fpn_coco_1x + In Collection: Localization Distillation + Config: configs/ld/ld_r18_gflv1_r101_fpn_coco_1x.py + Metadata: + Teacher: R-101 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.5 + box AP@0.5: 52.9 + box AP@0.75: 39.3 + + - Name: ld_r34_gflv1_r101_fpn_coco_1x + In Collection: Localization Distillation + Config: configs/ld/ld_r34_gflv1_r101_fpn_coco_1x.py + Metadata: + Teacher: R-101 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.8 + box AP@0.5: 56.6 + box AP@0.75: 43.1 + + - Name: ld_r50_gflv1_r101_fpn_coco_1x + In Collection: Localization Distillation + Config: configs/ld/ld_r50_gflv1_r101_fpn_coco_1x.py + Metadata: + Teacher: R-101 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.1 + box AP@0.5: 58.7 + box AP@0.75: 44.9 + + - Name: ld_r101_gflv1_r101dcn_fpn_coco_1x + In Collection: Localization Distillation + Config: configs/ld/ld_r101_gflv1_r101dcn_fpn_coco_1x.py + Metadata: + Teacher: R-101-DCN + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.4 + box AP@0.5: 63.1 + box AP@0.75: 49.5 diff --git a/object_detection/configs/legacy_1.x/README.md b/object_detection/configs/legacy_1.x/README.md new file mode 100644 index 0000000000000000000000000000000000000000..8f92c8f7437f45b89b883aa856b77c9af257dbd7 --- /dev/null +++ b/object_detection/configs/legacy_1.x/README.md @@ -0,0 +1,53 @@ +# Legacy Configs in MMDetection V1.x + + + +Configs in this directory implement the legacy configs used by MMDetection V1.x and its model zoos. + +To help users convert their models from V1.x to MMDetection V2.0, we provide v1.x configs to inference the converted v1.x models. +Due to the BC-breaking changes in MMDetection V2.0 from MMDetection V1.x, running inference with the same model weights in these two version will produce different results. The difference will cause within 1% AP absolute difference as can be found in the following table. + +## Usage + +To upgrade the model version, the users need to do the following steps. + +### 1. Convert model weights + +There are three main difference in the model weights between V1.x and V2.0 codebases. + +1. Since the class order in all the detector's classification branch is reordered, all the legacy model weights need to go through the conversion process. +2. The regression and segmentation head no longer contain the background channel. Weights in these background channels should be removed to fix in the current codebase. +3. 
For two-stage detectors, their weights need to be upgraded since MMDetection V2.0 refactors all the two-stage detectors with `RoIHead`. + +Users can apply the same modifications described above to their self-implemented +detectors. We provide a script `tools/model_converters/upgrade_model_version.py` to convert the model weights in the V1.x model zoo. + +```bash +python tools/model_converters/upgrade_model_version.py ${OLD_MODEL_PATH} ${NEW_MODEL_PATH} --num-classes ${NUM_CLASSES} + +``` + +- OLD_MODEL_PATH: the path to load the model weights in the 1.x version. +- NEW_MODEL_PATH: the path to save the converted model weights in the 2.0 version. +- NUM_CLASSES: number of classes of the original model weights. Usually it is 81 for the COCO dataset and 21 for the VOC dataset. + The number of classes in the V2.0 models should be equal to that in the V1.x models minus 1. + +### 2. Use configs with legacy settings + +After converting the model weights, check out the v1.2 release to find the corresponding config file that uses the legacy settings. +The V1.x models usually need these three legacy modules: `LegacyAnchorGenerator`, `LegacyDeltaXYWHBBoxCoder`, and `RoIAlign(align=False)`. +For models using ResNet Caffe backbones, they also need to change the pretrained checkpoint name and the corresponding `img_norm_cfg`. +An example is given in [`retinanet_r50_caffe_fpn_1x_coco_v1.py`](retinanet_r50_caffe_fpn_1x_coco_v1.py). +Then use the config to test the model weights. For most models, the obtained results should be close to those in V1.x. +We provide configs of some common structures in this directory. + +## Performance + +The performance changes after converting the models in this directory are listed in the following table. +| Method | Style | Lr schd | V1.x box AP | V1.x mask AP | V2.0 box AP | V2.0 mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-----: | :------: | :-----: | :-------: | :------: | +| Mask R-CNN R-50-FPN | pytorch | 1x | 37.3 | 34.2 | 36.8 | 33.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py) | [model](https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/mask_rcnn_r50_fpn_1x_20181010-069fa190.pth) | +| RetinaNet R-50-FPN | caffe | 1x | 35.8 | - | 35.4 | - | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/retinanet_r50_caffe_1x_coco_v1.py) | | +| RetinaNet R-50-FPN | pytorch | 1x | 35.6 | - | 35.2 | - | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py) | [model](https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/retinanet_r50_fpn_1x_20181125-7b0c2548.pth) | +| Cascade Mask R-CNN R-50-FPN | pytorch | 1x | 41.2 | 35.7 | 40.8 | 35.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py) | [model](https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/cascade_mask_rcnn_r50_fpn_1x_20181123-88b170c9.pth) | +| SSD300-VGG16 | caffe | 120e | 25.7 | - | 25.4 | - | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/ssd300_coco_v1.py) | [model](https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/ssd300_coco_vgg16_caffe_120e_20181221-84d7110b.pth) | diff --git a/object_detection/configs/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py
b/object_detection/configs/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..fc9d0048188406348416fe5012af9985f62bbb56 --- /dev/null +++ b/object_detection/configs/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py @@ -0,0 +1,79 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='CascadeRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + anchor_generator=dict(type='LegacyAnchorGenerator', center_offset=0.5), + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0])), + roi_head=dict( + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=7, + sampling_ratio=2, + aligned=False)), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + reg_class_agnostic=True, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2])), + dict( + type='Shared2FCBBoxHead', + reg_class_agnostic=True, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1])), + dict( + type='Shared2FCBBoxHead', + reg_class_agnostic=True, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067])), + ], + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=14, + sampling_ratio=2, + aligned=False)))) +dist_params = dict(backend='nccl', port=29515) diff --git a/object_detection/configs/legacy_1.x/faster_rcnn_r50_fpn_1x_coco_v1.py b/object_detection/configs/legacy_1.x/faster_rcnn_r50_fpn_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..8c573bef34628babaee43183b260cd06e22b7c46 --- /dev/null +++ b/object_detection/configs/legacy_1.x/faster_rcnn_r50_fpn_1x_coco_v1.py @@ -0,0 +1,38 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='FasterRCNN', + backbone=dict( + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + rpn_head=dict( + type='RPNHead', + anchor_generator=dict( + type='LegacyAnchorGenerator', + center_offset=0.5, + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=7, + sampling_ratio=2, + aligned=False), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + 
bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn_proposal=dict(max_per_img=2000), + rcnn=dict(assigner=dict(match_low_quality=True)))) diff --git a/object_detection/configs/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py b/object_detection/configs/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..04581bbc901d0fda0ec8c6b4a8078ae04f21473a --- /dev/null +++ b/object_detection/configs/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py @@ -0,0 +1,34 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + rpn_head=dict( + anchor_generator=dict(type='LegacyAnchorGenerator', center_offset=0.5), + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=7, + sampling_ratio=2, + aligned=False)), + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=14, + sampling_ratio=2, + aligned=False)), + bbox_head=dict( + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + + # model training and testing settings + train_cfg=dict( + rpn_proposal=dict(max_per_img=2000), + rcnn=dict(assigner=dict(match_low_quality=True)))) diff --git a/object_detection/configs/legacy_1.x/retinanet_r50_caffe_fpn_1x_coco_v1.py b/object_detection/configs/legacy_1.x/retinanet_r50_caffe_fpn_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..a63d248c435c8b7035f00299a6f97f1fc18e3be5 --- /dev/null +++ b/object_detection/configs/legacy_1.x/retinanet_r50_caffe_fpn_1x_coco_v1.py @@ -0,0 +1,41 @@ +_base_ = './retinanet_r50_fpn_1x_coco_v1.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py b/object_detection/configs/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..6198b9717957374ce734ca74de5f54dda44123b9 --- 
/dev/null +++ b/object_detection/configs/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py @@ -0,0 +1,17 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + bbox_head=dict( + type='RetinaHead', + anchor_generator=dict( + type='LegacyAnchorGenerator', + center_offset=0.5, + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0))) diff --git a/object_detection/configs/legacy_1.x/ssd300_coco_v1.py b/object_detection/configs/legacy_1.x/ssd300_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..b194e7651ede006c5101bff1056749edf4d249cd --- /dev/null +++ b/object_detection/configs/legacy_1.x/ssd300_coco_v1.py @@ -0,0 +1,79 @@ +_base_ = [ + '../_base_/models/ssd300.py', '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +# model settings +input_size = 300 +model = dict( + bbox_head=dict( + type='SSDHead', + anchor_generator=dict( + type='LegacySSDAnchorGenerator', + scale_major=False, + input_size=input_size, + basesize_ratio_range=(0.15, 0.9), + strides=[8, 16, 32, 64, 100, 300], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2], [2]]), + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]))) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(300, 300), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(300, 300), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=3, + train=dict( + _delete_=True, + type='RepeatDataset', + times=5, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=2e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict(_delete_=True) +dist_params = dict(backend='nccl', port=29555) diff --git a/object_detection/configs/libra_rcnn/README.md b/object_detection/configs/libra_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..dac10491ca98218268a3f62b8bfee1febdc9cabc --- /dev/null +++ 
b/object_detection/configs/libra_rcnn/README.md @@ -0,0 +1,56 @@ +# Libra R-CNN: Towards Balanced Learning for Object Detection + +## Abstract + + + +Compared with model architectures, the training process, which is also crucial to the success of detectors, has received relatively less attention in object detection. In this work, we carefully revisit the standard training practice of detectors, and find that the detection performance is often limited by the imbalance during the training process, which generally consists in three levels - sample level, feature level, and objective level. To mitigate the adverse effects caused thereby, we propose Libra R-CNN, a simple but effective framework towards balanced learning for object detection. It integrates three novel components: IoU-balanced sampling, balanced feature pyramid, and balanced L1 loss, respectively for reducing the imbalance at sample, feature, and objective level. Benefitted from the overall balanced design, Libra R-CNN significantly improves the detection performance. Without bells and whistles, it achieves 2.5 points and 2.0 points higher Average Precision (AP) than FPN Faster R-CNN and RetinaNet respectively on MSCOCO. + +Instance recognition is rapidly advanced along with the developments of various deep convolutional neural networks. Compared to the architectures of networks, the training process, which is also crucial to the success of detectors, has received relatively less attention. In this work, we carefully revisit the standard training practice of detectors, and find that the detection performance is often limited by the imbalance during the training process, which generally consists in three levels - sample level, feature level, and objective level. To mitigate the adverse effects caused thereby, we propose Libra R-CNN, a simple yet effective framework towards balanced learning for instance recognition. It integrates IoU-balanced sampling, balanced feature pyramid, and objective re-weighting, respectively for reducing the imbalance at sample, feature, and objective level. Extensive experiments conducted on MS COCO, LVIS and Pascal VOC datasets prove the effectiveness of the overall balanced design. + +
+ +
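To make the configs that follow easier to scan, here is a condensed, illustrative sketch of how the three balancing components above show up as config fields in this folder. It is not an additional file in this diff; the values are copied from `libra_faster_rcnn_r50_fpn_1x_coco.py` below, with inheritance markers such as `_delete_=True` omitted.

```python
# Condensed sketch of Libra R-CNN's three components as mmdetection config fields
# (illustrative only; see the full config files in this folder for the real definitions).
model = dict(
    # Balanced feature pyramid: a BFP module refines the standard FPN outputs.
    neck=[
        dict(type='FPN', in_channels=[256, 512, 1024, 2048], out_channels=256, num_outs=5),
        dict(type='BFP', in_channels=256, num_levels=5, refine_level=2, refine_type='non_local'),
    ],
    # Balanced L1 loss replaces the usual smooth L1 box-regression loss.
    roi_head=dict(
        bbox_head=dict(
            loss_bbox=dict(type='BalancedL1Loss', alpha=0.5, gamma=1.5, beta=1.0, loss_weight=1.0))),
    # IoU-balanced sampling spreads hard negatives across IoU bins.
    train_cfg=dict(
        rcnn=dict(
            sampler=dict(
                type='CombinedSampler', num=512, pos_fraction=0.25, add_gt_as_proposals=True,
                pos_sampler=dict(type='InstanceBalancedPosSampler'),
                neg_sampler=dict(type='IoUBalancedNegSampler',
                                 floor_thr=-1, floor_fraction=0, num_bins=3)))))
```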
+ + + + +## Citation + + + +We provide config files to reproduce the results in the CVPR 2019 paper [Libra R-CNN](https://arxiv.org/pdf/1904.02701.pdf). + +The extended version of [Libra R-CNN](https://arxiv.org/pdf/2108.10175.pdf) is accepted by IJCV. + +``` +@inproceedings{pang2019libra, + title={Libra R-CNN: Towards Balanced Learning for Object Detection}, + author={Pang, Jiangmiao and Chen, Kai and Shi, Jianping and Feng, Huajun and Ouyang, Wanli and Lin, Dahua}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019} +} + +@article{pang2021towards, + title={Towards Balanced Learning for Instance Recognition}, + author={Pang, Jiangmiao and Chen, Kai and Li, Qi and Xu, Zhihai and Feng, Huajun and Shi, Jianping and Ouyang, Wanli and Lin, Dahua}, + journal={International Journal of Computer Vision}, + volume={129}, + number={5}, + pages={1376--1393}, + year={2021}, + publisher={Springer} +} +``` + +## Results and models + +The results on COCO 2017val are shown in the table below. (Results on test-dev are usually slightly higher than on val.) + +| Architecture | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:------------:|:---------------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| Faster R-CNN | R-50-FPN | pytorch | 1x | 4.6 | 19.0 | 38.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco/libra_faster_rcnn_r50_fpn_1x_coco_20200130-3afee3a9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco/libra_faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +| Fast R-CNN | R-50-FPN | pytorch | 1x | | | | | | +| Faster R-CNN | R-101-FPN | pytorch | 1x | 6.5 | 14.4 | 40.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco/libra_faster_rcnn_r101_fpn_1x_coco_20200203-8dba6a5a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco/libra_faster_rcnn_r101_fpn_1x_coco_20200203_001405.log.json) | +| Faster R-CNN | X-101-64x4d-FPN | pytorch | 1x | 10.8 | 8.5 | 42.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco/libra_faster_rcnn_x101_64x4d_fpn_1x_coco_20200315-3a7d0488.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco/libra_faster_rcnn_x101_64x4d_fpn_1x_coco_20200315_231625.log.json) | +| RetinaNet | R-50-FPN | pytorch | 1x | 4.2 | 17.7 | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_retinanet_r50_fpn_1x_coco/libra_retinanet_r50_fpn_1x_coco_20200205-804d94ce.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_retinanet_r50_fpn_1x_coco/libra_retinanet_r50_fpn_1x_coco_20200205_112757.log.json) | diff --git a/object_detection/configs/libra_rcnn/libra_fast_rcnn_r50_fpn_1x_coco.py
b/object_detection/configs/libra_rcnn/libra_fast_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..efbedc863c7eeeaef331121416141334906fef3d --- /dev/null +++ b/object_detection/configs/libra_rcnn/libra_fast_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,50 @@ +_base_ = '../fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py' +# model settings +model = dict( + neck=[ + dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + dict( + type='BFP', + in_channels=256, + num_levels=5, + refine_level=2, + refine_type='non_local') + ], + roi_head=dict( + bbox_head=dict( + loss_bbox=dict( + _delete_=True, + type='BalancedL1Loss', + alpha=0.5, + gamma=1.5, + beta=1.0, + loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rcnn=dict( + sampler=dict( + _delete_=True, + type='CombinedSampler', + num=512, + pos_fraction=0.25, + add_gt_as_proposals=True, + pos_sampler=dict(type='InstanceBalancedPosSampler'), + neg_sampler=dict( + type='IoUBalancedNegSampler', + floor_thr=-1, + floor_fraction=0, + num_bins=3))))) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +data = dict( + train=dict(proposal_file=data_root + + 'libra_proposals/rpn_r50_fpn_1x_train2017.pkl'), + val=dict(proposal_file=data_root + + 'libra_proposals/rpn_r50_fpn_1x_val2017.pkl'), + test=dict(proposal_file=data_root + + 'libra_proposals/rpn_r50_fpn_1x_val2017.pkl')) diff --git a/object_detection/configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py b/object_detection/configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e899706b8ca7780a95b41de14b85b05b427f9595 --- /dev/null +++ b/object_detection/configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './libra_faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py b/object_detection/configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..89a0d7b2bd83216dfc4db120fe9f610b23376681 --- /dev/null +++ b/object_detection/configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,41 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +# model settings +model = dict( + neck=[ + dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + dict( + type='BFP', + in_channels=256, + num_levels=5, + refine_level=2, + refine_type='non_local') + ], + roi_head=dict( + bbox_head=dict( + loss_bbox=dict( + _delete_=True, + type='BalancedL1Loss', + alpha=0.5, + gamma=1.5, + beta=1.0, + loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict(sampler=dict(neg_pos_ub=5), allowed_border=-1), + rcnn=dict( + sampler=dict( + _delete_=True, + type='CombinedSampler', + num=512, + pos_fraction=0.25, + add_gt_as_proposals=True, + pos_sampler=dict(type='InstanceBalancedPosSampler'), + neg_sampler=dict( + type='IoUBalancedNegSampler', + floor_thr=-1, + floor_fraction=0, + num_bins=3))))) diff --git a/object_detection/configs/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py b/object_detection/configs/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..06740a778f821d74b5206a9cada969bfee0a84cf --- /dev/null +++ 
b/object_detection/configs/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './libra_faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py b/object_detection/configs/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..be2742098fb8f1e46bbb16c9d3e2e20c2e3083aa --- /dev/null +++ b/object_detection/configs/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,26 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +# model settings +model = dict( + neck=[ + dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_input', + num_outs=5), + dict( + type='BFP', + in_channels=256, + num_levels=5, + refine_level=1, + refine_type='non_local') + ], + bbox_head=dict( + loss_bbox=dict( + _delete_=True, + type='BalancedL1Loss', + alpha=0.5, + gamma=1.5, + beta=0.11, + loss_weight=1.0))) diff --git a/object_detection/configs/libra_rcnn/metafile.yml b/object_detection/configs/libra_rcnn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..8c3279595f5743a6c616a9199b4c8c9614b89e80 --- /dev/null +++ b/object_detection/configs/libra_rcnn/metafile.yml @@ -0,0 +1,99 @@ +Collections: + - Name: Libra R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - IoU-Balanced Sampling + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Balanced Feature Pyramid + Paper: + URL: https://arxiv.org/abs/1904.02701 + Title: 'Libra R-CNN: Towards Balanced Learning for Object Detection' + README: configs/libra_rcnn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/necks/bfp.py#L10 + Version: v2.0.0 + +Models: + - Name: libra_faster_rcnn_r50_fpn_1x_coco + In Collection: Libra R-CNN + Config: configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.6 + inference time (ms/im): + - value: 52.63 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco/libra_faster_rcnn_r50_fpn_1x_coco_20200130-3afee3a9.pth + + - Name: libra_faster_rcnn_r101_fpn_1x_coco + In Collection: Libra R-CNN + Config: configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.5 + inference time (ms/im): + - value: 69.44 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco/libra_faster_rcnn_r101_fpn_1x_coco_20200203-8dba6a5a.pth + + - Name: libra_faster_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Libra R-CNN + Config: configs/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.8 + inference time (ms/im): + - value: 117.65 + hardware: V100 + backend: 
PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco/libra_faster_rcnn_x101_64x4d_fpn_1x_coco_20200315-3a7d0488.pth + + - Name: libra_retinanet_r50_fpn_1x_coco + In Collection: Libra R-CNN + Config: configs/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + inference time (ms/im): + - value: 56.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_retinanet_r50_fpn_1x_coco/libra_retinanet_r50_fpn_1x_coco_20200205-804d94ce.pth diff --git a/object_detection/configs/lvis/README.md b/object_detection/configs/lvis/README.md new file mode 100644 index 0000000000000000000000000000000000000000..daf27e8aa344b41990b4974ffa4e428f018489db --- /dev/null +++ b/object_detection/configs/lvis/README.md @@ -0,0 +1,58 @@ +# LVIS: A Dataset for Large Vocabulary Instance Segmentation + +## Abstract + + + +Progress on object detection is enabled by datasets that focus the research community's attention on open challenges. This process led us from simple images to complex scenes and from bounding boxes to segmentation masks. In this work, we introduce LVIS (pronounced `el-vis'): a new dataset for Large Vocabulary Instance Segmentation. We plan to collect ~2 million high-quality instance segmentation masks for over 1000 entry-level object categories in 164k images. Due to the Zipfian distribution of categories in natural images, LVIS naturally has a long tail of categories with few training samples. Given that state-of-the-art deep learning methods for object detection perform poorly in the low-sample regime, we believe that our dataset poses an important and exciting new scientific challenge. + + +
+ +
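The configs in this folder carry a `sample1e-3` tag, which refers to class-balanced oversampling used to counter the long tail described above (the Common Setting section below states the `1e-3` threshold used for all experiments). A minimal sketch of how such a wrapper is typically written in these mmdetection-style configs follows; the actual base dataset file (`../_base_/datasets/lvis_v1_instance.py`) is referenced by the configs but not included in this diff, so the field values here are illustrative assumptions.

```python
# Illustrative sketch (not a file from this diff): class-balanced oversampling for
# LVIS, where categories rarer than the threshold are repeated more often. The
# wrapper name and fields assume mmdetection's ClassBalancedDataset; paths are placeholders.
data_root = 'data/lvis_v1/'
data = dict(
    train=dict(
        type='ClassBalancedDataset',
        oversample_thr=1e-3,  # matches the `sample1e-3` tag in the config names
        dataset=dict(
            type='LVISV1Dataset',
            ann_file=data_root + 'annotations/lvis_v1_train.json',
            img_prefix=data_root)))
```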
+ + + + +## Citation + + + +```latex +@inproceedings{gupta2019lvis, + title={{LVIS}: A Dataset for Large Vocabulary Instance Segmentation}, + author={Gupta, Agrim and Dollar, Piotr and Girshick, Ross}, + booktitle={Proceedings of the {IEEE} Conference on Computer Vision and Pattern Recognition}, + year={2019} +} +``` + +## Common Setting + +* Please follow the [install guide](../../docs/get_started.md#install-mmdetection) to install the open-mmlab forked cocoapi first. +* Run the following script to install our forked lvis-api. + + ```shell + pip install git+https://github.com/lvis-dataset/lvis-api.git + ``` + +* All experiments use the oversample strategy described [here](../../docs/tutorials/customize_dataset.md#class-balanced-dataset) with an oversample threshold of `1e-3`. +* The size of LVIS v0.5 is half that of COCO, so schedule `2x` in LVIS runs roughly the same number of iterations as `1x` in COCO. + +## Results and models of LVIS v0.5 + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: |:--------: | +| R-50-FPN | pytorch | 2x | - | - | 26.1 | 25.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis-dbd06831.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_20200531_160435.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 27.1 | 27.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis-54582ee2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_20200601_134748.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 26.7 | 26.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis-3cf55ea2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_20200531_221749.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 26.4 | 26.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis-1c99a5ad.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_20200601_194651.log.json) | + +## Results and models of LVIS v1 + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------:
| :--------: | +| R-50-FPN | pytorch | 1x | 9.1 | - | 22.5 | 21.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1-aa78ac3d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1-20200829_061305.log.json) | +| R-101-FPN | pytorch | 1x | 10.8 | - | 24.6 | 23.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1-ec55ce32.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1-20200829_070959.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 11.8 | - | 26.7 | 25.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1-ebbc5c81.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1-20200829_071317.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 14.6 | - | 27.2 | 25.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1-43d9edfe.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1-20200830_060206.log.json) | diff --git a/object_detection/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py b/object_detection/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..0f017f585c78d9d8e1eebaeca0a9a6c518a6295a --- /dev/null +++ b/object_detection/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py b/object_detection/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py new file mode 100644 index 0000000000000000000000000000000000000000..637f4a63a55d24133a994eacc1e7a6521bfa3b9f --- /dev/null +++ b/object_detection/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py 
b/object_detection/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..92ddb526d7ea7a011e10aa82cbd1bd62773b35d6 --- /dev/null +++ b/object_detection/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py @@ -0,0 +1,31 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/lvis_v1_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict(num_classes=1203), mask_head=dict(num_classes=1203)), + test_cfg=dict( + rcnn=dict( + score_thr=0.0001, + # LVIS allows up to 300 + max_per_img=300))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(dataset=dict(pipeline=train_pipeline))) diff --git a/object_detection/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py b/object_detection/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py new file mode 100644 index 0000000000000000000000000000000000000000..d53c5dc6a1470e4cca209a26c8261dd66c60e9b1 --- /dev/null +++ b/object_detection/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py @@ -0,0 +1,31 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/lvis_v0.5_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict(num_classes=1230), mask_head=dict(num_classes=1230)), + test_cfg=dict( + rcnn=dict( + score_thr=0.0001, + # LVIS allows up to 300 + max_per_img=300))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(dataset=dict(pipeline=train_pipeline))) diff --git a/object_detection/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py b/object_detection/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..a6115c1ad03317e32915102212cf878101fa671d --- /dev/null +++ b/object_detection/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + 
init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py b/object_detection/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py new file mode 100644 index 0000000000000000000000000000000000000000..96b625230f37906e32ad872b6e947285432f60d6 --- /dev/null +++ b/object_detection/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py b/object_detection/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..0f95a7321d9a7b7f9cb98adf31d6238156c21de6 --- /dev/null +++ b/object_detection/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py b/object_detection/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py new file mode 100644 index 0000000000000000000000000000000000000000..986acda589899e49c7d22df6455200e22bc5a940 --- /dev/null +++ b/object_detection/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/mask_rcnn/README.md b/object_detection/configs/mask_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5080a0493e7cfdf4f5c188c1254db54e489a34d1 --- /dev/null +++ b/object_detection/configs/mask_rcnn/README.md @@ -0,0 +1,63 @@ +# Mask R-CNN + +## Abstract + + + +We present a conceptually simple, flexible, and general framework for object instance segmentation. Our approach efficiently detects objects in an image while simultaneously generating a high-quality segmentation mask for each instance. The method, called Mask R-CNN, extends Faster R-CNN by adding a branch for predicting an object mask in parallel with the existing branch for bounding box recognition. Mask R-CNN is simple to train and adds only a small overhead to Faster R-CNN, running at 5 fps. Moreover, Mask R-CNN is easy to generalize to other tasks, e.g., allowing us to estimate human poses in the same framework. 
We show top results in all three tracks of the COCO suite of challenges, including instance segmentation, bounding-box object detection, and person keypoint detection. Without bells and whistles, Mask R-CNN outperforms all existing, single-model entries on every task, including the COCO 2016 challenge winners. We hope our simple and effective approach will serve as a solid baseline and help ease future research in instance-level recognition. + + +
+ +
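As a reading aid for the configs in this folder, the sketch below condenses how the parallel box and mask branches described above are expressed in the mmdetection config system these files extend. The head types are assumed from the referenced base model `../_base_/models/mask_rcnn_r50_fpn.py`, which is not part of this diff, so treat the exact names as illustrative.

```python
# Condensed, illustrative sketch (not a file from this diff): Mask R-CNN keeps the
# Faster R-CNN box branch and adds a mask branch next to it inside the RoI head.
# Head types are assumed from the referenced base model file.
model = dict(
    type='MaskRCNN',
    roi_head=dict(
        type='StandardRoIHead',
        bbox_head=dict(type='Shared2FCBBoxHead', num_classes=80),  # box classification/regression branch
        mask_head=dict(type='FCNMaskHead', num_classes=80)))       # mask prediction branch added in parallel
```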
+ + + + +## Citation + + + +```latex +@article{He_2017, + title={Mask R-CNN}, + journal={2017 IEEE International Conference on Computer Vision (ICCV)}, + publisher={IEEE}, + author={He, Kaiming and Gkioxari, Georgia and Dollar, Piotr and Girshick, Ross}, + year={2017}, + month={Oct} +} +``` + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 4.3 | | 38.0 | 34.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco/mask_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.38__segm_mAP-0.344_20200504_231812-0ebd1859.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco/mask_rcnn_r50_caffe_fpn_1x_coco_20200504_231812.log.json) | +| R-50-FPN | pytorch | 1x | 4.4 | 16.1 | 38.2 | 34.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205_050542.log.json) | +| R-50-FPN (FP16) | pytorch | 1x | 3.6 | 24.1 | 38.1 | 34.7 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_fp16_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_1x_coco/mask_rcnn_r50_fpn_fp16_1x_coco_20200205-59faf7e4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_1x_coco/mask_rcnn_r50_fpn_fp16_1x_coco_20200205_130539.log.json) | +| R-50-FPN | pytorch | 2x | - | - | 39.2 | 35.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_2x_coco/mask_rcnn_r50_fpn_2x_coco_bbox_mAP-0.392__segm_mAP-0.354_20200505_003907-3e542a40.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_2x_coco/mask_rcnn_r50_fpn_2x_coco_20200505_003907.log.json) | +| R-101-FPN | caffe | 1x | | | 40.4 | 36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco/mask_rcnn_r101_caffe_fpn_1x_coco_20200601_095758-805e06c1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco/mask_rcnn_r101_caffe_fpn_1x_coco_20200601_095758.log.json)| +| R-101-FPN | pytorch | 1x | 6.4 | 13.5 | 40.0 | 36.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204-1efe0ed5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204_144809.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 40.8 | 36.6 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_2x_coco/mask_rcnn_r101_fpn_2x_coco_bbox_mAP-0.408__segm_mAP-0.366_20200505_071027-14b391c7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_2x_coco/mask_rcnn_r101_fpn_2x_coco_20200505_071027.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.6 | 11.3 | 41.9 | 37.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205-478d0b67.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205_034906.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 42.2 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco/mask_rcnn_x101_32x4d_fpn_2x_coco_bbox_mAP-0.422__segm_mAP-0.378_20200506_004702-faef898c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco/mask_rcnn_x101_32x4d_fpn_2x_coco_20200506_004702.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.7 | 8.0 | 42.8 | 38.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco/mask_rcnn_x101_64x4d_fpn_1x_coco_20200201-9352eb0d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco/mask_rcnn_x101_64x4d_fpn_1x_coco_20200201_124310.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 42.7 | 38.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco/mask_rcnn_x101_64x4d_fpn_2x_coco_20200509_224208-39d6f70c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco/mask_rcnn_x101_64x4d_fpn_2x_coco_20200509_224208.log.json)| +| X-101-32x8d-FPN | pytorch | 1x | - | - | 42.8 | 38.3 | | + +## Pre-trained Models + +We also train some models with longer schedules and multi-scale training. The users could finetune them for downstream tasks. 
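One common way to do that with the mmdetection tooling these configs target is to inherit one of the configs below and point `load_from` at its released checkpoint; a minimal, hedged sketch follows (the downstream class count is a placeholder, and the checkpoint URL is the R-50-FPN 3x entry from the table below).

```python
# Minimal fine-tuning sketch (illustrative): reuse a pre-trained Mask R-CNN from the
# table below and adapt the heads to a downstream dataset. num_classes is a placeholder.
_base_ = './mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py'
model = dict(
    roi_head=dict(
        bbox_head=dict(num_classes=10),   # set to the number of downstream classes
        mask_head=dict(num_classes=10)))
load_from = ('https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/'
             'mask_rcnn_r50_fpn_mstrain-poly_3x_coco/'
             'mask_rcnn_r50_fpn_mstrain-poly_3x_coco_20210524_201154-21b550bb.pth')
```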
+ +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| [R-50-FPN](./mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py) | caffe | 2x | 4.3 | | 40.3 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco_bbox_mAP-0.403__segm_mAP-0.365_20200504_231822-a75c98ce.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco_20200504_231822.log.json) +| [R-50-FPN](./mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py) | caffe | 3x | 4.3 | | 40.8 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_20200504_163245.log.json) +| [R-50-FPN](./mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py) | pytorch| 3x | 4.1 | | 40.9 | 37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_fpn_mstrain-poly_3x_coco_20210524_201154-21b550bb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_fpn_mstrain-poly_3x_coco_20210524_201154.log.json) +| [R-101-FPN](./mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py) | caffe | 3x | 5.9 | | 42.9 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco_20210526_132339-3c33ce02.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco_20210526_132339.log.json) +| [R-101-FPN](./mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py) | pytorch| 3x | 6.1 | | 42.7 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_fpn_mstrain-poly_3x_coco_20210524_200244-5675c317.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_fpn_mstrain-poly_3x_coco_20210524_200244.log.json) +| [x101-32x4d-FPN](./mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py) | pytorch| 3x | 7.3 | | 43.6 | 39.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco_20210524_201410-abcd7859.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco_20210524_201410.log.json) +| [X-101-32x8d-FPN](./mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py) | pytorch | 1x | - | | 43.6 | 39.0 | +| [X-101-32x8d-FPN](./mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py) | pytorch | 3x | 10.3 | | 44.3 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco_20210607_161042-8bd2c639.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco_20210607_161042.log.json) +| [X-101-64x4d-FPN](./mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py) | pytorch | 3x | 10.4 | | 44.5 | 39.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco_20210526_120447-c376f129.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco_20210526_120447.log.json) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..95b324f59144e6a894ad30e01859af148aa699d6 --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './mask_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e39781dcffff82430cbee16e15af8f54c2e44814 --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,55 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + depth=101, + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + 
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b7986e8576642e631cfcdc9b274c49a17671e8b1 --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c9059d5385a960172dfe01c6d9a25d3089d96649 --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0696cbe756a1f885e2660adc626ef055542fb54f --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,10 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_c4_1x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_c4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a44c01831b508da0a5e1ca3720bb437bcea086d1 --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_c4_1x_coco.py @@ -0,0 +1,39 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_caffe_c4.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + 
dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5a23f8c7cd21ef5025def03d4743d03103d821c5 --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,40 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6308e40416a2be6ab5cbcc6826faff8556bc0b16 --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py @@ -0,0 +1,49 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', 
**img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4f7150ca718e2ead46eb63e74b6be06f50aa0fce --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1b48a2104baf0df935954897ae4a991b38684d78 --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py' +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..bebbaaab05c099f575d94fbb5ae6bef57d4b4177 --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py b/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..3f8079d3629e35c246819136cf2f292865b99d41 --- /dev/null +++ 
b/object_detection/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py @@ -0,0 +1,61 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + rpn_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + bbox_roi_extractor=dict( + roi_layer=dict( + type='RoIAlign', + output_size=7, + sampling_ratio=2, + aligned=False)), + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + mask_roi_extractor=dict( + roi_layer=dict( + type='RoIAlign', + output_size=14, + sampling_ratio=2, + aligned=False)))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6a6c92460f1d58b8e8d361fb56ee123f2668ad9f --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..932b1f905155a0d3285daefc4891f5194705e30d --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_fp16_1x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_fp16_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fb8289b06c40697db5d42f37f80a0e67ff4fb4e7 --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_fp16_1x_coco.py @@ -0,0 +1,3 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +# fp16 settings +fp16 = dict(loss_scale=512.) 
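Regarding the FP16 config that closes the file above: in the mmcv/mmdetection training loop these configs assume, `fp16 = dict(loss_scale=512.)` enables mixed-precision training with a fixed loss scale (mmcv's `Fp16OptimizerHook` is the assumed mechanism). The sketch below shows, in plain PyTorch terms, roughly what static loss scaling amounts to; it is an illustration, not the code path used by this repository.

```python
# Rough illustration of static loss scaling (assumption-level sketch, not this repo's code):
# the loss is scaled up before backward() so small FP16 gradients do not underflow,
# and gradients are unscaled again before the optimizer step.
def fp16_step(model, loss, optimizer, loss_scale=512.0):
    optimizer.zero_grad()
    (loss * loss_scale).backward()        # scaled backward pass
    for param in model.parameters():
        if param.grad is not None:
            param.grad.div_(loss_scale)   # unscale gradients
    optimizer.step()
```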
diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b3d9242cd222d9da0bb7cc531130456f1031266f --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_poly_1x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_poly_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9eb6d57e0d25370a59472a4ceb1a3b9da6574608 --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_poly_1x_coco.py @@ -0,0 +1,23 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a8b3799b3482c840a4fcb5201a7dede23a0e073c --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2cd3cee5a102b49750e5b265ec6775907f1a9545 --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r101_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b698a7d219320ca93e49b8dd5fd807005f469dce --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,18 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', 
+ '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_1x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..108ea4e34717953be59795b63f4f932f4329468f --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_1x_coco.py @@ -0,0 +1,65 @@ +_base_ = './mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=8, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnext101_32x8d'))) + +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6b912f692b7a833556e6f7ef02b483c4e33781ef --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py @@ -0,0 +1,60 @@ +_base_ = './mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=8, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnext101_32x8d'))) + +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) +train_pipeline = [ + 
dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8ba0e9c200fdc4ff196184c0b8e2320804037fbb --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,85 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=8, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnext101_32x8d'))) + +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 
'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2333b03a835a7d1d09df09749ebdc492db499f63 --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6074cca29f462e821206a6509ff8869ec86b5a68 --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_x101_32x4d_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py b/object_detection/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9f9cb1c4393b344fd9c5a25c04047e7e7a3cb54d --- /dev/null +++ b/object_detection/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,18 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/mask_rcnn/metafile.yml b/object_detection/configs/mask_rcnn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..f74bdf300459d1c980f99b2a118ca624224c7b58 --- /dev/null +++ b/object_detection/configs/mask_rcnn/metafile.yml @@ -0,0 +1,447 @@ +Collections: + - Name: Mask R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Softmax + - RPN + - Convolution + - Dense Connections + - FPN + - ResNet + - RoIAlign + Paper: + URL: https://arxiv.org/abs/1703.06870v3 + Title: "Mask R-CNN" + README: configs/mask_rcnn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/mask_rcnn.py#L6 + Version: v2.0.0 + +Models: + - Name: mask_rcnn_r50_caffe_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.3 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 
34.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco/mask_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.38__segm_mAP-0.344_20200504_231812-0ebd1859.pth + + - Name: mask_rcnn_r50_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 62.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 34.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth + + - Name: mask_rcnn_r50_fpn_fp16_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_fpn_fp16_1x_coco.py + Metadata: + Training Memory (GB): 3.6 + Training Techniques: + - SGD with Momentum + - Weight Decay + - Mixed Precision Training + inference time (ms/im): + - value: 41.49 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP16 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 34.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_1x_coco/mask_rcnn_r50_fpn_fp16_1x_coco_20200205-59faf7e4.pth + + - Name: mask_rcnn_r50_fpn_2x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 62.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 35.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_2x_coco/mask_rcnn_r50_fpn_2x_coco_bbox_mAP-0.392__segm_mAP-0.354_20200505_003907-3e542a40.pth + + - Name: mask_rcnn_r101_caffe_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco/mask_rcnn_r101_caffe_fpn_1x_coco_20200601_095758-805e06c1.pth + + - Name: mask_rcnn_r101_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 74.07 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204-1efe0ed5.pth + + - Name: mask_rcnn_r101_fpn_2x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 74.07 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 
1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_2x_coco/mask_rcnn_r101_fpn_2x_coco_bbox_mAP-0.408__segm_mAP-0.366_20200505_071027-14b391c7.pth + + - Name: mask_rcnn_x101_32x4d_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 88.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205-478d0b67.pth + + - Name: mask_rcnn_x101_32x4d_fpn_2x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 88.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco/mask_rcnn_x101_32x4d_fpn_2x_coco_bbox_mAP-0.422__segm_mAP-0.378_20200506_004702-faef898c.pth + + - Name: mask_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.7 + inference time (ms/im): + - value: 125 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco/mask_rcnn_x101_64x4d_fpn_1x_coco_20200201-9352eb0d.pth + + - Name: mask_rcnn_x101_64x4d_fpn_2x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 10.7 + inference time (ms/im): + - value: 125 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco/mask_rcnn_x101_64x4d_fpn_2x_coco_20200509_224208-39d6f70c.pth + + - Name: mask_rcnn_x101_32x8d_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.7 + inference time (ms/im): + - value: 125 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.3 + + - Name: mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco + In Collection: Mask R-CNN + Config: 
configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py + Metadata: + Training Memory (GB): 4.3 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco_bbox_mAP-0.403__segm_mAP-0.365_20200504_231822-a75c98ce.pth + + - Name: mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 4.3 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth + + - Name: mask_rcnn_r50_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 4.1 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_fpn_mstrain-poly_3x_coco_20210524_201154-21b550bb.pth + + - Name: mask_rcnn_r101_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 6.1 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_fpn_mstrain-poly_3x_coco_20210524_200244-5675c317.pth + + - Name: mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 5.9 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco_20210526_132339-3c33ce02.pth + + - Name: mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 7.3 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco_20210524_201410-abcd7859.pth + + - Name: mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.6 + - Task: 
Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.0 + + - Name: mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco + Metadata: + Training Memory (GB): 10.3 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco_20210607_161042-8bd2c639.pth + + - Name: mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py + Metadata: + Epochs: 36 + Training Memory (GB): 10.4 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco_20210526_120447-c376f129.pth diff --git a/object_detection/configs/ms_rcnn/README.md b/object_detection/configs/ms_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5a80c0d0c50aa43c6a6137d3bb239aa7c101fd2c --- /dev/null +++ b/object_detection/configs/ms_rcnn/README.md @@ -0,0 +1,40 @@ +# Mask Scoring R-CNN + +## Abstract + + + +Letting a deep network be aware of the quality of its own predictions is an interesting yet important problem. In the task of instance segmentation, the confidence of instance classification is used as mask quality score in most instance segmentation frameworks. However, the mask quality, quantified as the IoU between the instance mask and its ground truth, is usually not well correlated with classification score. In this paper, we study this problem and propose Mask Scoring R-CNN which contains a network block to learn the quality of the predicted instance masks. The proposed network block takes the instance feature and the corresponding predicted mask together to regress the mask IoU. The mask scoring strategy calibrates the misalignment between mask quality and mask score, and improves instance segmentation performance by prioritizing more accurate mask predictions during COCO AP evaluation. By extensive evaluations on the COCO dataset, Mask Scoring R-CNN brings consistent and noticeable gain with different models, and outperforms the state-of-the-art Mask R-CNN. We hope our simple and effective approach will provide a new direction for improving instance segmentation. + + +
+ +
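To make the mask-scoring idea above concrete, here is a minimal PyTorch sketch. It is not the `MaskIoUHead` wired up by the `ms_rcnn` configs below; the layer sizes, the class-agnostic IoU output, and the dummy tensors are simplifications chosen for brevity. The head looks at an RoI feature together with its predicted mask, regresses a mask-IoU estimate, and the final instance score is the classification confidence multiplied by that estimate.

```python
# Toy illustration of mask scoring (not the MMDetection MaskIoUHead).
import torch
import torch.nn as nn

class TinyMaskIoUHead(nn.Module):
    """Toy MaskIoU head: pools the RoI feature plus the predicted mask and regresses an IoU."""

    def __init__(self, in_channels=256, roi_size=14):
        super().__init__()
        self.convs = nn.Sequential(
            nn.Conv2d(in_channels + 1, 256, 3, padding=1), nn.ReLU(),
            nn.Conv2d(256, 256, 3, stride=2, padding=1), nn.ReLU(),
        )
        self.fc = nn.Sequential(
            nn.Flatten(),
            nn.Linear(256 * (roi_size // 2) ** 2, 1024), nn.ReLU(),
            nn.Linear(1024, 1),  # one class-agnostic IoU estimate per RoI (a simplification)
        )

    def forward(self, roi_feat, mask_pred):
        # Concatenate the predicted mask as an extra channel, then regress an IoU in [0, 1].
        x = torch.cat([roi_feat, mask_pred], dim=1)
        return torch.sigmoid(self.fc(self.convs(x)))

# Rescoring: final mask score = classification confidence * predicted mask IoU.
roi_feat = torch.randn(8, 256, 14, 14)   # dummy RoI features for 8 instances
mask_pred = torch.rand(8, 1, 14, 14)     # dummy mask predictions (same spatial size here)
cls_score = torch.rand(8)                # dummy classification confidences
mask_iou = TinyMaskIoUHead()(roi_feat, mask_pred).squeeze(1)
final_score = cls_score * mask_iou       # used in place of cls_score when ranking masks
```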
+ + + + +## Citation + + + +``` +@inproceedings{huang2019msrcnn, + title={Mask Scoring R-CNN}, + author={Zhaojin Huang and Lichao Huang and Yongchao Gong and Chang Huang and Xinggang Wang}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019}, +} +``` + +## Results and Models + +| Backbone | style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:-------------:|:----------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | caffe | 1x | 4.5 | | 38.2 | 36.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco/ms_rcnn_r50_caffe_fpn_1x_coco_20200702_180848-61c9355e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco/ms_rcnn_r50_caffe_fpn_1x_coco_20200702_180848.log.json) | +| R-50-FPN | caffe | 2x | - | - | 38.8 | 36.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco/ms_rcnn_r50_caffe_fpn_2x_coco_bbox_mAP-0.388__segm_mAP-0.363_20200506_004738-ee87b137.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco/ms_rcnn_r50_caffe_fpn_2x_coco_20200506_004738.log.json) | +| R-101-FPN | caffe | 1x | 6.5 | | 40.4 | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco/ms_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.404__segm_mAP-0.376_20200506_004755-b9b12a37.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco/ms_rcnn_r101_caffe_fpn_1x_coco_20200506_004755.log.json) | +| R-101-FPN | caffe | 2x | - | - | 41.1 | 38.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco/ms_rcnn_r101_caffe_fpn_2x_coco_bbox_mAP-0.411__segm_mAP-0.381_20200506_011134-5f3cc74f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco/ms_rcnn_r101_caffe_fpn_2x_coco_20200506_011134.log.json) | +| R-X101-32x4d | pytorch | 2x | 7.9 | 11.0 | 41.8 | 38.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco/ms_rcnn_x101_32x4d_fpn_1x_coco_20200206-81fd1740.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco/ms_rcnn_x101_32x4d_fpn_1x_coco_20200206_100113.log.json) | +| R-X101-64x4d | pytorch | 1x | 11.0 | 8.0 | 43.0 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco/ms_rcnn_x101_64x4d_fpn_1x_coco_20200206-86ba88d2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco/ms_rcnn_x101_64x4d_fpn_1x_coco_20200206_091744.log.json) | +| R-X101-64x4d | pytorch | 2x | 11.0 | 8.0 | 42.6 | 39.5 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco/ms_rcnn_x101_64x4d_fpn_2x_coco_20200308-02a445e2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco/ms_rcnn_x101_64x4d_fpn_2x_coco_20200308_012247.log.json) | diff --git a/object_detection/configs/ms_rcnn/metafile.yml b/object_detection/configs/ms_rcnn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..a6c7dc595c7ded3bcf1933ea77fa34cb353bca30 --- /dev/null +++ b/object_detection/configs/ms_rcnn/metafile.yml @@ -0,0 +1,159 @@ +Collections: + - Name: Mask Scoring R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RPN + - FPN + - ResNet + - RoIAlign + Paper: + URL: https://arxiv.org/abs/1903.00241 + Title: 'Mask Scoring R-CNN' + README: configs/ms_rcnn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/mask_scoring_rcnn.py#L6 + Version: v2.0.0 + +Models: + - Name: ms_rcnn_r50_caffe_fpn_1x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco/ms_rcnn_r50_caffe_fpn_1x_coco_20200702_180848-61c9355e.pth + + - Name: ms_rcnn_r50_caffe_fpn_2x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco/ms_rcnn_r50_caffe_fpn_2x_coco_bbox_mAP-0.388__segm_mAP-0.363_20200506_004738-ee87b137.pth + + - Name: ms_rcnn_r101_caffe_fpn_1x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco/ms_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.404__segm_mAP-0.376_20200506_004755-b9b12a37.pth + + - Name: ms_rcnn_r101_caffe_fpn_2x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco/ms_rcnn_r101_caffe_fpn_2x_coco_bbox_mAP-0.411__segm_mAP-0.381_20200506_011134-5f3cc74f.pth + + - Name: ms_rcnn_x101_32x4d_fpn_1x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.9 + inference time (ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + 
resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco/ms_rcnn_x101_32x4d_fpn_1x_coco_20200206-81fd1740.pth + + - Name: ms_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 11.0 + inference time (ms/im): + - value: 125 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco/ms_rcnn_x101_64x4d_fpn_1x_coco_20200206-86ba88d2.pth + + - Name: ms_rcnn_x101_64x4d_fpn_2x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 11.0 + inference time (ms/im): + - value: 125 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco/ms_rcnn_x101_64x4d_fpn_2x_coco_20200308-02a445e2.pth diff --git a/object_detection/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py b/object_detection/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9b7dcbbf145bb9705ae9628440349f6a5fecc438 --- /dev/null +++ b/object_detection/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './ms_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/object_detection/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py b/object_detection/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..202bccedae84657737b0315394199208d0307ae4 --- /dev/null +++ b/object_detection/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './ms_rcnn_r101_caffe_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py b/object_detection/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5845125a7b3ee70deeaa545c16d1211b4fcb1d06 --- /dev/null +++ b/object_detection/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + type='MaskScoringRCNN', + roi_head=dict( + type='MaskScoringRoIHead', + mask_iou_head=dict( + type='MaskIoUHead', + num_convs=4, + num_fcs=2, + roi_feat_size=14, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + num_classes=80)), + # model training and testing settings + train_cfg=dict(rcnn=dict(mask_thr_binary=0.5))) diff --git a/object_detection/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py 
b/object_detection/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..008a70ae67454c3fd470c29ffd000b18db391c8e --- /dev/null +++ b/object_detection/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './ms_rcnn_r50_caffe_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/ms_rcnn/ms_rcnn_r50_fpn_1x_coco.py b/object_detection/configs/ms_rcnn/ms_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0a163ce445c35d51a9d8940e46697c5c6a39d354 --- /dev/null +++ b/object_detection/configs/ms_rcnn/ms_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + type='MaskScoringRCNN', + roi_head=dict( + type='MaskScoringRoIHead', + mask_iou_head=dict( + type='MaskIoUHead', + num_convs=4, + num_fcs=2, + roi_feat_size=14, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + num_classes=80)), + # model training and testing settings + train_cfg=dict(rcnn=dict(mask_thr_binary=0.5))) diff --git a/object_detection/configs/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py b/object_detection/configs/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..20479bbd70ce039789d8df346d270fde898bbc26 --- /dev/null +++ b/object_detection/configs/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ms_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py b/object_detection/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ee5b7341663049f6eb8b99c8fec1f54964c698aa --- /dev/null +++ b/object_detection/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ms_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py b/object_detection/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..54c605b94aa5fc8b1ddf2267ed349c2fcd08cc9e --- /dev/null +++ b/object_detection/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './ms_rcnn_x101_64x4d_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/nas_fcos/README.md b/object_detection/configs/nas_fcos/README.md new file mode 100644 index 0000000000000000000000000000000000000000..2e3b56ec6fe439e2298e762ae78fee19f6413778 --- /dev/null +++ b/object_detection/configs/nas_fcos/README.md @@ -0,0 +1,39 @@ +# NAS-FCOS: Fast Neural Architecture Search for Object Detection + +## Abstract + + + +The success of deep neural networks relies on 
significant architecture engineering. Recently neural architecture search (NAS) has emerged as a promise to greatly reduce manual effort in network design by automatically searching for optimal architectures, although typically such algorithms need an excessive amount of computational resources, e.g., a few thousand GPU-days. To date, on challenging vision tasks such as object detection, NAS, especially fast versions of NAS, is less studied. Here we propose to search for the decoder structure of object detectors with search efficiency being taken into consideration. To be more specific, we aim to efficiently search for the feature pyramid network (FPN) as well as the prediction head of a simple anchor-free object detector, namely FCOS, using a tailored reinforcement learning paradigm. With carefully designed search space, search algorithms and strategies for evaluating network quality, we are able to efficiently search a top-performing detection architecture within 4 days using 8 V100 GPUs. The discovered architecture surpasses state-of-the-art object detection models (such as Faster R-CNN, RetinaNet and FCOS) by 1.5 to 3.5 points in AP on the COCO dataset, with comparable computation complexity and memory footprint, demonstrating the efficacy of the proposed NAS for object detection. + + +
+ +
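As a purely conceptual toy of the search loop described above (NAS-FCOS itself uses a tailored reinforcement-learning controller, not random sampling, and every name below is illustrative): sample a candidate decoder structure from a small op search space, score it with a cheap proxy evaluation, and keep the best candidate.

```python
# Conceptual toy only: random search over a tiny decoder op space with a dummy proxy score.
import random

OPS = ['skip', 'conv3x3', 'sep_conv3x3', 'def_conv3x3']  # illustrative decoder op choices

def sample_decoder(num_layers=6):
    """Sample one candidate decoder: an (op, input index) pair per layer, inputs limited to earlier layers."""
    return [(random.choice(OPS), random.randrange(i + 1)) for i in range(num_layers)]

def proxy_score(arch):
    """Stand-in for a short proxy training/evaluation run; returns a dummy quality score."""
    return random.random()

random.seed(0)
best_arch, best_score = None, float('-inf')
for _ in range(20):                      # evaluate 20 sampled candidates
    arch = sample_decoder()
    score = proxy_score(arch)
    if score > best_score:
        best_arch, best_score = arch, score
print(best_arch, round(best_score, 3))
```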
+ + + + +## Citation + + + +```latex +@article{wang2019fcos, + title={Nas-fcos: Fast neural architecture search for object detection}, + author={Wang, Ning and Gao, Yang and Chen, Hao and Wang, Peng and Tian, Zhi and Shen, Chunhua}, + journal={arXiv preprint arXiv:1906.04423}, + year={2019} +} +``` + +## Results and Models + +| Head | Backbone | Style | GN-head | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:---------:|:-------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| NAS-FCOSHead | R-50 | caffe | Y | 1x | | | 39.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200520-1bdba3ce.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200520.log.json) | +| FCOSHead | R-50 | caffe | Y | 1x | | | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200521-7fdcbce0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200521.log.json) | + +**Notes:** + +- To be consistent with the author's implementation, we use 4 GPUs with 4 images/GPU. 
diff --git a/object_detection/configs/nas_fcos/metafile.yml b/object_detection/configs/nas_fcos/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..1ea28cfc34034c75d96f8d2f52b54dffa13c75d5 --- /dev/null +++ b/object_detection/configs/nas_fcos/metafile.yml @@ -0,0 +1,44 @@ +Collections: + - Name: NAS-FCOS + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 4x V100 GPUs + Architecture: + - FPN + - NAS-FCOS + - ResNet + Paper: + URL: https://arxiv.org/abs/1906.04423 + Title: 'NAS-FCOS: Fast Neural Architecture Search for Object Detection' + README: configs/nas_fcos/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/detectors/nasfcos.py#L6 + Version: v2.1.0 + +Models: + - Name: nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco + In Collection: NAS-FCOS + Config: configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200520-1bdba3ce.pth + + - Name: nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco + In Collection: NAS-FCOS + Config: configs/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200521-7fdcbce0.pth diff --git a/object_detection/configs/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py b/object_detection/configs/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a455c9285cc892c8766df28d526fcd106272a09e --- /dev/null +++ b/object_detection/configs/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py @@ -0,0 +1,100 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='NASFCOS', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False, eps=0), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + neck=dict( + type='NASFCOS_FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + num_outs=5, + norm_cfg=dict(type='BN'), + conv_cfg=dict(type='DCNv2', deform_groups=2)), + bbox_head=dict( + type='FCOSHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + norm_cfg=dict(type='GN', num_groups=32), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', loss_weight=1.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)), + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + 
nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) + +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) + +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] + +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + samples_per_gpu=4, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) diff --git a/object_detection/configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py b/object_detection/configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b779492527850ca8ea52f7aa8c17d6c3543fa368 --- /dev/null +++ b/object_detection/configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py @@ -0,0 +1,99 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='NASFCOS', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False, eps=0), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + neck=dict( + type='NASFCOS_FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + num_outs=5, + norm_cfg=dict(type='BN'), + conv_cfg=dict(type='DCNv2', deform_groups=2)), + bbox_head=dict( + type='NASFCOSHead', + num_classes=80, + in_channels=256, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + norm_cfg=dict(type='GN', num_groups=32), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', loss_weight=1.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)), + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) + +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) + +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] + +test_pipeline = [ + 
dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + samples_per_gpu=4, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) diff --git a/object_detection/configs/nas_fpn/README.md b/object_detection/configs/nas_fpn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b2836d743091dd839b44b77d39f213d489db8039 --- /dev/null +++ b/object_detection/configs/nas_fpn/README.md @@ -0,0 +1,40 @@ +# NAS-FPN: Learning Scalable Feature Pyramid Architecture for Object Detection + +## Abstract + + + +Current state-of-the-art convolutional architectures for object detection are manually designed. Here we aim to learn a better architecture of feature pyramid network for object detection. We adopt Neural Architecture Search and discover a new feature pyramid architecture in a novel scalable search space covering all cross-scale connections. The discovered architecture, named NAS-FPN, consists of a combination of top-down and bottom-up connections to fuse features across scales. NAS-FPN, combined with various backbone models in the RetinaNet framework, achieves better accuracy and latency tradeoff compared to state-of-the-art object detection models. NAS-FPN improves mobile detection accuracy by 2 AP compared to state-of-the-art SSDLite with MobileNetV2 model in [32] and achieves 48.3 AP which surpasses Mask R-CNN [10] detection accuracy with less computation time. + + +
+ +
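Because the discovered pyramid is exposed as a drop-in neck, a derived MMDetection-style config can change how many NAS-FPN merging cells are stacked without touching the rest of the recipe. The snippet below is a hypothetical sketch rather than a file in this folder; it assumes the `retinanet_r50_nasfpn_crop640_50e_coco.py` config added later in this diff, whose neck uses `stack_times=7`.

```python
# Hypothetical derived config (sketch): inherit the RetinaNet + NAS-FPN recipe and
# stack 3 merging cells instead of the default 7.
_base_ = './retinanet_r50_nasfpn_crop640_50e_coco.py'
model = dict(neck=dict(stack_times=3))
```

The config merge keeps `type='NASFPN'` and the norm settings from the base file; only `stack_times` is overridden.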
+ + + + +## Citation + + + +```latex +@inproceedings{ghiasi2019fpn, + title={Nas-fpn: Learning scalable feature pyramid architecture for object detection}, + author={Ghiasi, Golnaz and Lin, Tsung-Yi and Le, Quoc V}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + pages={7036--7045}, + year={2019} +} +``` + +## Results and Models + +We benchmark the new training schedule (crop training, large batch, unfrozen BN, 50 epochs) introduced in NAS-FPN. RetinaNet is used in the paper. + +| Backbone | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:-----------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50-FPN | 50e | 12.9 | 22.9 | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_fpn_crop640_50e_coco/retinanet_r50_fpn_crop640_50e_coco-9b953d76.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_fpn_crop640_50e_coco/retinanet_r50_fpn_crop640_50e_coco_20200529_095329.log.json) | +| R-50-NASFPN | 50e | 13.2 | 23.0 | 40.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco/retinanet_r50_nasfpn_crop640_50e_coco-0ad1f644.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco/retinanet_r50_nasfpn_crop640_50e_coco_20200528_230008.log.json) | + +**Note**: We find that it is unstable to train NAS-FPN and there is a small chance that results can be 3% mAP lower. diff --git a/object_detection/configs/nas_fpn/metafile.yml b/object_detection/configs/nas_fpn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..ab8d649795d7847bdb1596f8aee845dbe02fb291 --- /dev/null +++ b/object_detection/configs/nas_fpn/metafile.yml @@ -0,0 +1,59 @@ +Collections: + - Name: NAS-FPN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - NAS-FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/1904.07392 + Title: 'NAS-FPN: Learning Scalable Feature Pyramid Architecture for Object Detection' + README: configs/nas_fpn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/necks/nas_fpn.py#L67 + Version: v2.0.0 + +Models: + - Name: retinanet_r50_fpn_crop640_50e_coco + In Collection: NAS-FPN + Config: configs/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py + Metadata: + Training Memory (GB): 12.9 + inference time (ms/im): + - value: 43.67 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_fpn_crop640_50e_coco/retinanet_r50_fpn_crop640_50e_coco-9b953d76.pth + + - Name: retinanet_r50_nasfpn_crop640_50e_coco + In Collection: NAS-FPN + Config: configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py + Metadata: + Training Memory (GB): 13.2 + inference time (ms/im): + - value: 43.48 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco/retinanet_r50_nasfpn_crop640_50e_coco-0ad1f644.pth diff --git a/object_detection/configs/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py b/object_detection/configs/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6ea44a05f52143694365737af5da0eb750c282f5 --- /dev/null +++ b/object_detection/configs/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py @@ -0,0 +1,80 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +cudnn_benchmark = True +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + relu_before_extra_convs=True, + no_norm_on_lateral=True, + norm_cfg=norm_cfg), + bbox_head=dict(type='RetinaSepBNHead', num_ins=5, norm_cfg=norm_cfg), + # training and testing settings + train_cfg=dict(assigner=dict(neg_iou_thr=0.5))) +# dataset settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=(640, 640), + ratio_range=(0.8, 1.2), + keep_ratio=True), + dict(type='RandomCrop', crop_size=(640, 640)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=(640, 640)), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(640, 640), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', + lr=0.08, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict(norm_decay_mult=0, bypass_duplicate=True)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[30, 40]) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=50) diff --git a/object_detection/configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py b/object_detection/configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3e039199ae6fb1b17af258c1fafa678625ddb3ea --- /dev/null +++ b/object_detection/configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py @@ -0,0 +1,79 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +cudnn_benchmark = True +# model settings +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + type='RetinaNet', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + 
norm_eval=False, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict(type='NASFPN', stack_times=7, norm_cfg=norm_cfg), + bbox_head=dict(type='RetinaSepBNHead', num_ins=5, norm_cfg=norm_cfg), + # training and testing settings + train_cfg=dict(assigner=dict(neg_iou_thr=0.5))) +# dataset settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=(640, 640), + ratio_range=(0.8, 1.2), + keep_ratio=True), + dict(type='RandomCrop', crop_size=(640, 640)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=(640, 640)), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(640, 640), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=128), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', + lr=0.08, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict(norm_decay_mult=0, bypass_duplicate=True)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[30, 40]) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=50) diff --git a/object_detection/configs/paa/README.md b/object_detection/configs/paa/README.md new file mode 100644 index 0000000000000000000000000000000000000000..05e65d2bfbcac241fbbee1f9ff3305da081e0c98 --- /dev/null +++ b/object_detection/configs/paa/README.md @@ -0,0 +1,51 @@ +# Probabilistic Anchor Assignment with IoU Prediction for Object Detection + +## Abstract + + + +In object detection, determining which anchors to assign as positive or negative samples, known as anchor assignment, has been revealed as a core procedure that can significantly affect a model's performance. In this paper we propose a novel anchor assignment strategy that adaptively separates anchors into positive and negative samples for a ground truth bounding box according to the model's learning status such that it is able to reason about the separation in a probabilistic manner. To do so we first calculate the scores of anchors conditioned on the model and fit a probability distribution to these scores. The model is then trained with anchors separated into positive and negative samples according to their probabilities. Moreover, we investigate the gap between the training and testing objectives and propose to predict the Intersection-over-Unions of detected boxes as a measure of localization quality to reduce the discrepancy. The combined score of classification and localization qualities serving as a box selection metric in non-maximum suppression well aligns with the proposed anchor assignment strategy and leads significant performance improvements. 
The proposed methods only add a single convolutional layer to the RetinaNet baseline and do not require multiple anchors per location, so they are efficient. Experimental results verify the effectiveness of the proposed methods. In particular, our models set new records for single-stage detectors on the MS COCO test-dev dataset with various backbones. + + +
+ +
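As a rough, self-contained illustration of the anchor-separation idea described in the abstract above — not the code this diff adds (mmdetection's `PAAHead` performs the assignment internally during training) — the sketch below fits a two-component Gaussian mixture to the per-anchor scores of a single ground-truth box and treats the higher-scoring mode as positives. The toy score values, the `paa_like_assignment` name, and the use of scikit-learn are illustrative assumptions.

```python
# Illustrative sketch only: separate candidate anchors for one ground-truth box
# into positives/negatives by fitting a two-component Gaussian mixture to their
# scores, in the spirit of probabilistic anchor assignment.
import numpy as np
from sklearn.mixture import GaussianMixture

def paa_like_assignment(anchor_scores: np.ndarray) -> np.ndarray:
    """Return a boolean mask of anchors treated as positive samples.

    anchor_scores: shape (N,), higher = better fit to the ground-truth box.
    """
    scores = anchor_scores.reshape(-1, 1)
    gmm = GaussianMixture(n_components=2, random_state=0).fit(scores)
    # The component with the larger mean models the "positive" score mode.
    pos_component = int(np.argmax(gmm.means_.ravel()))
    return gmm.predict(scores) == pos_component

# Toy usage: two score modes; the higher one should end up labelled positive.
rng = np.random.default_rng(0)
scores = np.concatenate([rng.normal(0.2, 0.05, 50), rng.normal(0.8, 0.05, 10)])
mask = paa_like_assignment(scores)
print(mask.sum(), "anchors assigned as positives")
```
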
+ + + + +## Citation + + + +```latex +@inproceedings{paa-eccv2020, + title={Probabilistic Anchor Assignment with IoU Prediction for Object Detection}, + author={Kim, Kang and Lee, Hee Seok}, + booktitle = {ECCV}, + year={2020} +} +``` + +## Results and Models + +We provide config files to reproduce the object detection results in the +ECCV 2020 paper for Probabilistic Anchor Assignment with IoU +Prediction for Object Detection. + +| Backbone | Lr schd | Mem (GB) | Score voting | box AP | Config | Download | +|:-----------:|:-------:|:--------:|:------------:|:------:|:------:|:--------:| +| R-50-FPN | 12e | 3.7 | True | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1x_coco/paa_r50_fpn_1x_coco_20200821-936edec3.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1x_coco/paa_r50_fpn_1x_coco_20200821-936edec3.log.json) | +| R-50-FPN | 12e | 3.7 | False | 40.2 | - | +| R-50-FPN | 18e | 3.7 | True | 41.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r50_fpn_1.5x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1.5x_coco/paa_r50_fpn_1.5x_coco_20200823-805d6078.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1.5x_coco/paa_r50_fpn_1.5x_coco_20200823-805d6078.log.json) | +| R-50-FPN | 18e | 3.7 | False | 41.2 | - | +| R-50-FPN | 24e | 3.7 | True | 41.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r50_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_2x_coco/paa_r50_fpn_2x_coco_20200821-c98bfc4e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_2x_coco/paa_r50_fpn_2x_coco_20200821-c98bfc4e.log.json) | +| R-50-FPN | 36e | 3.7 | True | 43.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r50_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_mstrain_3x_coco/paa_r50_fpn_mstrain_3x_coco_20210121_145722-06a6880b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_mstrain_3x_coco/paa_r50_fpn_mstrain_3x_coco_20210121_145722.log.json) | +| R-101-FPN | 12e | 6.2 | True | 42.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_1x_coco/paa_r101_fpn_1x_coco_20200821-0a1825a4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_1x_coco/paa_r101_fpn_1x_coco_20200821-0a1825a4.log.json) | +| R-101-FPN | 12e | 6.2 | False | 42.4 | - | +| R-101-FPN | 24e | 6.2 | True | 43.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r101_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_2x_coco/paa_r101_fpn_2x_coco_20200821-6829f96b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_2x_coco/paa_r101_fpn_2x_coco_20200821-6829f96b.log.json) | +| R-101-FPN | 36e | 6.2 | True | 45.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r101_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_mstrain_3x_coco/paa_r101_fpn_mstrain_3x_coco_20210122_084202-83250d22.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_mstrain_3x_coco/paa_r101_fpn_mstrain_3x_coco_20210122_084202.log.json) | + +**Note**: + +1. We find that the performance is unstable with 1x setting and may fluctuate by about 0.2 mAP. We report the best results. diff --git a/object_detection/configs/paa/metafile.yml b/object_detection/configs/paa/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..e08b663a7c45ff70e51b8da46fba27cfcc8aca88 --- /dev/null +++ b/object_detection/configs/paa/metafile.yml @@ -0,0 +1,104 @@ +Collections: + - Name: PAA + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - Probabilistic Anchor Assignment + - ResNet + Paper: + URL: https://arxiv.org/abs/2007.08103 + Title: 'Probabilistic Anchor Assignment with IoU Prediction for Object Detection' + README: configs/paa/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.4.0/mmdet/models/detectors/paa.py#L6 + Version: v2.4.0 + +Models: + - Name: paa_r50_fpn_1x_coco + In Collection: PAA + Config: configs/paa/paa_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.7 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1x_coco/paa_r50_fpn_1x_coco_20200821-936edec3.pth + + - Name: paa_r50_fpn_1.5x_coco + In Collection: PAA + Config: configs/paa/paa_r50_fpn_1.5x_coco.py + Metadata: + Training Memory (GB): 3.7 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1.5x_coco/paa_r50_fpn_1.5x_coco_20200823-805d6078.pth + + - Name: paa_r50_fpn_2x_coco + In Collection: PAA + Config: configs/paa/paa_r50_fpn_2x_coco.py + Metadata: + Training Memory (GB): 3.7 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_2x_coco/paa_r50_fpn_2x_coco_20200821-c98bfc4e.pth + + - Name: paa_r50_fpn_mstrain_3x_coco + In Collection: PAA + Config: configs/paa/paa_r50_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 3.7 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_mstrain_3x_coco/paa_r50_fpn_mstrain_3x_coco_20210121_145722-06a6880b.pth + + - Name: paa_r101_fpn_1x_coco + In Collection: PAA + Config: configs/paa/paa_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.2 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_1x_coco/paa_r101_fpn_1x_coco_20200821-0a1825a4.pth + + - Name: paa_r101_fpn_2x_coco + In Collection: PAA + Config: configs/paa/paa_r101_fpn_2x_coco.py + Metadata: + Training Memory (GB): 6.2 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_2x_coco/paa_r101_fpn_2x_coco_20200821-6829f96b.pth + + - Name: paa_r101_fpn_mstrain_3x_coco + In Collection: PAA + Config: configs/paa/paa_r101_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 6.2 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.1 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_mstrain_3x_coco/paa_r101_fpn_mstrain_3x_coco_20210122_084202-83250d22.pth diff --git a/object_detection/configs/paa/paa_r101_fpn_1x_coco.py b/object_detection/configs/paa/paa_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..94f1c278dc16c1befbca510ca0ac5ba407969f6d --- /dev/null +++ b/object_detection/configs/paa/paa_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './paa_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/paa/paa_r101_fpn_2x_coco.py b/object_detection/configs/paa/paa_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..641ef764d2713184845b624b20db1771cfcd6739 --- /dev/null +++ b/object_detection/configs/paa/paa_r101_fpn_2x_coco.py @@ -0,0 +1,3 @@ +_base_ = './paa_r101_fpn_1x_coco.py' +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/paa/paa_r101_fpn_mstrain_3x_coco.py b/object_detection/configs/paa/paa_r101_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..71858ed65c7fa998fdc960161689be083bdb4e62 --- /dev/null +++ b/object_detection/configs/paa/paa_r101_fpn_mstrain_3x_coco.py @@ -0,0 +1,6 @@ +_base_ = './paa_r50_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/paa/paa_r50_fpn_1.5x_coco.py b/object_detection/configs/paa/paa_r50_fpn_1.5x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..aabce4af987aa5504e1748e10b9955f760a013e1 --- /dev/null +++ b/object_detection/configs/paa/paa_r50_fpn_1.5x_coco.py @@ -0,0 +1,3 @@ +_base_ = './paa_r50_fpn_1x_coco.py' +lr_config = dict(step=[12, 16]) +runner = dict(type='EpochBasedRunner', max_epochs=18) diff --git a/object_detection/configs/paa/paa_r50_fpn_1x_coco.py b/object_detection/configs/paa/paa_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4c9c4aa73e1190da0edf1f20ffc3e60654cf87b1 --- /dev/null +++ b/object_detection/configs/paa/paa_r50_fpn_1x_coco.py @@ -0,0 +1,70 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='PAA', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='PAAHead', + reg_decoded_bbox=True, + score_voting=True, + topk=9, + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.3), + loss_centerness=dict( + type='CrossEntropyLoss', 
use_sigmoid=True, loss_weight=0.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.1, + neg_iou_thr=0.1, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/paa/paa_r50_fpn_2x_coco.py b/object_detection/configs/paa/paa_r50_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..663d2c0ded52086663360a8a3dce89702584fc1f --- /dev/null +++ b/object_detection/configs/paa/paa_r50_fpn_2x_coco.py @@ -0,0 +1,3 @@ +_base_ = './paa_r50_fpn_1x_coco.py' +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/paa/paa_r50_fpn_mstrain_3x_coco.py b/object_detection/configs/paa/paa_r50_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..91fa28cde470cb323f90f89a56d8acb6f9f0a22e --- /dev/null +++ b/object_detection/configs/paa/paa_r50_fpn_mstrain_3x_coco.py @@ -0,0 +1,20 @@ +_base_ = './paa_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/object_detection/configs/pafpn/README.md b/object_detection/configs/pafpn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..ec3fdb81dc7a1d2fec1f02d089fba89b37739225 --- /dev/null +++ b/object_detection/configs/pafpn/README.md @@ -0,0 +1,38 @@ +# Path Aggregation Network for Instance Segmentation + +## Abstract + + + +The way that information propagates in neural networks is of great importance. In this paper, we propose Path Aggregation Network (PANet) aiming at boosting information flow in proposal-based instance segmentation framework. Specifically, we enhance the entire feature hierarchy with accurate localization signals in lower layers by bottom-up path augmentation, which shortens the information path between lower layers and topmost feature. We present adaptive feature pooling, which links feature grid and all feature levels to make useful information in each feature level propagate directly to following proposal subnetworks. A complementary branch capturing different views for each proposal is created to further improve mask prediction. These improvements are simple to implement, with subtle extra computational overhead. Our PANet reaches the 1st place in the COCO 2017 Challenge Instance Segmentation task and the 2nd place in Object Detection task without large-batch training. It is also state-of-the-art on MVD and Cityscapes. + + +
+ +
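To make the bottom-up path augmentation mentioned above concrete, here is a minimal PyTorch sketch of the extra bottom-up pass that a PAFPN-style neck adds on top of FPN outputs. It is a simplified stand-in, not mmdetection's `PAFPN` implementation; the module name, layer choices, and toy feature shapes are assumptions for illustration only.

```python
# Minimal sketch of PANet-style bottom-up path augmentation over FPN outputs.
import torch
import torch.nn as nn

class BottomUpAugmentation(nn.Module):
    def __init__(self, channels: int = 256, num_levels: int = 4):
        super().__init__()
        # Stride-2 convs carry information from level N_i down to the next level.
        self.down_convs = nn.ModuleList(
            nn.Conv2d(channels, channels, 3, stride=2, padding=1)
            for _ in range(num_levels - 1))
        # 3x3 convs fuse the downsampled path with the corresponding FPN output.
        self.fuse_convs = nn.ModuleList(
            nn.Conv2d(channels, channels, 3, padding=1)
            for _ in range(num_levels - 1))

    def forward(self, fpn_outs):
        # fpn_outs: [P2, P3, P4, P5], highest resolution first.
        outs = [fpn_outs[0]]  # N2 = P2
        for i in range(len(fpn_outs) - 1):
            fused = self.down_convs[i](outs[-1]) + fpn_outs[i + 1]
            outs.append(self.fuse_convs[i](fused))
        return outs  # [N2, N3, N4, N5]

# Example: four FPN levels with 256 channels at strides 4/8/16/32 of a 256x256 input.
feats = [torch.randn(1, 256, 256 // s, 256 // s) for s in (4, 8, 16, 32)]
print([tuple(o.shape) for o in BottomUpAugmentation()(feats)])
```
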
+ + + + +## Citation + + + +``` +@inproceedings{liu2018path, + author = {Shu Liu and + Lu Qi and + Haifang Qin and + Jianping Shi and + Jiaya Jia}, + title = {Path Aggregation Network for Instance Segmentation}, + booktitle = {Proceedings of IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + year = {2018} +} +``` + +## Results and Models + +| Backbone | style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:-------------:|:----------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | pytorch | 1x | 4.0 | 17.2 | 37.5 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pafpn/faster_rcnn_r50_pafpn_1x_coco/faster_rcnn_r50_pafpn_1x_coco_bbox_mAP-0.375_20200503_105836-b7b4b9bd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pafpn/faster_rcnn_r50_pafpn_1x_coco/faster_rcnn_r50_pafpn_1x_coco_20200503_105836.log.json) | diff --git a/object_detection/configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py b/object_detection/configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b2fdef91c5cc8396baee9c2d8a09556162443078 --- /dev/null +++ b/object_detection/configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' + +model = dict( + neck=dict( + type='PAFPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5)) diff --git a/object_detection/configs/pafpn/metafile.yml b/object_detection/configs/pafpn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..f9cf97c8c41378c9f1eb3d16b62c4ac1a23dbf89 --- /dev/null +++ b/object_detection/configs/pafpn/metafile.yml @@ -0,0 +1,38 @@ +Collections: + - Name: PAFPN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - PAFPN + Paper: + URL: https://arxiv.org/abs/1803.01534 + Title: 'Path Aggregation Network for Instance Segmentation' + README: configs/pafpn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/necks/pafpn.py#L11 + Version: v2.0.0 + +Models: + - Name: faster_rcnn_r50_pafpn_1x_coco + In Collection: PAFPN + Config: configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py + Metadata: + Training Memory (GB): 4.0 + inference time (ms/im): + - value: 58.14 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pafpn/faster_rcnn_r50_pafpn_1x_coco/faster_rcnn_r50_pafpn_1x_coco_bbox_mAP-0.375_20200503_105836-b7b4b9bd.pth diff --git a/object_detection/configs/panoptic_fpn/README.md b/object_detection/configs/panoptic_fpn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..083fb6efad9e529dfb94891f3dadf567009e0bfb --- /dev/null +++ b/object_detection/configs/panoptic_fpn/README.md @@ -0,0 +1,64 @@ +# Panoptic feature pyramid networks +## Abstract + + + +The recently introduced panoptic segmentation task has renewed our community's interest in unifying the tasks of instance segmentation (for thing classes) and semantic segmentation (for stuff classes). 
However, current state-of-the-art methods for this joint task use separate and dissimilar networks for instance and semantic segmentation, without performing any shared computation. In this work, we aim to unify these methods at the architectural level, designing a single network for both tasks. Our approach is to endow Mask R-CNN, a popular instance segmentation method, with a semantic segmentation branch using a shared Feature Pyramid Network (FPN) backbone. Surprisingly, this simple baseline not only remains effective for instance segmentation, but also yields a lightweight, top-performing method for semantic segmentation. In this work, we perform a detailed study of this minimally extended version of Mask R-CNN with FPN, which we refer to as Panoptic FPN, and show it is a robust and accurate baseline for both tasks. Given its effectiveness and conceptual simplicity, we hope our method can serve as a strong baseline and aid future research in panoptic segmentation. + + +
+ +
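The semantic branch described above can be sketched in a few lines of PyTorch: each FPN level goes through 3x3 conv + group norm + ReLU stages with repeated 2x upsampling until it reaches 1/4 scale, the levels are summed, and a 1x1 conv predicts the stuff classes. The channel widths (256 -> 128) mirror the `PanopticFPNHead` settings in the configs below, but everything else here is a simplified illustration, not mmdetection's implementation; the class count of 54 (53 stuff classes plus one "things" channel) is likewise an assumption taken from the config.

```python
# Simplified sketch of the Panoptic FPN semantic segmentation branch.
import torch
import torch.nn as nn

def conv_gn_relu(cin, cout):
    return nn.Sequential(
        nn.Conv2d(cin, cout, 3, padding=1),
        nn.GroupNorm(32, cout),
        nn.ReLU(inplace=True))

class SemanticBranch(nn.Module):
    def __init__(self, in_channels=256, inner_channels=128, num_classes=54):
        super().__init__()
        # One tower per FPN level P2..P5: level i needs i upsamplings to reach P2 scale.
        self.towers = nn.ModuleList()
        for num_ups in range(4):
            blocks = [conv_gn_relu(in_channels, inner_channels)]
            for _ in range(num_ups):
                blocks += [nn.Upsample(scale_factor=2, mode='bilinear',
                                       align_corners=False),
                           conv_gn_relu(inner_channels, inner_channels)]
            self.towers.append(nn.Sequential(*blocks))
        self.classifier = nn.Conv2d(inner_channels, num_classes, 1)

    def forward(self, fpn_outs):  # [P2, P3, P4, P5]
        fused = sum(tower(f) for tower, f in zip(self.towers, fpn_outs))
        return self.classifier(fused)  # logits at 1/4 input resolution

feats = [torch.randn(1, 256, 256 // s, 256 // s) for s in (4, 8, 16, 32)]
print(SemanticBranch()(feats).shape)  # torch.Size([1, 54, 64, 64])
```
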
+ + + + +## Citation + + +The base method for panoptic segmentation task. + +``` +@inproceedings{kirillov2018panopticfpn, + author = { + Alexander Kirillov, + Ross Girshick, + Kaiming He, + Piotr Dollar, + }, + title = {Panoptic Feature Pyramid Networks}, + booktitle = {Proceedings of IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + year = {2019} +} +``` + +## Dataset + +PanopticFPN requires COCO and [COCO-panoptic](http://images.cocodataset.org/annotations/panoptic_annotations_trainval2017.zip) dataset for training and evaluation. You need to download and extract it in the COCO dataset path. +The directory should be like this. + +```none +mmdetection +├── mmdet +├── tools +├── configs +├── data +│ ├── coco +│ │ ├── annotations +│ │ │ ├── panoptic_train2017.json +│ │ │ ├── panoptic_train2017 +│ │ │ ├── panoptic_val2017.json +│ │ │ ├── panoptic_val2017 +│ │ ├── train2017 +│ │ ├── val2017 +│ │ ├── test2017 +``` + +## Results and Models + +| Backbone | style | Lr schd | Mem (GB) | Inf time (fps) | PQ | SQ | RQ | PQ_th | SQ_th | RQ_th | PQ_st | SQ_st | RQ_st | Config | Download | +|:-------------:|:----------:|:-------:|:--------:|:--------------:|:----:|:----:|:----:|:-----:|:-----:|:-----:|:-----:|:-----:|:-----:|:------:|:--------:| +| R-50-FPN | pytorch | 1x | 4.7 | | 40.2 | 77.8 | 49.3 | 47.8 | 80.9 | 57.5 | 28.9 | 73.1 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco/panoptic_fpn_r50_fpn_1x_coco_20210821_101153-9668fd13.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco/panoptic_fpn_r50_fpn_1x_coco_20210821_101153.log.json) | +| R-50-FPN | pytorch | 3x | - | - | 42.5 | 78.1 | 51.7 | 50.3 | 81.5 | 60.3 | 30.7 | 73.0 | 38.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco/panoptic_fpn_r50_fpn_mstrain_3x_coco_20210824_171155-5650f98b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco/panoptic_fpn_r50_fpn_mstrain_3x_coco_20210824_171155.log.json) | +| R-101-FPN | pytorch | 1x | 6.7 | | 42.2 | 78.3 | 51.4 | 50.1 | 81.4 | 59.9 | 30.3 | 73.6 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco/panoptic_fpn_r101_fpn_1x_coco_20210820_193950-ab9157a2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco/panoptic_fpn_r101_fpn_1x_coco_20210820_193950.log.json) | +| R-101-FPN | pytorch | 3x | - | - | 44.1 | 78.9 | 53.6 | 52.1 | 81.7 | 62.3 | 32.0 | 74.6 | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco/panoptic_fpn_r101_fpn_mstrain_3x_coco_20210823_114712-9c99acc4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco/panoptic_fpn_r101_fpn_mstrain_3x_coco_20210823_114712.log.json) | diff --git a/object_detection/configs/panoptic_fpn/metafile.yml 
b/object_detection/configs/panoptic_fpn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..8c9d39dcee8c35c483f002c7ebeca73288370d63 --- /dev/null +++ b/object_detection/configs/panoptic_fpn/metafile.yml @@ -0,0 +1,70 @@ +Collections: + - Name: PanopticFPN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - PanopticFPN + Paper: + URL: https://arxiv.org/pdf/1901.02446 + Title: 'Panoptic feature pyramid networks' + README: configs/panoptic_fpn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.16.0/mmdet/models/detectors/panoptic_fpn.py#L7 + Version: v2.16.0 + +Models: + - Name: panoptic_fpn_r50_fpn_1x_coco + In Collection: PanopticFPN + Config: configs/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.6 + Epochs: 12 + Results: + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 40.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco/panoptic_fpn_r50_fpn_1x_coco_20210821_101153-9668fd13.pth + + - Name: panoptic_fpn_r50_fpn_mstrain_3x_coco + In Collection: PanopticFPN + Config: configs/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 4.6 + Epochs: 36 + Results: + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 42.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco/panoptic_fpn_r50_fpn_mstrain_3x_coco_20210824_171155-5650f98b.pth + + - Name: panoptic_fpn_r101_fpn_1x_coco + In Collection: PanopticFPN + Config: configs/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.5 + Epochs: 12 + Results: + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 42.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco/panoptic_fpn_r101_fpn_1x_coco_20210820_193950-ab9157a2.pth + + - Name: panoptic_fpn_r101_fpn_mstrain_3x_coco + In Collection: PanopticFPN + Config: configs/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 6.5 + Epochs: 36 + Results: + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 44.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco/panoptic_fpn_r101_fpn_mstrain_3x_coco_20210823_114712-9c99acc4.pth diff --git a/object_detection/configs/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco.py b/object_detection/configs/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..78b80798d3ab678b903775e3a4594d5c9dd92b92 --- /dev/null +++ b/object_detection/configs/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './panoptic_fpn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco.py b/object_detection/configs/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..057e4811ebfca7cc1aea6ef2a6d10d2d2c34a1a7 --- /dev/null +++ b/object_detection/configs/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco.py @@ -0,0 +1,6 @@ +_base_ = './panoptic_fpn_r50_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + depth=101, + 
init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco.py b/object_detection/configs/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..29955246032fd8e5ce624ceea586945fa3c91cce --- /dev/null +++ b/object_detection/configs/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco.py @@ -0,0 +1,33 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_panoptic.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='PanopticFPN', + semantic_head=dict( + type='PanopticFPNHead', + num_things_classes=80, + num_stuff_classes=53, + in_channels=256, + inner_channels=128, + start_level=0, + end_level=4, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True), + conv_cfg=None, + loss_seg=dict( + type='CrossEntropyLoss', ignore_index=255, loss_weight=0.5)), + panoptic_fusion_head=dict( + type='HeuristicFusionHead', + num_things_classes=80, + num_stuff_classes=53), + test_cfg=dict( + panoptic=dict( + score_thr=0.6, + max_per_img=100, + mask_thr_binary=0.5, + mask_overlap=0.5, + nms=dict(type='nms', iou_threshold=0.5, class_agnostic=True), + stuff_area_limit=4096))) + +custom_hooks = [] diff --git a/object_detection/configs/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco.py b/object_detection/configs/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b510935358f55275434d5bcfe565545f861fbec9 --- /dev/null +++ b/object_detection/configs/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco.py @@ -0,0 +1,61 @@ +_base_ = './panoptic_fpn_r50_fpn_1x_coco.py' + +# dataset settings +dataset_type = 'CocoPanopticDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadPanopticAnnotations', + with_bbox=True, + with_mask=True, + with_seg=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 4), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + train=dict( + _delete_=True, + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/panoptic_train2017.json', + img_prefix=data_root + 'train2017/', + seg_prefix=data_root + 'annotations/panoptic_train2017/', + pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/pascal_voc/README.md b/object_detection/configs/pascal_voc/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..af33edb5e48c1c319b9743488958d37ed1485946 --- /dev/null +++ b/object_detection/configs/pascal_voc/README.md @@ -0,0 +1,41 @@ +# The Pascal Visual Object Classes (VOC) Challenge + +## Abstract + + + +The Pascal Visual Object Classes (VOC) challenge is a benchmark in visual object category recognition and detection, providing the vision and machine learning communities with a standard dataset of images and annotation, and standard evaluation procedures. Organised annually from 2005 to present, the challenge and its associated dataset has become accepted as the benchmark for object detection. + +This paper describes the dataset and evaluation procedure. We review the state-of-the-art in evaluated methods for both classification and detection, analyse whether the methods are statistically different, what they are learning from the images (e.g. the object or its context), and what the methods find easy or confuse. The paper concludes with lessons learnt in the three year history of the challenge, and proposes directions for future improvement and extension. + + +
+ +
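Since the abstract above emphasises the standard evaluation procedure, the short sketch below computes VOC-style average precision (the post-2010 area-under-the-precision/recall-curve variant with precision made monotone). It is offered only as a reference for how the box AP numbers reported below are defined in spirit; it is not the evaluation code used by these configs, and the toy detections are made up for illustration.

```python
# Hedged sketch of VOC-style average precision from a precision/recall curve.
import numpy as np

def voc_ap(recall: np.ndarray, precision: np.ndarray) -> float:
    """recall/precision: arrays ordered by descending detection confidence."""
    # Pad the curve so it starts at recall 0 and ends at recall 1.
    r = np.concatenate(([0.0], recall, [1.0]))
    p = np.concatenate(([0.0], precision, [0.0]))
    # Make precision monotonically non-increasing from right to left.
    for i in range(len(p) - 2, -1, -1):
        p[i] = max(p[i], p[i + 1])
    # Sum rectangles over the recall steps.
    idx = np.where(r[1:] != r[:-1])[0]
    return float(np.sum((r[idx + 1] - r[idx]) * p[idx + 1]))

# Toy example: 4 detections (TP, TP, FP, TP) against 3 ground-truth boxes.
recall = np.array([1 / 3, 2 / 3, 2 / 3, 1.0])
precision = np.array([1.0, 1.0, 2 / 3, 3 / 4])
print(round(voc_ap(recall, precision), 4))  # 0.9167
```
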
+ + + + +## Citation + + + +``` +@Article{Everingham10, + author = "Everingham, M. and Van~Gool, L. and Williams, C. K. I. and Winn, J. and Zisserman, A.", + title = "The Pascal Visual Object Classes (VOC) Challenge", + journal = "International Journal of Computer Vision", + volume = "88", + year = "2010", + number = "2", + month = jun, + pages = "303--338", +} +``` + +## Results and Models + +| Architecture | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:------------:|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| Faster R-CNN | R-50 | pytorch | 1x | 2.6 | - | 79.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712/faster_rcnn_r50_fpn_1x_voc0712_20200624-c9895d40.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712/20200623_015208.log.json) | +| Retinanet | R-50 | pytorch | 1x | 2.1 | - | 77.3 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pascal_voc/retinanet_r50_fpn_1x_voc0712.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/retinanet_r50_fpn_1x_voc0712/retinanet_r50_fpn_1x_voc0712_20200617-47cbdd0e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/retinanet_r50_fpn_1x_voc0712/retinanet_r50_fpn_1x_voc0712_20200616_014642.log.json) | diff --git a/object_detection/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py b/object_detection/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py new file mode 100644 index 0000000000000000000000000000000000000000..7866acebea689e7a863a836c326b1407de733fe8 --- /dev/null +++ b/object_detection/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py @@ -0,0 +1,14 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', '../_base_/datasets/voc0712.py', + '../_base_/default_runtime.py' +] +model = dict(roi_head=dict(bbox_head=dict(num_classes=20))) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +# actual epoch = 3 * 3 = 9 +lr_config = dict(policy='step', step=[3]) +# runtime settings +runner = dict( + type='EpochBasedRunner', max_epochs=4) # actual epoch = 4 * 3 = 12 diff --git a/object_detection/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712_cocofmt.py b/object_detection/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712_cocofmt.py new file mode 100644 index 0000000000000000000000000000000000000000..12eee2c1ecdaa5f9e84a3bd2084b00493f2f76c0 --- /dev/null +++ b/object_detection/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712_cocofmt.py @@ -0,0 +1,75 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', '../_base_/datasets/voc0712.py', + '../_base_/default_runtime.py' +] +model = dict(roi_head=dict(bbox_head=dict(num_classes=20))) + +CLASSES = ('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', + 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', + 'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor') + +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/VOCdevkit/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1000, 600), keep_ratio=True), 
+ dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1000, 600), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file='data/voc0712_trainval.json', + img_prefix='data/VOCdevkit', + pipeline=train_pipeline, + classes=CLASSES)), + val=dict( + type=dataset_type, + ann_file='data/voc07_test.json', + img_prefix='data/VOCdevkit', + pipeline=test_pipeline, + classes=CLASSES), + test=dict( + type=dataset_type, + ann_file='data/voc07_test.json', + img_prefix='data/VOCdevkit', + pipeline=test_pipeline, + classes=CLASSES)) +evaluation = dict(interval=1, metric='bbox') + +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +# actual epoch = 3 * 3 = 9 +lr_config = dict(policy='step', step=[3]) +# runtime settings +runner = dict( + type='EpochBasedRunner', max_epochs=4) # actual epoch = 4 * 3 = 12 diff --git a/object_detection/configs/pascal_voc/retinanet_r50_fpn_1x_voc0712.py b/object_detection/configs/pascal_voc/retinanet_r50_fpn_1x_voc0712.py new file mode 100644 index 0000000000000000000000000000000000000000..b4b050dda5d2d752c0db3c83c434879c8765a272 --- /dev/null +++ b/object_detection/configs/pascal_voc/retinanet_r50_fpn_1x_voc0712.py @@ -0,0 +1,14 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', '../_base_/datasets/voc0712.py', + '../_base_/default_runtime.py' +] +model = dict(bbox_head=dict(num_classes=20)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +# actual epoch = 3 * 3 = 9 +lr_config = dict(policy='step', step=[3]) +# runtime settings +runner = dict( + type='EpochBasedRunner', max_epochs=4) # actual epoch = 4 * 3 = 12 diff --git a/object_detection/configs/pascal_voc/ssd300_voc0712.py b/object_detection/configs/pascal_voc/ssd300_voc0712.py new file mode 100644 index 0000000000000000000000000000000000000000..4d6bbd9cf42ab16119b4d4a929f708fd902c2a0a --- /dev/null +++ b/object_detection/configs/pascal_voc/ssd300_voc0712.py @@ -0,0 +1,69 @@ +_base_ = [ + '../_base_/models/ssd300.py', '../_base_/datasets/voc0712.py', + '../_base_/default_runtime.py' +] +model = dict( + bbox_head=dict( + num_classes=20, anchor_generator=dict(basesize_ratio_range=(0.2, + 0.9)))) +# dataset settings +dataset_type = 'VOCDataset' +data_root = 'data/VOCdevkit/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(300, 300), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='PhotoMetricDistortion', + 
brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(300, 300), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=3, + train=dict( + type='RepeatDataset', times=10, dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=1e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict() +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[16, 20]) +checkpoint_config = dict(interval=1) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/pascal_voc/ssd512_voc0712.py b/object_detection/configs/pascal_voc/ssd512_voc0712.py new file mode 100644 index 0000000000000000000000000000000000000000..f4627c2dc236ede39e258785ee5d153d23f40cf0 --- /dev/null +++ b/object_detection/configs/pascal_voc/ssd512_voc0712.py @@ -0,0 +1,57 @@ +_base_ = 'ssd300_voc0712.py' +input_size = 512 +model = dict( + neck=dict( + out_channels=(512, 1024, 512, 256, 256, 256, 256), + level_strides=(2, 2, 2, 2, 1), + level_paddings=(1, 1, 1, 1, 1), + last_kernel_size=4), + bbox_head=dict( + in_channels=(512, 1024, 512, 256, 256, 256, 256), + anchor_generator=dict( + input_size=input_size, + strides=[8, 16, 32, 64, 128, 256, 512], + basesize_ratio_range=(0.15, 0.9), + ratios=([2], [2, 3], [2, 3], [2, 3], [2, 3], [2], [2])))) +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(512, 512), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(512, 512), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/pisa/README.md b/object_detection/configs/pisa/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d1ea3a4624277a42a09726651fb04415c46f79dd --- /dev/null +++ b/object_detection/configs/pisa/README.md @@ -0,0 +1,54 @@ +# Prime Sample Attention in Object Detection + +## Abstract + + + +It is a common paradigm in object 
detection frameworks to treat all samples equally and target at maximizing the performance on average. In this work, we revisit this paradigm through a careful study on how different samples contribute to the overall performance measured in terms of mAP. Our study suggests that the samples in each mini-batch are neither independent nor equally important, and therefore a better classifier on average does not necessarily mean higher mAP. Motivated by this study, we propose the notion of Prime Samples, those that play a key role in driving the detection performance. We further develop a simple yet effective sampling and learning strategy called PrIme Sample Attention (PISA) that directs the focus of the training process towards such samples. Our experiments demonstrate that it is often more effective to focus on prime samples than hard samples when training a detector. Particularly, On the MSCOCO dataset, PISA outperforms the random sampling baseline and hard mining schemes, e.g., OHEM and Focal Loss, consistently by around 2% on both single-stage and two-stage detectors, even with a strong backbone ResNeXt-101. + + +
+ +
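As a loose illustration of the prime-sample re-weighting idea in the abstract — and of what the `isr=dict(k=2., bias=0.)` setting in the configs below controls — the sketch ranks the positive samples of one class by IoU and gives higher classification-loss weights to the top-ranked ones. The exact formulation in mmdetection's PISA code differs in detail (hierarchical local ranks and a separate CARL term), so treat the function below as an assumption-level toy rather than the implementation.

```python
# Toy sketch of importance-based sample re-weighting (ISR) for one class.
import numpy as np

def isr_like_weights(ious: np.ndarray, k: float = 2.0, bias: float = 0.0) -> np.ndarray:
    """Rank-based loss weights for the positive samples of one class."""
    n = len(ious)
    # Rank 0 = highest IoU, i.e. the most "prime" sample.
    ranks = np.argsort(np.argsort(-ious))
    importance = (n - ranks.astype(float)) / n          # in (0, 1], top rank -> 1
    weights = ((1.0 - bias) * importance + bias) ** k   # emphasise prime samples
    # Normalise so the total loss weight of these positives is unchanged.
    return weights * n / weights.sum()

ious = np.array([0.92, 0.55, 0.71, 0.60])
print(np.round(isr_like_weights(ious), 3))  # largest weight goes to the 0.92-IoU sample
```
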
+ + + + +## Citation + + + +```latex +@inproceedings{cao2019prime, + title={Prime sample attention in object detection}, + author={Cao, Yuhang and Chen, Kai and Loy, Chen Change and Lin, Dahua}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2020} +} +``` + +## Results and models + +| PISA | Network | Backbone | Lr schd | box AP | mask AP | Config | Download | +|:----:|:-------:|:-------------------:|:-------:|:------:|:-------:|:------:|:--------:| +| × | Faster R-CNN | R-50-FPN | 1x | 36.4 | | - | +| √ | Faster R-CNN | R-50-FPN | 1x | 38.4 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_r50_fpn_1x_coco/pisa_faster_rcnn_r50_fpn_1x_coco-dea93523.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_r50_fpn_1x_coco/pisa_faster_rcnn_r50_fpn_1x_coco_20200506_185619.log.json) | +| × | Faster R-CNN | X101-32x4d-FPN | 1x | 40.1 | | - | +| √ | Faster R-CNN | X101-32x4d-FPN | 1x | 41.9 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco-e4accec4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco_20200505_181503.log.json) | +| × | Mask R-CNN | R-50-FPN | 1x | 37.3 | 34.2 | - | +| √ | Mask R-CNN | R-50-FPN | 1x | 39.1 | 35.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_mask_rcnn_r50_fpn_1x_coco/pisa_mask_rcnn_r50_fpn_1x_coco-dfcedba6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_mask_rcnn_r50_fpn_1x_coco/pisa_mask_rcnn_r50_fpn_1x_coco_20200508_150500.log.json) | +| × | Mask R-CNN | X101-32x4d-FPN | 1x | 41.1 | 37.1 | - | +| √ | Mask R-CNN | X101-32x4d-FPN | 1x | | | | +| × | RetinaNet | R-50-FPN | 1x | 35.6 | | - | +| √ | RetinaNet | R-50-FPN | 1x | 36.9 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_retinanet_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_r50_fpn_1x_coco/pisa_retinanet_r50_fpn_1x_coco-76409952.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_r50_fpn_1x_coco/pisa_retinanet_r50_fpn_1x_coco_20200504_014311.log.json) | +| × | RetinaNet | X101-32x4d-FPN | 1x | 39.0 | | - | +| √ | RetinaNet | X101-32x4d-FPN | 1x | 40.7 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco/pisa_retinanet_x101_32x4d_fpn_1x_coco-a0c13c73.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco/pisa_retinanet_x101_32x4d_fpn_1x_coco_20200505_001404.log.json) | +| × | SSD300 | VGG16 | 1x | 25.6 | | - | +| √ | SSD300 | VGG16 | 1x | 27.6 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_ssd300_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd300_coco/pisa_ssd300_coco-710e3ac9.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd300_coco/pisa_ssd300_coco_20200504_144325.log.json) | +| × | SSD300 | VGG16 | 1x | 29.3 | | - | +| √ | SSD300 | VGG16 | 1x | 31.8 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_ssd512_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd512_coco/pisa_ssd512_coco-247addee.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd512_coco/pisa_ssd512_coco_20200508_131030.log.json) | + +**Notes:** + +- In the original paper, all models are trained and tested on mmdet v1.x, thus results may not be exactly the same with this release on v2.0. +- It is noted PISA only modifies the training pipeline so the inference time remains the same with the baseline. diff --git a/object_detection/configs/pisa/metafile.yml b/object_detection/configs/pisa/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..cd43afb00b029e39036dd8a1e70bbc96548f2584 --- /dev/null +++ b/object_detection/configs/pisa/metafile.yml @@ -0,0 +1,110 @@ +Collections: + - Name: PISA + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - PISA + - RPN + - ResNet + - RoIPool + Paper: + URL: https://arxiv.org/abs/1904.04821 + Title: 'Prime Sample Attention in Object Detection' + README: configs/pisa/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/roi_heads/pisa_roi_head.py#L8 + Version: v2.1.0 + +Models: + - Name: pisa_faster_rcnn_r50_fpn_1x_coco + In Collection: PISA + Config: configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_r50_fpn_1x_coco/pisa_faster_rcnn_r50_fpn_1x_coco-dea93523.pth + + - Name: pisa_faster_rcnn_x101_32x4d_fpn_1x_coco + In Collection: PISA + Config: configs/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco-e4accec4.pth + + - Name: pisa_mask_rcnn_r50_fpn_1x_coco + In Collection: PISA + Config: configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 35.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_mask_rcnn_r50_fpn_1x_coco/pisa_mask_rcnn_r50_fpn_1x_coco-dfcedba6.pth + + - Name: pisa_retinanet_r50_fpn_1x_coco + In Collection: PISA + Config: configs/pisa/pisa_retinanet_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_r50_fpn_1x_coco/pisa_retinanet_r50_fpn_1x_coco-76409952.pth + + - Name: pisa_retinanet_x101_32x4d_fpn_1x_coco + In Collection: PISA + Config: configs/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.7 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco/pisa_retinanet_x101_32x4d_fpn_1x_coco-a0c13c73.pth + + - Name: pisa_ssd300_coco + In Collection: PISA + Config: configs/pisa/pisa_ssd300_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 27.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd300_coco/pisa_ssd300_coco-710e3ac9.pth + + - Name: pisa_ssd512_coco + In Collection: PISA + Config: configs/pisa/pisa_ssd512_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 31.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd512_coco/pisa_ssd512_coco-247addee.pth diff --git a/object_detection/configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py b/object_detection/configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..71e65b0b2bc72379f4db73e491f76fc767cb786b --- /dev/null +++ b/object_detection/configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,30 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' + +model = dict( + roi_head=dict( + type='PISARoIHead', + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + train_cfg=dict( + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + sampler=dict( + type='ScoreHLRSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2, bias=0), + carl=dict(k=1, bias=0.2))), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/object_detection/configs/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py b/object_detection/configs/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..16edd99de295161a3c246243e8c482ede4e5bdae --- /dev/null +++ b/object_detection/configs/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,30 @@ +_base_ = '../faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py' + +model = dict( + roi_head=dict( + type='PISARoIHead', + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + train_cfg=dict( + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + sampler=dict( + type='ScoreHLRSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2, bias=0), + carl=dict(k=1, bias=0.2))), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/object_detection/configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py b/object_detection/configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..047a293466a20ea90501e3054d7fcfe23fcdcb39 --- /dev/null +++ b/object_detection/configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,30 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' + +model = dict( + roi_head=dict( + type='PISARoIHead', + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + train_cfg=dict( + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + 
rcnn=dict( + sampler=dict( + type='ScoreHLRSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2, bias=0), + carl=dict(k=1, bias=0.2))), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/object_detection/configs/pisa/pisa_mask_rcnn_x101_32x4d_fpn_1x_coco.py b/object_detection/configs/pisa/pisa_mask_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2186a8f695ae6de9f27f5e96e398766f7a0e74bd --- /dev/null +++ b/object_detection/configs/pisa/pisa_mask_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,30 @@ +_base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py' + +model = dict( + roi_head=dict( + type='PISARoIHead', + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + train_cfg=dict( + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + sampler=dict( + type='ScoreHLRSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2, bias=0), + carl=dict(k=1, bias=0.2))), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/object_detection/configs/pisa/pisa_retinanet_r50_fpn_1x_coco.py b/object_detection/configs/pisa/pisa_retinanet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..70f89e227ec64b5c7224375aac0cf7ae3a10a29e --- /dev/null +++ b/object_detection/configs/pisa/pisa_retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' + +model = dict( + bbox_head=dict( + type='PISARetinaHead', + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0)), + train_cfg=dict(isr=dict(k=2., bias=0.), carl=dict(k=1., bias=0.2))) diff --git a/object_detection/configs/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py b/object_detection/configs/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b97b6720f0522ee19e3f8353bf490b74a5835308 --- /dev/null +++ b/object_detection/configs/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = '../retinanet/retinanet_x101_32x4d_fpn_1x_coco.py' + +model = dict( + bbox_head=dict( + type='PISARetinaHead', + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0)), + train_cfg=dict(isr=dict(k=2., bias=0.), carl=dict(k=1., bias=0.2))) diff --git a/object_detection/configs/pisa/pisa_ssd300_coco.py b/object_detection/configs/pisa/pisa_ssd300_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b5cc006477eacaa9ab40d463312dc2156a59d634 --- /dev/null +++ b/object_detection/configs/pisa/pisa_ssd300_coco.py @@ -0,0 +1,8 @@ +_base_ = '../ssd/ssd300_coco.py' + +model = dict( + bbox_head=dict(type='PISASSDHead'), + train_cfg=dict(isr=dict(k=2., bias=0.), carl=dict(k=1., bias=0.2))) + +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/pisa/pisa_ssd512_coco.py b/object_detection/configs/pisa/pisa_ssd512_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3219d6d667cb185e6fa4f1954d632ccad9512a48 --- /dev/null +++ b/object_detection/configs/pisa/pisa_ssd512_coco.py @@ -0,0 +1,8 @@ +_base_ = '../ssd/ssd512_coco.py' + +model = dict( + 
bbox_head=dict(type='PISASSDHead'), + train_cfg=dict(isr=dict(k=2., bias=0.), carl=dict(k=1., bias=0.2))) + +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/point_rend/README.md b/object_detection/configs/point_rend/README.md new file mode 100644 index 0000000000000000000000000000000000000000..4a9b6f2c687a5073de819acd05b44c2e6dd5bd93 --- /dev/null +++ b/object_detection/configs/point_rend/README.md @@ -0,0 +1,37 @@ +# PointRend: Image Segmentation as Rendering + +## Abstract + + + +We present a new method for efficient high-quality image segmentation of objects and scenes. By analogizing classical computer graphics methods for efficient rendering with over- and undersampling challenges faced in pixel labeling tasks, we develop a unique perspective of image segmentation as a rendering problem. From this vantage, we present the PointRend (Point-based Rendering) neural network module: a module that performs point-based segmentation predictions at adaptively selected locations based on an iterative subdivision algorithm. PointRend can be flexibly applied to both instance and semantic segmentation tasks by building on top of existing state-of-the-art models. While many concrete implementations of the general idea are possible, we show that a simple design already achieves excellent results. Qualitatively, PointRend outputs crisp object boundaries in regions that are over-smoothed by previous methods. Quantitatively, PointRend yields significant gains on COCO and Cityscapes, for both instance and semantic segmentation. PointRend's efficiency enables output resolutions that are otherwise impractical in terms of memory or computation compared to existing approaches. + + +
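The abstract's "point-based segmentation predictions at adaptively selected locations based on an iterative subdivision algorithm" is easier to picture in code. Below is a minimal sketch of that inference-time loop, not the mmdet implementation: `point_fn` stands in for the point head (its signature is hypothetical), and the defaults simply mirror the `subdivision_steps`, `subdivision_num_points`, and `scale_factor` values in the `test_cfg` of the config further down.

```python
import torch.nn.functional as F

def adaptive_subdivision(coarse_logits, point_fn, steps=5, num_points=28 * 28, scale=2):
    """Sketch of PointRend-style mask refinement by iterative subdivision.

    coarse_logits: (N, 1, H, W) logits from the coarse mask head.
    point_fn: hypothetical callable returning re-predicted logits of shape
              (N, 1, K) for the K selected (flattened) point indices.
    """
    logits = coarse_logits
    for _ in range(steps):
        # 1. "render" the mask at the next, finer resolution
        logits = F.interpolate(logits, scale_factor=scale, mode='bilinear', align_corners=False)
        n, c, h, w = logits.shape
        flat = logits.reshape(n, c, h * w)
        # 2. select the most uncertain points (probability closest to 0.5)
        uncertainty = -(flat.sigmoid() - 0.5).abs()
        idx = uncertainty.topk(min(num_points, h * w), dim=-1).indices
        # 3. re-predict only those points and write them back into the mask
        flat = flat.scatter(-1, idx, point_fn(idx))
        logits = flat.reshape(n, c, h, w)
    return logits
```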
+ +
+ + + + +## Citation + + + +```latex +@InProceedings{kirillov2019pointrend, + title={{PointRend}: Image Segmentation as Rendering}, + author={Alexander Kirillov and Yuxin Wu and Kaiming He and Ross Girshick}, + journal={ArXiv:1912.08193}, + year={2019} +} +``` + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 4.6 | | 38.4 | 36.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco/point_rend_r50_caffe_fpn_mstrain_1x_coco-1bcb5fb4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco/point_rend_r50_caffe_fpn_mstrain_1x_coco_20200612_161407.log.json) | +| R-50-FPN | caffe | 3x | 4.6 | | 41.0 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco/point_rend_r50_caffe_fpn_mstrain_3x_coco-e0ebb6b7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco/point_rend_r50_caffe_fpn_mstrain_3x_coco_20200614_002632.log.json) | + +Note: All models are trained with multi-scale, the input image shorter side is randomly scaled to one of (640, 672, 704, 736, 768, 800). diff --git a/object_detection/configs/point_rend/metafile.yml b/object_detection/configs/point_rend/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..82aea05be69c17bb75592423d2883512691586fd --- /dev/null +++ b/object_detection/configs/point_rend/metafile.yml @@ -0,0 +1,54 @@ +Collections: + - Name: PointRend + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - PointRend + - FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/1912.08193 + Title: 'PointRend: Image Segmentation as Rendering' + README: configs/point_rend/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.2.0/mmdet/models/detectors/point_rend.py#L6 + Version: v2.2.0 + +Models: + - Name: point_rend_r50_caffe_fpn_mstrain_1x_coco + In Collection: PointRend + Config: configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py + Metadata: + Training Memory (GB): 4.6 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco/point_rend_r50_caffe_fpn_mstrain_1x_coco-1bcb5fb4.pth + + - Name: point_rend_r50_caffe_fpn_mstrain_3x_coco + In Collection: PointRend + Config: configs/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 4.6 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco/point_rend_r50_caffe_fpn_mstrain_3x_coco-e0ebb6b7.pth diff --git 
a/object_detection/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py b/object_detection/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0c0e563d6fe307d05fbd3862cd28b6dc2a3e52b2 --- /dev/null +++ b/object_detection/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py @@ -0,0 +1,44 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' +# model settings +model = dict( + type='PointRend', + roi_head=dict( + type='PointRendRoIHead', + mask_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='concat', + roi_layer=dict( + _delete_=True, type='SimpleRoIAlign', output_size=14), + out_channels=256, + featmap_strides=[4]), + mask_head=dict( + _delete_=True, + type='CoarseMaskHead', + num_fcs=2, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)), + point_head=dict( + type='MaskPointHead', + num_fcs=3, + in_channels=256, + fc_channels=256, + num_classes=80, + coarse_pred_each_layer=True, + loss_point=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rcnn=dict( + mask_size=7, + num_points=14 * 14, + oversample_ratio=3, + importance_sample_ratio=0.75)), + test_cfg=dict( + rcnn=dict( + subdivision_steps=5, + subdivision_num_points=28 * 28, + scale_factor=2))) diff --git a/object_detection/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py b/object_detection/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..169278e5738b0abd4ae5e99594e4adbaaefa2d96 --- /dev/null +++ b/object_detection/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = './point_rend_r50_caffe_fpn_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/object_detection/configs/pvt/README.md b/object_detection/configs/pvt/README.md new file mode 100644 index 0000000000000000000000000000000000000000..368edb80e59211b72a3209294d36df8b93b068eb --- /dev/null +++ b/object_detection/configs/pvt/README.md @@ -0,0 +1,60 @@ +# Pyramid vision transformer: A versatile backbone for dense prediction without convolutions + +## Abstract + + + +Although using convolutional neural networks (CNNs) as backbones achieves great successes in computer vision, this work investigates a simple backbone network useful for many dense prediction tasks without convolutions. Unlike the recently-proposed Transformer model (e.g., ViT) that is specially designed for image classification, we propose Pyramid Vision Transformer~(PVT), which overcomes the difficulties of porting Transformer to various dense prediction tasks. PVT has several merits compared to prior arts. (1) Different from ViT that typically has low-resolution outputs and high computational and memory cost, PVT can be not only trained on dense partitions of the image to achieve high output resolution, which is important for dense predictions but also using a progressive shrinking pyramid to reduce computations of large feature maps. (2) PVT inherits the advantages from both CNN and Transformer, making it a unified backbone in various vision tasks without convolutions by simply replacing CNN backbones. 
(3) We validate PVT by conducting extensive experiments, showing that it boosts the performance of many downstream tasks, e.g., object detection, semantic, and instance segmentation. For example, with a comparable number of parameters, RetinaNet+PVT achieves 40.4 AP on the COCO dataset, surpassing RetinaNet+ResNet50 (36.3 AP) by 4.1 absolute AP. We hope PVT could serve as an alternative and useful backbone for pixel-level predictions and facilitate future research. + +Transformers have recently shown encouraging progress in computer vision. In this work, we present new baselines by improving the original Pyramid Vision Transformer (abbreviated as PVTv1) by adding three designs, including (1) overlapping patch embedding, (2) convolutional feed-forward networks, and (3) linear complexity attention layers. +With these modifications, our PVTv2 significantly improves PVTv1 on three tasks, e.g., classification, detection, and segmentation. Moreover, PVTv2 achieves comparable or better performance than recent works such as Swin Transformer. We hope this work will facilitate state-of-the-art Transformer research in computer vision. + + +
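Of the three PVTv2 designs listed above, (1) overlapping patch embedding is the most compact to illustrate: a strided convolution whose kernel is larger than its stride, so neighbouring patches share pixels. The sketch below is illustrative only; the kernel, stride, and embedding sizes are assumptions rather than the exact PVTv2 hyperparameters.

```python
import torch.nn as nn

class OverlapPatchEmbedSketch(nn.Module):
    """Illustrative overlapping patch embedding (design (1) in the PVTv2 abstract)."""

    def __init__(self, in_chans=3, embed_dim=64, patch_size=7, stride=4):
        super().__init__()
        # kernel_size > stride => adjacent patches overlap, unlike ViT's non-overlapping patches
        self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size,
                              stride=stride, padding=patch_size // 2)
        self.norm = nn.LayerNorm(embed_dim)

    def forward(self, x):
        x = self.proj(x)                      # (B, embed_dim, H/stride, W/stride)
        B, C, H, W = x.shape
        x = x.flatten(2).transpose(1, 2)      # (B, N, embed_dim) token sequence
        return self.norm(x), (H, W)
```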
+ +
+ + + + +## Citation + + + +```latex +@article{wang2021pyramid, + title={Pyramid vision transformer: A versatile backbone for dense prediction without convolutions}, + author={Wang, Wenhai and Xie, Enze and Li, Xiang and Fan, Deng-Ping and Song, Kaitao and Liang, Ding and Lu, Tong and Luo, Ping and Shao, Ling}, + journal={arXiv preprint arXiv:2102.12122}, + year={2021} +} +``` + +```latex +@article{wang2021pvtv2, + title={PVTv2: Improved Baselines with Pyramid Vision Transformer}, + author={Wang, Wenhai and Xie, Enze and Li, Xiang and Fan, Deng-Ping and Song, Kaitao and Liang, Ding and Lu, Tong and Luo, Ping and Shao, Ling}, + journal={arXiv preprint arXiv:2106.13797}, + year={2021} +} +``` +## Results and Models + +### RetinaNet (PVTv1) + +| Backbone | Lr schd | Mem (GB) | box AP | Config | Download | +|:-----------:|:-------:|:--------:|:------:|:------:|:--------:| +| PVT-Tiny | 12e |8.5 |36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_t_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-t_fpn_1x_coco/retinanet_pvt-t_fpn_1x_coco_20210831_103110-17b566bd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-t_fpn_1x_coco/retinanet_pvt-t_fpn_1x_coco_20210831_103110.log.json) | +| PVT-Small | 12e |14.5 |40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_s_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-s_fpn_1x_coco/retinanet_pvt-s_fpn_1x_coco_20210906_142921-b6c94a5b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-s_fpn_1x_coco/retinanet_pvt-s_fpn_1x_coco_20210906_142921.log.json) | +| PVT-Medium | 12e |20.9 |41.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_m_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-m_fpn_1x_coco/retinanet_pvt-m_fpn_1x_coco_20210831_103243-55effa1b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-m_fpn_1x_coco/retinanet_pvt-m_fpn_1x_coco_20210831_103243.log.json) | + +### RetinaNet (PVTv2) + +| Backbone | Lr schd | Mem (GB) | box AP | Config | Download | +|:-----------:|:-------:|:--------:|:------:|:------:|:--------:| +| PVTv2-B0 | 12e |7.4 |37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_v2_b0_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b0_fpn_1x_coco/retinanet_pvtv2-b0_fpn_1x_coco_20210831_103157-13e9aabe.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b0_fpn_1x_coco/retinanet_pvtv2-b0_fpn_1x_coco_20210831_103157.log.json) | +| PVTv2-B1 | 12e |9.5 |41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_v2_b1_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b1_fpn_1x_coco/retinanet_pvtv2-b1_fpn_1x_coco_20210831_103318-7e169a7d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b1_fpn_1x_coco/retinanet_pvtv2-b1_fpn_1x_coco_20210831_103318.log.json) | +| PVTv2-B2 | 12e |16.2 |44.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_v2_b2_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b2_fpn_1x_coco/retinanet_pvtv2-b2_fpn_1x_coco_20210901_174843-529f0b9a.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b2_fpn_1x_coco/retinanet_pvtv2-b2_fpn_1x_coco_20210901_174843.log.json) | +| PVTv2-B3 | 12e |23.0 |46.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_v2_b3_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b3_fpn_1x_coco/retinanet_pvtv2-b3_fpn_1x_coco_20210903_151512-8357deff.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b3_fpn_1x_coco/retinanet_pvtv2-b3_fpn_1x_coco_20210903_151512.log.json) | +| PVTv2-B4 | 12e |17.0 |46.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_v2_b4_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b4_fpn_1x_coco/retinanet_pvtv2-b4_fpn_1x_coco_20210901_170151-83795c86.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b4_fpn_1x_coco/retinanet_pvtv2-b4_fpn_1x_coco_20210901_170151.log.json) | +| PVTv2-B5 | 12e |18.7 |46.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_v2_b5_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b5_fpn_1x_coco/retinanet_pvtv2-b5_fpn_1x_coco_20210902_201800-3420eb57.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b5_fpn_1x_coco/retinanet_pvtv2-b5_fpn_1x_coco_20210902_201800.log.json) | diff --git a/object_detection/configs/pvt/metafile.yml b/object_detection/configs/pvt/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..48a0e2c2ae1714585c9b6354fef2340dee4207cc --- /dev/null +++ b/object_detection/configs/pvt/metafile.yml @@ -0,0 +1,136 @@ +Collections: + - Name: PVT + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x NVIDIA V100 GPUs + Architecture: + - RetinaNet + - PyramidVisionTransformer + - FPN + Paper: https://arxiv.org/abs/2102.12122 + README: configs/pvt/README.md + - Name: PVT-v2 + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x NVIDIA V100 GPUs + Architecture: + - RetinaNet + - PyramidVisionTransformerV2 + - FPN + Paper: https://arxiv.org/abs/2106.13797 + README: configs/pvt/README.md +Models: + - Name: retinanet_pvt-t_fpn_1x_coco + In Collection: PVT + Config: configs/pvt/retinanet_pvt-t_fpn_1x_coco.py + Metadata: + Training Memory (GB): 8.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-t_fpn_1x_coco/retinanet_pvt-t_fpn_1x_coco_20210831_103110-17b566bd.pth + - Name: retinanet_pvt-s_fpn_1x_coco + In Collection: PVT + Config: configs/pvt/retinanet_pvt-s_fpn_1x_coco.py + Metadata: + Training Memory (GB): 14.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-s_fpn_1x_coco/retinanet_pvt-s_fpn_1x_coco_20210906_142921-b6c94a5b.pth + - Name: retinanet_pvt-m_fpn_1x_coco + In Collection: PVT + Config: configs/pvt/retinanet_pvt-m_fpn_1x_coco.py + Metadata: + Training Memory (GB): 20.9 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.7 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-m_fpn_1x_coco/retinanet_pvt-m_fpn_1x_coco_20210831_103243-55effa1b.pth + - Name: retinanet_pvtv2-b0_fpn_1x_coco + In Collection: PVT-v2 + Config: configs/pvt/retinanet_pvtv2-b0_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.4 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b0_fpn_1x_coco/retinanet_pvtv2-b0_fpn_1x_coco_20210831_103157-13e9aabe.pth + - Name: retinanet_pvtv2-b1_fpn_1x_coco + In Collection: PVT-v2 + Config: configs/pvt/retinanet_pvtv2-b1_fpn_1x_coco.py + Metadata: + Training Memory (GB): 9.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b1_fpn_1x_coco/retinanet_pvtv2-b1_fpn_1x_coco_20210831_103318-7e169a7d.pth + - Name: retinanet_pvtv2-b2_fpn_1x_coco + In Collection: PVT-v2 + Config: configs/pvt/retinanet_pvtv2-b2_fpn_1x_coco.py + Metadata: + Training Memory (GB): 16.2 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b2_fpn_1x_coco/retinanet_pvtv2-b2_fpn_1x_coco_20210901_174843-529f0b9a.pth + - Name: retinanet_pvtv2-b3_fpn_1x_coco + In Collection: PVT-v2 + Config: configs/pvt/retinanet_pvtv2-b3_fpn_1x_coco.py + Metadata: + Training Memory (GB): 23.0 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b3_fpn_1x_coco/retinanet_pvtv2-b3_fpn_1x_coco_20210903_151512-8357deff.pth + - Name: retinanet_pvtv2-b4_fpn_1x_coco + In Collection: PVT-v2 + Config: configs/pvt/retinanet_pvtv2-b4_fpn_1x_coco.py + Metadata: + Training Memory (GB): 17.0 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b4_fpn_1x_coco/retinanet_pvtv2-b4_fpn_1x_coco_20210901_170151-83795c86.pth + - Name: retinanet_pvtv2-b5_fpn_1x_coco + In Collection: PVT-v2 + Config: configs/pvt/retinanet_pvtv2-b5_fpn_1x_coco.py + Metadata: + Training Memory (GB): 18.7 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b5_fpn_1x_coco/retinanet_pvtv2-b5_fpn_1x_coco_20210902_201800-3420eb57.pth diff --git a/object_detection/configs/pvt/retinanet_pvt-l_fpn_1x_coco.py b/object_detection/configs/pvt/retinanet_pvt-l_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e299f2a098e7cd2299e369cdf1aba9c56980cb0d --- /dev/null +++ b/object_detection/configs/pvt/retinanet_pvt-l_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = 'retinanet_pvt-t_fpn_1x_coco.py' +model = dict( + backbone=dict( + num_layers=[3, 8, 27, 3], + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_large.pth'))) +fp16 = dict(loss_scale=dict(init_scale=512)) diff --git a/object_detection/configs/pvt/retinanet_pvt-m_fpn_1x_coco.py b/object_detection/configs/pvt/retinanet_pvt-m_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b888f788b6c7310491751774238451bb7107dccc --- /dev/null +++ b/object_detection/configs/pvt/retinanet_pvt-m_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = 
'retinanet_pvt-t_fpn_1x_coco.py' +model = dict( + backbone=dict( + num_layers=[3, 4, 18, 3], + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_medium.pth'))) diff --git a/object_detection/configs/pvt/retinanet_pvt-s_fpn_1x_coco.py b/object_detection/configs/pvt/retinanet_pvt-s_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..46603488bb3ceb4fc1052139da53340a3d595256 --- /dev/null +++ b/object_detection/configs/pvt/retinanet_pvt-s_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = 'retinanet_pvt-t_fpn_1x_coco.py' +model = dict( + backbone=dict( + num_layers=[3, 4, 6, 3], + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_small.pth'))) diff --git a/object_detection/configs/pvt/retinanet_pvt-t_fpn_1x_coco.py b/object_detection/configs/pvt/retinanet_pvt-t_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a6cff7d033554d93b40e741278029cb914e36b68 --- /dev/null +++ b/object_detection/configs/pvt/retinanet_pvt-t_fpn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='RetinaNet', + backbone=dict( + _delete_=True, + type='PyramidVisionTransformer', + num_layers=[2, 2, 2, 2], + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_tiny.pth')), + neck=dict(in_channels=[64, 128, 320, 512])) +# optimizer +optimizer = dict(_delete_=True, type='AdamW', lr=0.0001, weight_decay=0.0001) diff --git a/object_detection/configs/pvt/retinanet_pvtv2-b0_fpn_1x_coco.py b/object_detection/configs/pvt/retinanet_pvtv2-b0_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cbe2295d8f66192a442653882c1f2b4d54a05b53 --- /dev/null +++ b/object_detection/configs/pvt/retinanet_pvtv2-b0_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='RetinaNet', + backbone=dict( + _delete_=True, + type='PyramidVisionTransformerV2', + embed_dims=32, + num_layers=[2, 2, 2, 2], + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_v2_b0.pth')), + neck=dict(in_channels=[32, 64, 160, 256])) +# optimizer +optimizer = dict(_delete_=True, type='AdamW', lr=0.0001, weight_decay=0.0001) diff --git a/object_detection/configs/pvt/retinanet_pvtv2-b1_fpn_1x_coco.py b/object_detection/configs/pvt/retinanet_pvtv2-b1_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5374c50925f5c7ed8a761eda40dc4bf374df3aeb --- /dev/null +++ b/object_detection/configs/pvt/retinanet_pvtv2-b1_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = 'retinanet_pvtv2-b0_fpn_1x_coco.py' +model = dict( + backbone=dict( + embed_dims=64, + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_v2_b1.pth')), + neck=dict(in_channels=[64, 128, 320, 512])) diff --git a/object_detection/configs/pvt/retinanet_pvtv2-b2_fpn_1x_coco.py b/object_detection/configs/pvt/retinanet_pvtv2-b2_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cf9a18debbe5f8b9918e0d086ad6d54d203ef310 --- /dev/null +++ b/object_detection/configs/pvt/retinanet_pvtv2-b2_fpn_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = 'retinanet_pvtv2-b0_fpn_1x_coco.py' +model = dict( + 
backbone=dict( + embed_dims=64, + num_layers=[3, 4, 6, 3], + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_v2_b2.pth')), + neck=dict(in_channels=[64, 128, 320, 512])) diff --git a/object_detection/configs/pvt/retinanet_pvtv2-b3_fpn_1x_coco.py b/object_detection/configs/pvt/retinanet_pvtv2-b3_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7a47f820324af7fecf773640d7d1829b0c115471 --- /dev/null +++ b/object_detection/configs/pvt/retinanet_pvtv2-b3_fpn_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = 'retinanet_pvtv2-b0_fpn_1x_coco.py' +model = dict( + backbone=dict( + embed_dims=64, + num_layers=[3, 4, 18, 3], + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_v2_b3.pth')), + neck=dict(in_channels=[64, 128, 320, 512])) diff --git a/object_detection/configs/pvt/retinanet_pvtv2-b4_fpn_1x_coco.py b/object_detection/configs/pvt/retinanet_pvtv2-b4_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ec9103b49b4338503fdd1c3f64a27ed1954b433e --- /dev/null +++ b/object_detection/configs/pvt/retinanet_pvtv2-b4_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = 'retinanet_pvtv2-b0_fpn_1x_coco.py' +model = dict( + backbone=dict( + embed_dims=64, + num_layers=[3, 8, 27, 3], + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_v2_b4.pth')), + neck=dict(in_channels=[64, 128, 320, 512])) +# optimizer +optimizer = dict( + _delete_=True, type='AdamW', lr=0.0001 / 1.4, weight_decay=0.0001) +# dataset settings +data = dict(samples_per_gpu=1, workers_per_gpu=1) diff --git a/object_detection/configs/pvt/retinanet_pvtv2-b5_fpn_1x_coco.py b/object_detection/configs/pvt/retinanet_pvtv2-b5_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d8e6d23ccbfc58db2a5c64bd713f3e85b640d01b --- /dev/null +++ b/object_detection/configs/pvt/retinanet_pvtv2-b5_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = 'retinanet_pvtv2-b0_fpn_1x_coco.py' +model = dict( + backbone=dict( + embed_dims=64, + num_layers=[3, 6, 40, 3], + mlp_ratios=(4, 4, 4, 4), + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_v2_b5.pth')), + neck=dict(in_channels=[64, 128, 320, 512])) +# optimizer +optimizer = dict( + _delete_=True, type='AdamW', lr=0.0001 / 1.4, weight_decay=0.0001) +# dataset settings +data = dict(samples_per_gpu=1, workers_per_gpu=1) diff --git a/object_detection/configs/queryinst/README.md b/object_detection/configs/queryinst/README.md new file mode 100644 index 0000000000000000000000000000000000000000..7129dd4c890aab9c2b46b0367dd7aa112b2ccb38 --- /dev/null +++ b/object_detection/configs/queryinst/README.md @@ -0,0 +1,40 @@ +# Instances as Queries + +## Abstract + + + +We present QueryInst, a new perspective for instance segmentation. QueryInst is a multi-stage end-to-end system that treats instances of interest as learnable queries, enabling query based object detectors, e.g., Sparse R-CNN, to have strong instance segmentation performance. The attributes of instances such as categories, bounding boxes, instance masks, and instance association embeddings are represented by queries in a unified manner. In QueryInst, a query is shared by both detection and segmentation via dynamic convolutions and driven by parallelly-supervised multi-stage learning. 
We conduct extensive experiments on three challenging benchmarks, i.e., COCO, CityScapes, and YouTube-VIS, to evaluate the effectiveness of QueryInst in object detection, instance segmentation, and video instance segmentation tasks. For the first time, we demonstrate that a simple end-to-end query-based framework can achieve state-of-the-art performance in various instance-level recognition tasks. + + +
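The abstract's claim that "a query is shared by both detection and segmentation via dynamic convolutions" is the key mechanism. The following is a simplified sketch of that dynamic interaction, not mmdet's `DynamicConv` module; the 256/64 channel sizes echo the `dynamic_conv_cfg` entries in the config further down, while the projection and normalization details here are assumptions.

```python
import torch
import torch.nn as nn

class DynamicInteractionSketch(nn.Module):
    """A per-instance query generates the kernels of a 1x1 'dynamic convolution'
    that is applied to that instance's RoI features (illustration only)."""

    def __init__(self, in_channels=256, feat_channels=64):
        super().__init__()
        self.in_channels = in_channels
        self.feat_channels = feat_channels
        # each query regresses two kernels: in_channels -> feat_channels -> in_channels
        self.param_gen = nn.Linear(in_channels, 2 * in_channels * feat_channels)
        self.norm1 = nn.LayerNorm(feat_channels)
        self.norm2 = nn.LayerNorm(in_channels)
        self.act = nn.ReLU(inplace=True)

    def forward(self, query, roi_feats):
        # query:     (num_inst, in_channels)       one learnable query per instance
        # roi_feats: (num_inst, H*W, in_channels)  pooled RoI features for that instance
        params = self.param_gen(query)
        k1, k2 = params.split(self.in_channels * self.feat_channels, dim=-1)
        k1 = k1.view(-1, self.in_channels, self.feat_channels)
        k2 = k2.view(-1, self.feat_channels, self.in_channels)
        x = self.act(self.norm1(torch.bmm(roi_feats, k1)))   # (num_inst, H*W, feat_channels)
        x = self.act(self.norm2(torch.bmm(x, k2)))           # (num_inst, H*W, in_channels)
        return x
```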
+ +
+ + + + +## Citation + + + +``` +@InProceedings{Fang_2021_ICCV, + author = {Fang, Yuxin and Yang, Shusheng and Wang, Xinggang and Li, Yu and Fang, Chen and Shan, Ying and Feng, Bin and Liu, Wenyu}, + title = {Instances As Queries}, + booktitle = {Proceedings of the IEEE/CVF International Conference on Computer Vision (ICCV)}, + month = {October}, + year = {2021}, + pages = {6910-6919} +} +``` + +## Results and Models + +| Model | Backbone | Style | Lr schd | Number of Proposals |Multi-Scale| RandomCrop | box AP | mask AP | Config | Download | +|:------------:|:---------:|:-------:|:-------:|:-------: |:-------: |:---------:|:------:|:------:|:------:|:--------:| +| QueryInst | R-50-FPN | pytorch | 1x | 100 | False | False | 42.0 | 37.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/queryinst/queryinst_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_1x_coco/queryinst_r50_fpn_1x_coco_20210907_084916-5a8f1998.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_1x_coco/queryinst_r50_fpn_1x_coco_20210907_084916.log.json) | +| QueryInst | R-50-FPN | pytorch | 3x | 100 | True | False | 44.8 | 39.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco/queryinst_r50_fpn_mstrain_480-800_3x_coco_20210901_103643-7837af86.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco/queryinst_r50_fpn_mstrain_480-800_3x_coco_20210901_103643.log.json) | +| QueryInst | R-50-FPN | pytorch | 3x | 300 | True | True | 47.5 | 41.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20210904_101802-85cffbd8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20210904_101802.log.json) | +| QueryInst | R-101-FPN | pytorch | 3x | 100 | True | False | 46.4 | 41.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco/queryinst_r101_fpn_mstrain_480-800_3x_coco_20210904_104048-91f9995b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco/queryinst_r101_fpn_mstrain_480-800_3x_coco_20210904_104048.log.json) | +| QueryInst | R-101-FPN | pytorch | 3x | 300 | True | True | 49.0 | 42.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20210904_153621-76cce59f.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20210904_153621.log.json) | diff --git a/object_detection/configs/queryinst/metafile.yml b/object_detection/configs/queryinst/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..da7f0a72c4ec891ff5eb224a041ff2ea6b809bf5 --- /dev/null +++ b/object_detection/configs/queryinst/metafile.yml @@ -0,0 +1,100 @@ +Collections: + - Name: QueryInst + Metadata: + Training Data: COCO + Training Techniques: + - AdamW + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - ResNet + - QueryInst + Paper: + URL: https://openaccess.thecvf.com/content/ICCV2021/papers/Fang_Instances_As_Queries_ICCV_2021_paper.pdf + Title: 'Instances as Queries' + README: configs/queryinst/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/detectors/queryinst.py + Version: v2.18.0 + +Models: + - Name: queryinst_r50_fpn_1x_coco + In Collection: QueryInst + Config: configs/queryinst/queryinst_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_1x_coco/queryinst_r50_fpn_1x_coco_20210907_084916-5a8f1998.pth + + - Name: queryinst_r50_fpn_mstrain_480-800_3x_coco + In Collection: QueryInst + Config: configs/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco/queryinst_r50_fpn_mstrain_480-800_3x_coco_20210901_103643-7837af86.pth + + - Name: queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco + In Collection: QueryInst + Config: configs/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20210904_101802-85cffbd8.pth + + - Name: queryinst_r101_fpn_mstrain_480-800_3x_coco + In Collection: QueryInst + Config: configs/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco/queryinst_r101_fpn_mstrain_480-800_3x_coco_20210904_104048-91f9995b.pth + + - Name: queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco + In Collection: QueryInst + Config: configs/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 49.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 42.9 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20210904_153621-76cce59f.pth diff --git a/object_detection/configs/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py b/object_detection/configs/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fd138f5ac5928089352b616463ac7f6fe386ce99 --- /dev/null +++ b/object_detection/configs/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py @@ -0,0 +1,7 @@ +_base_ = './queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco.py b/object_detection/configs/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..07cae19cea544f0e0b201aaef80a6c8d7b492fb3 --- /dev/null +++ b/object_detection/configs/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco.py @@ -0,0 +1,7 @@ +_base_ = './queryinst_r50_fpn_mstrain_480-800_3x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/queryinst/queryinst_r50_fpn_1x_coco.py b/object_detection/configs/queryinst/queryinst_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..48f5773b054fe185e5cbfac2350b86536db0d1d3 --- /dev/null +++ b/object_detection/configs/queryinst/queryinst_r50_fpn_1x_coco.py @@ -0,0 +1,138 @@ +_base_ = [ + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +num_stages = 6 +num_proposals = 100 +model = dict( + type='QueryInst', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=0, + add_extra_convs='on_input', + num_outs=4), + rpn_head=dict( + type='EmbeddingRPNHead', + num_proposals=num_proposals, + proposal_feature_channel=256), + roi_head=dict( + type='SparseRoIHead', + num_stages=num_stages, + stage_loss_weights=[1] * num_stages, + proposal_feature_channel=256, + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='DIIHead', + num_classes=80, + num_ffn_fcs=2, + num_heads=8, + num_cls_fcs=1, + num_reg_fcs=3, + feedforward_channels=2048, + in_channels=256, + dropout=0.0, + ffn_act_cfg=dict(type='ReLU', inplace=True), + dynamic_conv_cfg=dict( + type='DynamicConv', + in_channels=256, + feat_channels=64, + out_channels=256, + input_feat_shape=7, + act_cfg=dict(type='ReLU', inplace=True), + norm_cfg=dict(type='LN')), + loss_bbox=dict(type='L1Loss', loss_weight=5.0), + loss_iou=dict(type='GIoULoss', 
loss_weight=2.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=2.0), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + clip_border=False, + target_means=[0., 0., 0., 0.], + target_stds=[0.5, 0.5, 1., 1.])) for _ in range(num_stages) + ], + mask_head=[ + dict( + type='DynamicMaskHead', + dynamic_conv_cfg=dict( + type='DynamicConv', + in_channels=256, + feat_channels=64, + out_channels=256, + input_feat_shape=14, + with_proj=False, + act_cfg=dict(type='ReLU', inplace=True), + norm_cfg=dict(type='LN')), + num_convs=4, + num_classes=80, + roi_feat_size=14, + in_channels=256, + conv_kernel_size=3, + conv_out_channels=256, + class_agnostic=False, + norm_cfg=dict(type='BN'), + upsample_cfg=dict(type='deconv', scale_factor=2), + loss_mask=dict( + type='DiceLoss', + loss_weight=8.0, + use_sigmoid=True, + activate=False, + eps=1e-5)) for _ in range(num_stages) + ]), + # training and testing settings + train_cfg=dict( + rpn=None, + rcnn=[ + dict( + assigner=dict( + type='HungarianAssigner', + cls_cost=dict(type='FocalLossCost', weight=2.0), + reg_cost=dict(type='BBoxL1Cost', weight=5.0), + iou_cost=dict(type='IoUCost', iou_mode='giou', + weight=2.0)), + sampler=dict(type='PseudoSampler'), + pos_weight=1, + mask_size=28, + ) for _ in range(num_stages) + ]), + test_cfg=dict( + rpn=None, rcnn=dict(max_per_img=num_proposals, mask_thr_binary=0.5))) + +# optimizer +optimizer = dict( + _delete_=True, + type='AdamW', + lr=0.0001, + weight_decay=0.0001, + paramwise_cfg=dict( + custom_keys={'backbone': dict(lr_mult=0.1, decay_mult=1.0)})) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=0.1, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[8, 11], warmup_iters=1000) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/object_detection/configs/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py b/object_detection/configs/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3089b3c6c8af5d052849414572b8995d9ecd0828 --- /dev/null +++ b/object_detection/configs/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py @@ -0,0 +1,54 @@ +_base_ = './queryinst_r50_fpn_mstrain_480-800_3x_coco.py' +num_proposals = 300 +model = dict( + rpn_head=dict(num_proposals=num_proposals), + test_cfg=dict( + _delete_=True, + rpn=None, + rcnn=dict(max_per_img=num_proposals, mask_thr_binary=0.5))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from DETR. 
+train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='AutoAugment', + policies=[[ + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict( + type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ]]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']) +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/object_detection/configs/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco.py b/object_detection/configs/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..89e2cd10c5c7d1ac7fda4b43305f47221d2c7ac5 --- /dev/null +++ b/object_detection/configs/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco.py @@ -0,0 +1,23 @@ +_base_ = './queryinst_r50_fpn_1x_coco.py' + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +min_values = (480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, value) for value in min_values], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']) +] + +data = dict(train=dict(pipeline=train_pipeline)) +lr_config = dict(policy='step', step=[27, 33]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/object_detection/configs/regnet/README.md b/object_detection/configs/regnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..51f44df4122ad19ebec7894e9504ad17e46d0bf6 --- /dev/null +++ b/object_detection/configs/regnet/README.md @@ -0,0 +1,126 @@ +# Designing Network Design Spaces + +## Abstract + + + + In this work, we present a new network design paradigm. Our goal is to help advance the understanding of network design and discover design principles that generalize across settings. Instead of focusing on designing individual network instances, we design network design spaces that parametrize populations of networks. The overall process is analogous to classic manual design of networks, but elevated to the design space level. Using our methodology we explore the structure aspect of network design and arrive at a low-dimensional design space consisting of simple, regular networks that we call RegNet. The core insight of the RegNet parametrization is surprisingly simple: widths and depths of good networks can be explained by a quantized linear function. 
We analyze the RegNet design space and arrive at interesting findings that do not match the current practice of network design. The RegNet design space provides simple and fast networks that work well across a wide range of flop regimes. Under comparable training settings and flops, the RegNet models outperform the popular EfficientNet models while being up to 5x faster on GPUs. + + +
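The "quantized linear function" mentioned in the abstract can be written down directly. The sketch below follows the width-generation rule described in the paper (continuous widths u_j = w0 + wa*j, snapped onto powers of wm and rounded to multiples of 8); the example values are those commonly reported for RegNetX-3.2GF, and pycls additionally adjusts the result for group-width compatibility, which is omitted here.

```python
import numpy as np

def regnet_widths(w0, wa, wm, depth, q=8):
    """Sketch of RegNet's quantized linear width rule (group-width adjustment omitted)."""
    u = w0 + wa * np.arange(depth)                 # continuous widths u_j = w0 + wa * j
    s = np.round(np.log(u / w0) / np.log(wm))      # nearest power of wm for each block
    widths = w0 * np.power(wm, s)                  # quantize onto that geometric grid
    return (np.round(widths / q) * q).astype(int)  # snap to multiples of q channels

# Illustrative generator parameters, as reported for RegNetX-3.2GF (check pycls for exact values)
print(np.unique(regnet_widths(w0=88, wa=26.31, wm=2.25, depth=25)))
```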
+ +
+ + + + +## Introduction + + + +We implement RegNetX and RegNetY models in detection systems and provide their first results on Mask R-CNN, Faster R-CNN and RetinaNet. + +The pre-trained models are converted from the [model zoo of pycls](https://github.com/facebookresearch/pycls/blob/master/MODEL_ZOO.md). + +## Citation + +```latex +@article{radosavovic2020designing, + title={Designing Network Design Spaces}, + author={Ilija Radosavovic and Raj Prateek Kosaraju and Ross Girshick and Kaiming He and Piotr Dollár}, + year={2020}, + eprint={2003.13678}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + +## Usage + +To use a RegNet model, two steps are required: + +1. Convert the model to ResNet-style supported by MMDetection +2. Modify backbone and neck in config accordingly + +### Convert model + +We already prepare models with FLOPs ranging from 400M to 12G in our model zoo. + +For more general usage, we also provide the script `regnet2mmdet.py` in the tools directory to convert the keys of models pretrained by [pycls](https://github.com/facebookresearch/pycls/) to +ResNet-style checkpoints used in MMDetection. + +```bash +python -u tools/model_converters/regnet2mmdet.py ${PRETRAIN_PATH} ${STORE_PATH} +``` + +This script converts the model from `PRETRAIN_PATH` and stores the converted model in `STORE_PATH`. + +### Modify config + +Users can modify the backbone's `depth` and the corresponding keys in `arch` in the config according to the configs in the [pycls model zoo](https://github.com/facebookresearch/pycls/blob/master/MODEL_ZOO.md). +The FPN parameter `in_channels` can be found in Figures 15 & 16 of the paper (`wi` in the legend). +This directory already provides some configs, together with their performance, using RegNetX models from the 800MF to 12GF level. +For other pre-trained models or self-implemented RegNet models, users are responsible for checking these parameters themselves. + +**Note**: Although Fig. 15 & 16 also provide `w0`, `wa`, `wm`, `group_w`, and `bot_mul` for `arch`, they are quantized and thus inaccurate; using them sometimes produces a backbone whose keys do not match those of the pre-trained model.
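As a concrete illustration of the two steps above, the override below shows roughly what swapping a RegNet backbone into the Mask R-CNN base config looks like. This is a sketch, not one of the shipped configs: the `arch` string, checkpoint alias, and FPN `in_channels` values are the ones commonly used for RegNetX-3.2GF (stage widths 96/192/432/1008) and should be cross-checked against the configs in this directory and the pycls model zoo.

```python
_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py'

model = dict(
    backbone=dict(
        _delete_=True,                       # drop the ResNet-50 settings from the base config
        type='RegNet',
        arch='regnetx_3.2gf',                # assumed arch key; pick the entry matching your checkpoint
        out_indices=(0, 1, 2, 3),
        frozen_stages=1,
        norm_cfg=dict(type='BN', requires_grad=True),
        norm_eval=True,
        style='pytorch',
        # assumed alias; a checkpoint converted with regnet2mmdet.py also works here
        init_cfg=dict(type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')),
    neck=dict(
        type='FPN',
        in_channels=[96, 192, 432, 1008],    # the stage widths `wi` read off Fig. 15 & 16 / pycls
        out_channels=256,
        num_outs=5))
```

For other RegNetX variants, the `in_channels` values can be read from the corresponding configs in this directory (e.g. `mask_rcnn_regnetx-4GF_fpn_1x_coco.py`).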
+ +## Results + +### Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :---------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| [R-50-FPN](../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py)| pytorch | 1x | 4.4 | 12.0 | 38.2 | 34.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205_050542.log.json) | +|[RegNetX-3.2GF-FPN](./mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py)| pytorch | 1x |5.0 ||40.3|36.6|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_1x_coco_20200520_163141-2a9d1814.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_1x_coco_20200520_163141.log.json) | +|[RegNetX-4.0GF-FPN](./mask_rcnn_regnetx-4GF_fpn_1x_coco.py)| pytorch | 1x |5.5||41.5|37.4|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco/mask_rcnn_regnetx-4GF_fpn_1x_coco_20200517_180217-32e9c92d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco/mask_rcnn_regnetx-4GF_fpn_1x_coco_20200517_180217.log.json) | +| [R-101-FPN](../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py)| pytorch | 1x | 6.4 | 10.3 | 40.0 | 36.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204-1efe0ed5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204_144809.log.json) | +|[RegNetX-6.4GF-FPN](./mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py)| pytorch | 1x |6.1 ||41.0|37.1|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco/mask_rcnn_regnetx-6.4GF_fpn_1x_coco_20200517_180439-3a7aae83.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco/mask_rcnn_regnetx-6.4GF_fpn_1x_coco_20200517_180439.log.json) | +| [X-101-32x4d-FPN](../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py) | pytorch | 1x | 7.6 | 9.4 | 41.9 | 37.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205-478d0b67.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205_034906.log.json) | +|[RegNetX-8.0GF-FPN](./mask_rcnn_regnetx-8GF_fpn_1x_coco.py)| pytorch | 1x |6.4 
||41.7|37.5|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco/mask_rcnn_regnetx-8GF_fpn_1x_coco_20200517_180515-09daa87e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco/mask_rcnn_regnetx-8GF_fpn_1x_coco_20200517_180515.log.json) | +|[RegNetX-12GF-FPN](./mask_rcnn_regnetx-12GF_fpn_1x_coco.py)| pytorch | 1x |7.4 ||42.2|38|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco/mask_rcnn_regnetx-12GF_fpn_1x_coco_20200517_180552-b538bd8b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco/mask_rcnn_regnetx-12GF_fpn_1x_coco_20200517_180552.log.json) | +|[RegNetX-3.2GF-FPN-DCN-C3-C5](./mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py)| pytorch | 1x |5.0 ||40.3|36.6|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco_20200520_172726-75f40794.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco_20200520_172726.log.json) | + +### Faster R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :---------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| [R-50-FPN](../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py)| pytorch | 1x | 4.0 | 18.2 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +|[RegNetX-3.2GF-FPN](./faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py)| pytorch | 1x | 4.5||39.9|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco/faster_rcnn_regnetx-3.2GF_fpn_1x_coco_20200517_175927-126fd9bf.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco/faster_rcnn_regnetx-3.2GF_fpn_1x_coco_20200517_175927.log.json) | +|[RegNetX-3.2GF-FPN](./faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py)| pytorch | 2x | 4.5||41.1|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco/faster_rcnn_regnetx-3.2GF_fpn_2x_coco_20200520_223955-e2081918.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco/faster_rcnn_regnetx-3.2GF_fpn_2x_coco_20200520_223955.log.json) | + +### RetinaNet + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| 
:---------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| [R-50-FPN](../retinanet/retinanet_r50_fpn_1x_coco.py) | pytorch | 1x | 3.8 | 16.6 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_1x_coco/retinanet_r50_fpn_1x_coco_20200130-c2398f9e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_1x_coco/retinanet_r50_fpn_1x_coco_20200130_002941.log.json) | +|[RegNetX-800MF-FPN](./retinanet_regnetx-800MF_fpn_1x_coco.py)| pytorch | 1x |2.5||35.6|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-800MF_fpn_1x_coco/retinanet_regnetx-800MF_fpn_1x_coco_20200517_191403-f6f91d10.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-800MF_fpn_1x_coco/retinanet_regnetx-800MF_fpn_1x_coco_20200517_191403.log.json) | +|[RegNetX-1.6GF-FPN](./retinanet_regnetx-1.6GF_fpn_1x_coco.py)| pytorch | 1x |3.3||37.3|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco/retinanet_regnetx-1.6GF_fpn_1x_coco_20200517_191403-37009a9d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco/retinanet_regnetx-1.6GF_fpn_1x_coco_20200517_191403.log.json) | +|[RegNetX-3.2GF-FPN](./retinanet_regnetx-3.2GF_fpn_1x_coco.py)| pytorch | 1x |4.2 ||39.1|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco/retinanet_regnetx-3.2GF_fpn_1x_coco_20200520_163141-cb1509e8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco/retinanet_regnetx-3.2GF_fpn_1x_coco_20200520_163141.log.json) | + +### Pre-trained models + +We also train some models with longer schedules and multi-scale training. The users could finetune them for downstream tasks. 
+ +| Method | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-----: | :-----: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +|Faster RCNN |[RegNetX-400MF-FPN](./faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py)| pytorch | 3x |2.3 ||37.1|-|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco_20210526_095112-e1967c37.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco_20210526_095112.log.json) | +|Faster RCNN |[RegNetX-800MF-FPN](./faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py)| pytorch | 3x |2.8 ||38.8|-|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco_20210526_095118-a2c70b20.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco_20210526_095118.log.json) | +|Faster RCNN |[RegNetX-1.6GF-FPN](./faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |3.4 ||40.5|-|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-1_20210526_095325-94aa46cc.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-1_20210526_095325.log.json) | +|Faster RCNN |[RegNetX-3.2GF-FPN](./faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |4.4 ||42.3|-|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-3_20210526_095152-e16a5227.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-3_20210526_095152.log.json) | +|Faster RCNN |[RegNetX-4GF-FPN](./faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |4.9 ||42.8|-|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco_20210526_095201-65eaf841.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco_20210526_095201.log.json) | +|Mask RCNN |[RegNetX-3.2GF-FPN](./mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |5.0 ||43.1|38.7|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221-99879813.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221.log.json) | +|Mask RCNN |[RegNetX-400MF-FPN](./mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco.py)| pytorch | 3x |2.5 ||37.6|34.4|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco_20210601_235443-8aac57a4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco_20210601_235443.log.json) | +|Mask RCNN |[RegNetX-800MF-FPN](./mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco.py)| pytorch | 3x |2.9 ||39.5|36.1|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco_20210602_210641-715d51f5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco_20210602_210641.log.json) | +|Mask RCNN |[RegNetX-1.6GF-FPN](./mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco.py)| pytorch | 3x |3.6 ||40.9|37.5|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-1_20210602_210641-6764cff5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-1_20210602_210641.log.json) | +|Mask RCNN |[RegNetX-3.2GF-FPN](./mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py) | pytorch | 3x |5.0 ||43.1|38.7|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221-99879813.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221.log.json) | +|Mask RCNN |[RegNetX-4GF-FPN](./mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco.py) | pytorch | 3x |5.1 ||43.4|39.2|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco_20210602_032621-00f0331c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco_20210602_032621.log.json) | +|Cascade Mask RCNN |[RegNetX-400MF-FPN](./cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py)| pytorch | 3x 
|4.3||41.6|36.4|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco_20210715_211619-5142f449.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco_20210715_211619.log.json) | +|Cascade Mask RCNN |[RegNetX-800MF-FPN](./cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py)| pytorch | 3x |4.8||42.8|37.6|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco_20210715_211616-dcbd13f4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco_20210715_211616.log.json) | +|Cascade Mask RCNN |[RegNetX-1.6GF-FPN](./cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |5.4||44.5|39.0|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-1_20210715_211616-75f29a61.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-1_20210715_211616.log.json) | +|Cascade Mask RCNN |[RegNetX-3.2GF-FPN](./cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |6.4||45.8|40.0|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-3_20210715_211616-b9c2c58b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-3_20210715_211616.log.json) | +|Cascade Mask RCNN |[RegNetX-4GF-FPN](./cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py) | pytorch | 3x |6.9||45.8|40.0|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco_20210715_212034-cbb1be4c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco_20210715_212034.log.json) | + +### Notice + +1. The models are trained using a different weight decay, i.e., `weight_decay=5e-5` according to the setting in ImageNet training. This brings improvement of at least 0.7 AP absolute but does not improve the model using ResNet-50. +2. RetinaNets using RegNets are trained with learning rate 0.02 with gradient clip. We find that using learning rate 0.02 could improve the results by at least 0.7 AP absolute and gradient clip is necessary to stabilize the training. 
However, this does not improve the performance of ResNet-50-FPN RetinaNet. diff --git a/object_detection/configs/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py b/object_detection/configs/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..358d85aa97a0d6cac41ac0daca2f54ce2f143d50 --- /dev/null +++ b/object_detection/configs/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_1.6gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_1.6gf')), + neck=dict( + type='FPN', + in_channels=[72, 168, 408, 912], + out_channels=256, + num_outs=5)) diff --git a/object_detection/configs/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py b/object_detection/configs/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..84645718b37b4a1b7d9bb252c7b1207d20ae6e5e --- /dev/null +++ b/object_detection/configs/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,63 @@ +_base_ = [ + '../common/mstrain_3x_coco_instance.py', + '../_base_/models/cascade_mask_rcnn_r50_fpn.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + # Images are converted to float32 directly after loading in PyCls + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +optimizer = dict(weight_decay=0.00005) diff --git a/object_detection/configs/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py b/object_detection/configs/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2a8990a60d425859a3481a4fbc6fcca72fb5c8ce --- /dev/null +++ b/object_detection/configs/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py @@ 
-0,0 +1,17 @@ +_base_ = 'cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_400mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_400mf')), + neck=dict( + type='FPN', + in_channels=[32, 64, 160, 384], + out_channels=256, + num_outs=5)) diff --git a/object_detection/configs/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py b/object_detection/configs/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3157863459213eb094f8a1c510ebd11be2d0e9c4 --- /dev/null +++ b/object_detection/configs/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_4.0gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_4.0gf')), + neck=dict( + type='FPN', + in_channels=[80, 240, 560, 1360], + out_channels=256, + num_outs=5)) diff --git a/object_detection/configs/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py b/object_detection/configs/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..41376ad88132dfbe956d721ceda88e48f75be435 --- /dev/null +++ b/object_detection/configs/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_800mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_800mf')), + neck=dict( + type='FPN', + in_channels=[64, 128, 288, 672], + out_channels=256, + num_outs=5)) diff --git a/object_detection/configs/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py b/object_detection/configs/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..385b5ca73b5f7432ad60b0a1528ee8c992b31d44 --- /dev/null +++ b/object_detection/configs/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_1.6gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_1.6gf')), + neck=dict( + type='FPN', + in_channels=[72, 168, 408, 912], + out_channels=256, + num_outs=5)) diff --git a/object_detection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py b/object_detection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..88d270e3ce76f631acbef116cd3f7d3e6853ab59 --- /dev/null +++ b/object_detection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py @@ -0,0 +1,57 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + 
'../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) diff --git a/object_detection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py b/object_detection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..612490b4342a1b6fc164ec80bbe0a6c6df147d76 --- /dev/null +++ b/object_detection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py @@ -0,0 +1,3 @@ +_base_ = './faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py b/object_detection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b7e6e1a3125d67f4fd7d99c0ef856bf02402ddb6 --- /dev/null +++ b/object_detection/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,61 @@ +_base_ = [ + '../common/mstrain_3x_coco.py', '../_base_/models/faster_rcnn_r50_fpn.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + 
dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +optimizer = dict(weight_decay=0.00005) diff --git a/object_detection/configs/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py b/object_detection/configs/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0a05f6e4e3c6aa2e85f5473872b5633cdb8bfc50 --- /dev/null +++ b/object_detection/configs/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_400mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_400mf')), + neck=dict( + type='FPN', + in_channels=[32, 64, 160, 384], + out_channels=256, + num_outs=5)) diff --git a/object_detection/configs/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py b/object_detection/configs/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..98b3fc2b5b6cd122a42cab4754336fd355d40cfb --- /dev/null +++ b/object_detection/configs/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_4.0gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_4.0gf')), + neck=dict( + type='FPN', + in_channels=[80, 240, 560, 1360], + out_channels=256, + num_outs=5)) diff --git a/object_detection/configs/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py b/object_detection/configs/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..67f448bdb797459da8898d1846b7e97786163cf4 --- /dev/null +++ b/object_detection/configs/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_800mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_800mf')), + neck=dict( + type='FPN', + in_channels=[64, 128, 288, 672], + out_channels=256, + num_outs=5)) diff --git a/object_detection/configs/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco.py b/object_detection/configs/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7970c3c80531f5975013026390d6262a59363e7e --- /dev/null +++ 
b/object_detection/configs/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,26 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_1.6gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_1.6gf')), + neck=dict( + type='FPN', + in_channels=[72, 168, 408, 912], + out_channels=256, + num_outs=5)) + +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py b/object_detection/configs/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ce3661cffbfee0aa4206c889c2f8517d6d1e0e58 --- /dev/null +++ b/object_detection/configs/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_12gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_12gf')), + neck=dict( + type='FPN', + in_channels=[224, 448, 896, 2240], + out_channels=256, + num_outs=5)) diff --git a/object_detection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py b/object_detection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..44bf0d1176bf3fd585b65dc10fbac455ce01c59c --- /dev/null +++ b/object_detection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py @@ -0,0 +1,58 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + # Images are converted to float32 directly after loading in PyCls + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] 
+data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) diff --git a/object_detection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py b/object_detection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5b53428125e5a8732bfd489195b0f6e179420b47 --- /dev/null +++ b/object_detection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = 'mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf'))) diff --git a/object_detection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py b/object_detection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..aca64d335c7b299d985621adb254d9e4f471cca7 --- /dev/null +++ b/object_detection/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,66 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco.py b/object_detection/configs/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..c38dfa6ab6e1b62b3e558a54c80d9c47fb26daf1 --- /dev/null +++ b/object_detection/configs/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,26 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_400mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_400mf')), + neck=dict( + type='FPN', + in_channels=[32, 64, 160, 384], + out_channels=256, + num_outs=5)) + +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py b/object_detection/configs/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..874d485bec139ec2bfd8253ac82e8f5861d3f9c2 --- /dev/null +++ b/object_detection/configs/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_4.0gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_4.0gf')), + neck=dict( + type='FPN', + in_channels=[80, 240, 560, 1360], + out_channels=256, + num_outs=5)) diff --git a/object_detection/configs/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco.py b/object_detection/configs/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f0b65eabfec8bb293fadef5427204a262a238ad0 --- /dev/null +++ b/object_detection/configs/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,26 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_4.0gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_4.0gf')), + neck=dict( + type='FPN', + in_channels=[80, 240, 560, 1360], + out_channels=256, + num_outs=5)) + +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py b/object_detection/configs/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..99387d8655eaa8bca5276dff7f2b7505afe185ed --- /dev/null +++ b/object_detection/configs/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_6.4gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_6.4gf')), + neck=dict( + type='FPN', + in_channels=[168, 392, 784, 1624], + out_channels=256, 
+ num_outs=5)) diff --git a/object_detection/configs/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco.py b/object_detection/configs/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..335ebabf7919d84faf4cd59f2199b3337e46857b --- /dev/null +++ b/object_detection/configs/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,26 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_800mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_800mf')), + neck=dict( + type='FPN', + in_channels=[64, 128, 288, 672], + out_channels=256, + num_outs=5)) + +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py b/object_detection/configs/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1e7832ff2605346e9743e54023dfd5872dc55567 --- /dev/null +++ b/object_detection/configs/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_8.0gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_8.0gf')), + neck=dict( + type='FPN', + in_channels=[80, 240, 720, 1920], + out_channels=256, + num_outs=5)) diff --git a/object_detection/configs/regnet/metafile.yml b/object_detection/configs/regnet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..5390a3530d940ca615a5c77aed73940a58094f35 --- /dev/null +++ b/object_detection/configs/regnet/metafile.yml @@ -0,0 +1,437 @@ +Collections: + - Name: RegNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + +Models: + - Name: mask_rcnn_regnetx-3.2GF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.0 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_1x_coco_20200520_163141-2a9d1814.pth + + - Name: mask_rcnn_regnetx-4GF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.4 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco/mask_rcnn_regnetx-4GF_fpn_1x_coco_20200517_180217-32e9c92d.pth + + - Name: mask_rcnn_regnetx-6.4GF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.1 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco/mask_rcnn_regnetx-6.4GF_fpn_1x_coco_20200517_180439-3a7aae83.pth + + - Name: mask_rcnn_regnetx-8GF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.4 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco/mask_rcnn_regnetx-8GF_fpn_1x_coco_20200517_180515-09daa87e.pth + + - Name: mask_rcnn_regnetx-12GF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.4 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco/mask_rcnn_regnetx-12GF_fpn_1x_coco_20200517_180552-b538bd8b.pth + + - Name: mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 5.0 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco_20200520_172726-75f40794.pth + + - Name: faster_rcnn_regnetx-3.2GF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco/faster_rcnn_regnetx-3.2GF_fpn_1x_coco_20200517_175927-126fd9bf.pth + + - Name: faster_rcnn_regnetx-3.2GF_fpn_2x_coco + In Collection: RegNet + Config: configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py + Metadata: + Training Memory (GB): 4.5 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco/faster_rcnn_regnetx-3.2GF_fpn_2x_coco_20200520_223955-e2081918.pth + + - Name: retinanet_regnetx-800MF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 2.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 35.6 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-800MF_fpn_1x_coco/retinanet_regnetx-800MF_fpn_1x_coco_20200517_191403-f6f91d10.pth + + - Name: retinanet_regnetx-1.6GF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.3 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco/retinanet_regnetx-1.6GF_fpn_1x_coco_20200517_191403-37009a9d.pth + + - Name: retinanet_regnetx-3.2GF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco/retinanet_regnetx-3.2GF_fpn_1x_coco_20200520_163141-cb1509e8.pth + + - Name: faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 2.3 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco_20210526_095112-e1967c37.pth + + - Name: faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 2.8 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco_20210526_095118-a2c70b20.pth + + - Name: faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 3.4 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-1_20210526_095325-94aa46cc.pth + + - Name: faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 4.4 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-3_20210526_095152-e16a5227.pth + + - Name: faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 4.9 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco_20210526_095201-65eaf841.pth + + - Name: mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.0 + Epochs: 
36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221-99879813.pth + + - Name: mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 2.5 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 34.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco_20210601_235443-8aac57a4.pth + + - Name: mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 2.9 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco_20210602_210641-715d51f5.pth + + - Name: mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 3.6 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-1_20210602_210641-6764cff5.pth + + - Name: mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.0 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-1_20210602_210641-6e63e19c.pth + + - Name: mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.1 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco_20210602_032621-00f0331c.pth + + - Name: cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 4.3 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.4 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco_20210715_211619-5142f449.pth + + - Name: cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 4.8 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco_20210715_211616-dcbd13f4.pth + + - Name: cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.4 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-1_20210715_211616-75f29a61.pth + + - Name: cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 6.4 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-3_20210715_211616-b9c2c58b.pth + + - Name: cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 6.9 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco_20210715_212034-cbb1be4c.pth diff --git a/object_detection/configs/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py b/object_detection/configs/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7395c1bfbfa16670294c721f9f3135da9b9e69ae --- /dev/null +++ b/object_detection/configs/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './retinanet_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_1.6gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_1.6gf')), + neck=dict( + type='FPN', + in_channels=[72, 168, 408, 912], + out_channels=256, + num_outs=5)) diff --git a/object_detection/configs/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py b/object_detection/configs/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f05307c4364c565d410de35cc720db70d22be947 --- /dev/null +++ 
b/object_detection/configs/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py @@ -0,0 +1,59 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/object_detection/configs/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py b/object_detection/configs/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f6f8989320d6ffbcd55148471f62a962c52f9131 --- /dev/null +++ b/object_detection/configs/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './retinanet_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_800mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_800mf')), + neck=dict( + type='FPN', + in_channels=[64, 128, 288, 672], + out_channels=256, + num_outs=5)) diff --git a/object_detection/configs/reppoints/README.md b/object_detection/configs/reppoints/README.md new file mode 100644 index 0000000000000000000000000000000000000000..8e53bd3e0c70b47f2487244a12590b1500bffdd3 --- /dev/null +++ b/object_detection/configs/reppoints/README.md @@ -0,0 +1,63 @@ +# RepPoints: Point Set Representation for Object Detection + +## Abstract + + + +Modern object detectors rely heavily on rectangular bounding boxes, such as anchors, proposals and the final predictions, to represent objects at various recognition stages. The bounding box is convenient to use but provides only a coarse localization of objects and leads to a correspondingly coarse extraction of object features. 
In this paper, we present RepPoints (representative points), a new, finer representation of objects as a set of sample points useful for both localization and recognition. Given ground truth localization and recognition targets for training, RepPoints learn to automatically arrange themselves in a manner that bounds the spatial extent of an object and indicates semantically significant local areas. They furthermore do not require the use of anchors to sample a space of bounding boxes. We show that an anchor-free object detector based on RepPoints can be as effective as the state-of-the-art anchor-based detection methods, with 46.5 AP and 67.4 AP50 on the COCO test-dev detection benchmark, using a ResNet-101 model. + + 
+ +
+ + + + +## Introduction + +By [Ze Yang](https://yangze.tech/), [Shaohui Liu](http://b1ueber2y.me/), and [Han Hu](https://ancientmooner.github.io/). + +We provide code support and configuration files to reproduce the results in the paper for +["RepPoints: Point Set Representation for Object Detection"](https://arxiv.org/abs/1904.11490) on COCO object detection. + + + +**RepPoints**, initially described in [arXiv](https://arxiv.org/abs/1904.11490), is a new representation method for visual objects, on which visual understanding tasks are typically centered. Visual object representation, aiming at both geometric description and appearance feature extraction, is conventionally achieved by `bounding box + RoIPool (RoIAlign)`. The bounding box representation is convenient to use; however, it provides only a rectangular localization of objects that lacks geometric precision and may consequently degrade feature quality. Our new representation, RepPoints, models objects by a `point set` instead of a `bounding box`, whose points learn to adaptively position themselves over an object in a manner that circumscribes the object’s `spatial extent` and enables `semantically aligned feature extraction`. This richer and more flexible representation maintains the convenience of bounding boxes while facilitating various visual understanding applications. This repo demonstrates the effectiveness of RepPoints for COCO object detection. + +Another feature of this repo is the demonstration of an `anchor-free detector`, which can be as effective as state-of-the-art anchor-based detection methods. The anchor-free detector can utilize either `bounding box` or `RepPoints` as the basic object representation. + +## Citation + +``` +@inproceedings{yang2019reppoints, + title={RepPoints: Point Set Representation for Object Detection}, + author={Yang, Ze and Liu, Shaohui and Hu, Han and Wang, Liwei and Lin, Stephen}, + booktitle={The IEEE International Conference on Computer Vision (ICCV)}, + month={Oct}, + year={2019} +} +``` + +## Results and models + +The results on COCO 2017val are shown in the table below. 
+ +| Method | Backbone | GN | Anchor | convert func | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------------:|:---:|:------:|:------------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| BBox | R-50-FPN | Y | single | - | 1x | 3.9 | 15.9 | 36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco_20200329-c98bfa96.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco_20200329_145916.log.json) | +| BBox | R-50-FPN | Y | none | - | 1x | 3.9 | 15.4 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/bbox_r50_grid_center_fpn_gn-neck+Bhead_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco_20200330-00f73d58.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco_20200330_233609.log.json) | +| RepPoints | R-50-FPN | N | none | moment | 1x | 3.3 | 18.5 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_1x_coco/reppoints_moment_r50_fpn_1x_coco_20200330-b73db8d1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_1x_coco/reppoints_moment_r50_fpn_1x_coco_20200330_233609.log.json) | +| RepPoints | R-50-FPN | Y | none | moment | 1x | 3.9 | 17.5 | 38.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco_20200329-4b38409a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco_20200329_145952.log.json) | +| RepPoints | R-50-FPN | Y | none | moment | 2x | 3.9 | - | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco_20200329-91babaa2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco_20200329_150020.log.json) | +| RepPoints | R-101-FPN | Y | none | moment | 2x | 5.8 | 13.7 | 40.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco_20200329-4fbc7310.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco_20200329_132205.log.json) | +| RepPoints | R-101-FPN-DCN | Y | none | moment | 2x | 5.9 | 12.1 | 42.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329-3309fbf2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329_132134.log.json) | +| RepPoints | X-101-FPN-DCN | Y | none | moment | 2x | 7.1 | 9.3 | 44.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329-f87da1ea.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329_132201.log.json) | + +**Notes:** + +- `R-xx`, `X-xx` denote the ResNet and ResNeXt architectures, respectively. +- `DCN` denotes replacing the 3x3 conv with a 3x3 deformable convolution in the `c3-c5` stages of the backbone. +- `none` in the `anchor` column means that a 2-d `center point` (x,y) is used to represent the initial object hypothesis, while `single` denotes that one 4-d anchor box (x,y,w,h) with an IoU-based label assignment criterion is adopted. +- `moment`, `partial MinMax`, `MinMax` in the `convert func` column are three functions for converting a point set to a pseudo box; a minimal sketch of the idea is given after these notes. +- Note that the results here are slightly different from those reported in the paper due to the framework change. While the original paper uses an [MXNet](https://mxnet.apache.org/) implementation, we re-implement the method in [PyTorch](https://pytorch.org/) based on mmdetection. 
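To make the `convert func` column concrete, below is a minimal NumPy sketch (a simplified stand-in for the conversion inside mmdetection's RepPoints head, not the actual implementation) of how a point set can be turned into a pseudo box: `minmax` takes the extremes of the points, while `moment` builds a box from the point mean and standard deviation; in the real head the standard deviation is additionally scaled by a learned moment-transfer factor, fixed here to a constant for illustration.

```python
import numpy as np

def points_to_pseudo_box(points: np.ndarray, method: str = "minmax",
                         moment_scale: float = 1.0) -> np.ndarray:
    """Convert a point set of shape (N, 2) into an (x1, y1, x2, y2) pseudo box.

    Illustrative sketch only: mmdetection operates on batched tensors and
    learns the moment scaling factor during training.
    """
    xs, ys = points[:, 0], points[:, 1]
    if method == "minmax":
        # Tight axis-aligned box around all points.
        return np.array([xs.min(), ys.min(), xs.max(), ys.max()])
    if method == "moment":
        # Box centred at the point mean, sized by the (scaled) std deviation.
        cx, cy = xs.mean(), ys.mean()
        half_w, half_h = xs.std() * moment_scale, ys.std() * moment_scale
        return np.array([cx - half_w, cy - half_h, cx + half_w, cy + half_h])
    raise ValueError(f"unknown transform_method: {method}")

# Example: 9 representative points roughly covering an object.
pts = np.array([[10, 12], [14, 11], [18, 13], [11, 20], [15, 19],
                [19, 21], [12, 28], [16, 27], [20, 29]], dtype=float)
print(points_to_pseudo_box(pts, "minmax"))
print(points_to_pseudo_box(pts, "moment"))
```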
diff --git a/object_detection/configs/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py b/object_detection/configs/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b24c8db768423de12d1e8582bb26dd71218f52ee --- /dev/null +++ b/object_detection/configs/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +model = dict(bbox_head=dict(transform_method='minmax', use_grid_points=True)) diff --git a/object_detection/configs/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py b/object_detection/configs/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8d5013d30a059f067c71e877dbc0bcef94790154 --- /dev/null +++ b/object_detection/configs/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +model = dict( + bbox_head=dict(transform_method='minmax', use_grid_points=True), + # training and testing settings + train_cfg=dict( + init=dict( + assigner=dict( + _delete_=True, + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1)))) diff --git a/object_detection/configs/reppoints/metafile.yml b/object_detection/configs/reppoints/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..cd4312c4e1f9f431fac461e059d73c0302651c12 --- /dev/null +++ b/object_detection/configs/reppoints/metafile.yml @@ -0,0 +1,181 @@ +Collections: + - Name: RepPoints + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Group Normalization + - FPN + - RepPoints + - ResNet + Paper: + URL: https://arxiv.org/abs/1904.11490 + Title: 'RepPoints: Point Set Representation for Object Detection' + README: configs/reppoints/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/reppoints_detector.py#L9 + Version: v2.0.0 + +Models: + - Name: bbox_r50_grid_fpn_gn-neck+head_1x_coco + In Collection: RepPoints + Config: configs/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py + Metadata: + Training Memory (GB): 3.9 + inference time (ms/im): + - value: 62.89 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco_20200329-c98bfa96.pth + + - Name: bbox_r50_grid_center_fpn_gn-neck+Bhead_1x_coco + In Collection: RepPoints + Config: configs/reppoints/bbox_r50_grid_center_fpn_gn-neck+Bhead_1x_coco.py + Metadata: + Training Memory (GB): 3.9 + inference time (ms/im): + - value: 64.94 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco_20200330-00f73d58.pth + + - Name: reppoints_moment_r50_fpn_1x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.3 + inference time (ms/im): + - 
value: 54.05 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_1x_coco/reppoints_moment_r50_fpn_1x_coco_20200330-b73db8d1.pth + + - Name: reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco.py + Metadata: + Training Memory (GB): 3.9 + inference time (ms/im): + - value: 57.14 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco_20200329-4b38409a.pth + + - Name: reppoints_moment_r50_fpn_gn-neck+head_2x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py + Metadata: + Training Memory (GB): 3.9 + inference time (ms/im): + - value: 57.14 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco_20200329-91babaa2.pth + + - Name: reppoints_moment_r101_fpn_gn-neck+head_2x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py + Metadata: + Training Memory (GB): 5.8 + inference time (ms/im): + - value: 72.99 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco_20200329-4fbc7310.pth + + - Name: reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py + Metadata: + Training Memory (GB): 5.9 + inference time (ms/im): + - value: 82.64 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329-3309fbf2.pth + + - Name: reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 107.53 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329-f87da1ea.pth diff 
--git a/object_detection/configs/reppoints/reppoints.png b/object_detection/configs/reppoints/reppoints.png new file mode 100644 index 0000000000000000000000000000000000000000..16d491b9ec62835d91b474b7d69c46bd25da25e5 --- /dev/null +++ b/object_detection/configs/reppoints/reppoints.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c8c4c485b83297b7972632a0fc8dbc2b27a3620afecbc7b42aaf2183e3f98f6b +size 1198109 diff --git a/object_detection/configs/reppoints/reppoints_minmax_r50_fpn_gn-neck+head_1x_coco.py b/object_detection/configs/reppoints/reppoints_minmax_r50_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0f56a46b3c002cdec630bb06df66a4fc9e7804a8 --- /dev/null +++ b/object_detection/configs/reppoints/reppoints_minmax_r50_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +model = dict(bbox_head=dict(transform_method='minmax')) diff --git a/object_detection/configs/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py b/object_detection/configs/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e223d80fab5eabf99da7ee28668d81d0f059d9cc --- /dev/null +++ b/object_detection/configs/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py @@ -0,0 +1,8 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py b/object_detection/configs/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..118547096e67abb82c563ad128dd1a18309dd775 --- /dev/null +++ b/object_detection/configs/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py b/object_detection/configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..158a90670b86a78d872e7db4cf80db72401481b8 --- /dev/null +++ b/object_detection/configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py @@ -0,0 +1,67 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='RepPointsDetector', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_input', + num_outs=5), + bbox_head=dict( + type='RepPointsHead', + num_classes=80, + in_channels=256, + feat_channels=256, + point_feat_channels=256, + stacked_convs=3, + num_points=9, + gradient_mul=0.1, + point_strides=[8, 16, 32, 64, 128], + point_base_scale=4, + loss_cls=dict( + 
type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_init=dict(type='SmoothL1Loss', beta=0.11, loss_weight=0.5), + loss_bbox_refine=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0), + transform_method='moment'), + # training and testing settings + train_cfg=dict( + init=dict( + assigner=dict(type='PointAssigner', scale=4, pos_num=1), + allowed_border=-1, + pos_weight=-1, + debug=False), + refine=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100)) +optimizer = dict(lr=0.01) diff --git a/object_detection/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py b/object_detection/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..337f167c820979f345eef120a936195d8f5975c2 --- /dev/null +++ b/object_detection/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './reppoints_moment_r50_fpn_1x_coco.py' +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict(neck=dict(norm_cfg=norm_cfg), bbox_head=dict(norm_cfg=norm_cfg)) +optimizer = dict(lr=0.01) diff --git a/object_detection/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py b/object_detection/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..feca44aa67126b3326e45b1c9fbbf9e9c3bec11a --- /dev/null +++ b/object_detection/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py @@ -0,0 +1,3 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py b/object_detection/configs/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c0a12d00615aaa347ad6790c110be1304458501d --- /dev/null +++ b/object_detection/configs/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py @@ -0,0 +1,16 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/reppoints/reppoints_partial_minmax_r50_fpn_gn-neck+head_1x_coco.py b/object_detection/configs/reppoints/reppoints_partial_minmax_r50_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9a63bd0862be6d5f363c5d481bade3e8e2e8433a --- /dev/null +++ b/object_detection/configs/reppoints/reppoints_partial_minmax_r50_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +model = dict(bbox_head=dict(transform_method='partial_minmax')) diff --git a/object_detection/configs/res2net/README.md 
b/object_detection/configs/res2net/README.md new file mode 100644 index 0000000000000000000000000000000000000000..3a76bef698086b3a1401e7355121332d7bcb6a72 --- /dev/null +++ b/object_detection/configs/res2net/README.md @@ -0,0 +1,81 @@ +# Res2Net: A New Multi-scale Backbone Architecture + +## Abstract + + + +Representing features at multiple scales is of great importance for numerous vision tasks. Recent advances in backbone convolutional neural networks (CNNs) continually demonstrate stronger multi-scale representation ability, leading to consistent performance gains on a wide range of applications. However, most existing methods represent the multi-scale features in a layer-wise manner. In this paper, we propose a novel building block for CNNs, namely Res2Net, by constructing hierarchical residual-like connections within one single residual block. The Res2Net represents multi-scale features at a granular level and increases the range of receptive fields for each network layer. The proposed Res2Net block can be plugged into the state-of-the-art backbone CNN models, e.g., ResNet, ResNeXt, and DLA. We evaluate the Res2Net block on all these models and demonstrate consistent performance gains over baseline models on widely-used datasets, e.g., CIFAR-100 and ImageNet. Further ablation studies and experimental results on representative computer vision tasks, i.e., object detection, class activation mapping, and salient object detection, further verify the superiority of the Res2Net over the state-of-the-art baseline methods. + + +
+ +
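As a concrete illustration of the hierarchical residual-like connections described above, here is a minimal PyTorch sketch of the multi-scale split used inside a single Res2Net block. The channel count and number of scales are arbitrary example values, and the real backbone additionally includes batch norm, activations, and the surrounding 1x1 convolutions of the bottleneck.

```python
import torch
import torch.nn as nn

class Res2NetSplit(nn.Module):
    """Hierarchical multi-scale split inside one Res2Net block (sketch only).

    The input channels are split into `scales` groups; each group after the
    first is convolved and added to the output of the previous group, which
    widens the receptive field within a single block.
    """

    def __init__(self, channels: int = 64, scales: int = 4):
        super().__init__()
        assert channels % scales == 0
        self.scales = scales
        width = channels // scales
        # One 3x3 conv per group except the first (pass-through) group.
        self.convs = nn.ModuleList(
            nn.Conv2d(width, width, kernel_size=3, padding=1)
            for _ in range(scales - 1))

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        splits = torch.chunk(x, self.scales, dim=1)
        outs = [splits[0]]          # first group passes through unchanged
        prev = None
        for i, conv in enumerate(self.convs):
            inp = splits[i + 1] if prev is None else splits[i + 1] + prev
            prev = conv(inp)        # cascade: each group sees earlier groups
            outs.append(prev)
        return torch.cat(outs, dim=1)

feat = torch.randn(1, 64, 32, 32)
print(Res2NetSplit()(feat).shape)  # torch.Size([1, 64, 32, 32])
```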
+ + + + +## Introduction + + + +We propose a novel building block for CNNs, namely Res2Net, by constructing hierarchical residual-like connections within one single residual block. The Res2Net represents multi-scale features at a granular level and increases the range of receptive fields for each network layer. + +| Backbone |Params. | GFLOPs | top-1 err. | top-5 err. | +| :-------------: |:----: | :-----: | :--------: | :--------: | +| ResNet-101 |44.6 M | 7.8 | 22.63 | 6.44 | +| ResNeXt-101-64x4d |83.5M | 15.5 | 20.40 | - | +| HRNetV2p-W48 | 77.5M | 16.1 | 20.70 | 5.50 | +| Res2Net-101 | 45.2M | 8.3 | 18.77 | 4.64 | + +Compared with other backbone networks, Res2Net requires fewer parameters and FLOPs. + +**Note:** + +- GFLOPs for classification are calculated with image size (224x224). + +## Citation + +```latex +@article{gao2019res2net, + title={Res2Net: A New Multi-scale Backbone Architecture}, + author={Gao, Shang-Hua and Cheng, Ming-Ming and Zhao, Kai and Zhang, Xin-Yu and Yang, Ming-Hsuan and Torr, Philip}, + journal={IEEE TPAMI}, + year={2020}, + doi={10.1109/TPAMI.2019.2938758}, +} +``` + +## Results and Models + +### Faster R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +|R2-101-FPN | pytorch | 2x | 7.4 | - | 43.0 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/res2net/faster_rcnn_r2_101_fpn_2x_coco/faster_rcnn_r2_101_fpn_2x_coco-175f1da6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/res2net/faster_rcnn_r2_101_fpn_2x_coco/faster_rcnn_r2_101_fpn_2x_coco_20200514_231734.log.json) | + +### Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +|R2-101-FPN | pytorch | 2x | 7.9 | - | 43.6 | 38.7 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/res2net/mask_rcnn_r2_101_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/res2net/mask_rcnn_r2_101_fpn_2x_coco/mask_rcnn_r2_101_fpn_2x_coco-17f061e8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/res2net/mask_rcnn_r2_101_fpn_2x_coco/mask_rcnn_r2_101_fpn_2x_coco_20200515_002413.log.json) | + +### Cascade R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +|R2-101-FPN | pytorch | 20e | 7.8 | - | 45.7 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/res2net/cascade_rcnn_r2_101_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/res2net/cascade_rcnn_r2_101_fpn_20e_coco/cascade_rcnn_r2_101_fpn_20e_coco-f4b7b7db.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/res2net/cascade_rcnn_r2_101_fpn_20e_coco/cascade_rcnn_r2_101_fpn_20e_coco_20200515_091644.log.json) | + +### Cascade Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +R2-101-FPN | pytorch | 20e | 9.5 | - | 46.4 | 40.0 
|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco/cascade_mask_rcnn_r2_101_fpn_20e_coco-8a7b41e1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco/cascade_mask_rcnn_r2_101_fpn_20e_coco_20200515_091645.log.json) | + +### Hybrid Task Cascade (HTC) + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R2-101-FPN | pytorch | 20e | - | - | 47.5 | 41.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/res2net/htc_r2_101_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/res2net/htc_r2_101_fpn_20e_coco/htc_r2_101_fpn_20e_coco-3a8d2112.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/res2net/htc_r2_101_fpn_20e_coco/htc_r2_101_fpn_20e_coco_20200515_150029.log.json) | + +- Res2Net ImageNet pretrained models are in [Res2Net-PretrainedModels](https://github.com/Res2Net/Res2Net-PretrainedModels). +- More applications of Res2Net are in [Res2Net-Github](https://github.com/Res2Net/). diff --git a/object_detection/configs/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco.py b/object_detection/configs/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6b6c0010a44be43131defb002767eeb5b5d15600 --- /dev/null +++ b/object_detection/configs/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco.py @@ -0,0 +1,10 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + type='Res2Net', + depth=101, + scales=4, + base_width=26, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://res2net101_v1d_26w_4s'))) diff --git a/object_detection/configs/res2net/cascade_rcnn_r2_101_fpn_20e_coco.py b/object_detection/configs/res2net/cascade_rcnn_r2_101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..10dddbb467993a023f8e498b57f86775b142ce4f --- /dev/null +++ b/object_detection/configs/res2net/cascade_rcnn_r2_101_fpn_20e_coco.py @@ -0,0 +1,10 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + type='Res2Net', + depth=101, + scales=4, + base_width=26, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://res2net101_v1d_26w_4s'))) diff --git a/object_detection/configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py b/object_detection/configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fc2221cbabf293b55098d543ef9f14d9f75f1909 --- /dev/null +++ b/object_detection/configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py @@ -0,0 +1,10 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='Res2Net', + depth=101, + scales=4, + base_width=26, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://res2net101_v1d_26w_4s'))) diff --git a/object_detection/configs/res2net/htc_r2_101_fpn_20e_coco.py b/object_detection/configs/res2net/htc_r2_101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..22d0c5da57aa00daa62ebccab73d29fbe5620938 --- /dev/null +++ b/object_detection/configs/res2net/htc_r2_101_fpn_20e_coco.py @@ -0,0 +1,13 @@ +_base_ = 
'../htc/htc_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='Res2Net', + depth=101, + scales=4, + base_width=26, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://res2net101_v1d_26w_4s'))) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/object_detection/configs/res2net/mask_rcnn_r2_101_fpn_2x_coco.py b/object_detection/configs/res2net/mask_rcnn_r2_101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..33aef1a54d4e6c7d30eb2a2abc67937005a24aae --- /dev/null +++ b/object_detection/configs/res2net/mask_rcnn_r2_101_fpn_2x_coco.py @@ -0,0 +1,10 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='Res2Net', + depth=101, + scales=4, + base_width=26, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://res2net101_v1d_26w_4s'))) diff --git a/object_detection/configs/res2net/metafile.yml b/object_detection/configs/res2net/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..71809f30974f42a3da171c71b269c228c660fce1 --- /dev/null +++ b/object_detection/configs/res2net/metafile.yml @@ -0,0 +1,94 @@ +Collections: + - Name: Res2Net + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Res2Net + Paper: + URL: https://arxiv.org/abs/1904.01169 + Title: 'Res2Net for object detection and instance segmentation' + README: configs/res2net/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/res2net.py#L239 + Version: v2.1.0 + +Models: + - Name: faster_rcnn_r2_101_fpn_2x_coco + In Collection: Res2Net + Config: configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py + Metadata: + Training Memory (GB): 7.4 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/res2net/faster_rcnn_r2_101_fpn_2x_coco/faster_rcnn_r2_101_fpn_2x_coco-175f1da6.pth + + - Name: mask_rcnn_r2_101_fpn_2x_coco + In Collection: Res2Net + Config: configs/res2net/mask_rcnn_r2_101_fpn_2x_coco.py + Metadata: + Training Memory (GB): 7.9 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/res2net/mask_rcnn_r2_101_fpn_2x_coco/mask_rcnn_r2_101_fpn_2x_coco-17f061e8.pth + + - Name: cascade_rcnn_r2_101_fpn_20e_coco + In Collection: Res2Net + Config: configs/res2net/cascade_rcnn_r2_101_fpn_20e_coco.py + Metadata: + Training Memory (GB): 7.8 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/res2net/cascade_rcnn_r2_101_fpn_20e_coco/cascade_rcnn_r2_101_fpn_20e_coco-f4b7b7db.pth + + - Name: cascade_mask_rcnn_r2_101_fpn_20e_coco + In Collection: Res2Net + Config: configs/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco.py + Metadata: + Training Memory (GB): 9.5 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco/cascade_mask_rcnn_r2_101_fpn_20e_coco-8a7b41e1.pth + + - Name: htc_r2_101_fpn_20e_coco + In Collection: Res2Net + Config: 
configs/res2net/htc_r2_101_fpn_20e_coco.py + Metadata: + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/res2net/htc_r2_101_fpn_20e_coco/htc_r2_101_fpn_20e_coco-3a8d2112.pth diff --git a/object_detection/configs/resnest/README.md b/object_detection/configs/resnest/README.md new file mode 100644 index 0000000000000000000000000000000000000000..de80430d324dab22e253c7c889ce09141bde3ec5 --- /dev/null +++ b/object_detection/configs/resnest/README.md @@ -0,0 +1,58 @@ +# ResNeSt: Split-Attention Networks + +## Abstract + + + +It is well known that featuremap attention and multi-path representation are important for visual recognition. In this paper, we present a modularized architecture, which applies the channel-wise attention on different network branches to leverage their success in capturing cross-feature interactions and learning diverse representations. Our design results in a simple and unified computation block, which can be parameterized using only a few variables. Our model, named ResNeSt, outperforms EfficientNet in accuracy and latency trade-off on image classification. In addition, ResNeSt has achieved superior transfer learning results on several public benchmarks serving as the backbone, and has been adopted by the winning entries of COCO-LVIS challenge. + + +
+ +
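The split-attention idea described in the abstract can be summarized with the short PyTorch sketch below: the input is processed by several branches (the radix), and a channel-wise attention computed from their pooled sum, normalized with a softmax across branches, re-weights the branches before they are summed. This is a simplified illustration with assumed sizes, not the mmdetection `ResNeSt` backbone, which also uses cardinality groups, batch norm, and average-down strides.

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

class SplitAttention(nn.Module):
    """Minimal split-attention over `radix` branches (illustrative sketch)."""

    def __init__(self, channels: int = 64, radix: int = 2, reduction: int = 4):
        super().__init__()
        self.radix = radix
        # One 3x3 conv per branch.
        self.convs = nn.ModuleList(
            nn.Conv2d(channels, channels, 3, padding=1) for _ in range(radix))
        inner = max(channels // reduction, 8)
        self.fc1 = nn.Conv2d(channels, inner, 1)
        self.fc2 = nn.Conv2d(inner, channels * radix, 1)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        branches = [conv(x) for conv in self.convs]          # radix branches
        gap = sum(branches).mean(dim=(2, 3), keepdim=True)   # global pooling
        attn = self.fc2(F.relu(self.fc1(gap)))               # (B, C*radix, 1, 1)
        # Softmax over the radix dimension gives per-branch channel weights.
        attn = attn.view(x.size(0), self.radix, -1, 1, 1).softmax(dim=1)
        return sum(a * b for a, b in zip(attn.unbind(dim=1), branches))

feat = torch.randn(1, 64, 32, 32)
print(SplitAttention()(feat).shape)  # torch.Size([1, 64, 32, 32])
```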
+ + + + +## Citation + + + +```latex +@article{zhang2020resnest, +title={ResNeSt: Split-Attention Networks}, +author={Zhang, Hang and Wu, Chongruo and Zhang, Zhongyue and Zhu, Yi and Zhang, Zhi and Lin, Haibin and Sun, Yue and He, Tong and Muller, Jonas and Manmatha, R. and Li, Mu and Smola, Alexander}, +journal={arXiv preprint arXiv:2004.08955}, +year={2020} +} +``` + +## Results and Models + +### Faster R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +|S-50-FPN | pytorch | 1x | 4.8 | - | 42.0 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/resnest/faster_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/faster_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco_20200926_125502-20289c16.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/resnest/faster_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/faster_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco-20200926_125502.log.json) | +|S-101-FPN | pytorch | 1x | 7.1 | - | 44.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/resnest/faster_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/faster_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco_20201006_021058-421517f1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/resnest/faster_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/faster_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco-20201006_021058.log.json) | + +### Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +|S-50-FPN | pytorch | 1x | 5.5 | - | 42.6 | 38.1 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/resnest/mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco_20200926_125503-8a2c3d47.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/resnest/mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco-20200926_125503.log.json) | +|S-101-FPN | pytorch | 1x | 7.8 | - | 45.2 | 40.2 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/resnest/mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco_20201005_215831-af60cdf9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/resnest/mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco-20201005_215831.log.json) | + +### Cascade R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | 
+|S-50-FPN | pytorch | 1x | - | - | 44.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/cascade_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco_20201122_213640-763cc7b5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/cascade_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco-20201005_113242.log.json) | +|S-101-FPN | pytorch | 1x | 8.4 | - | 46.8 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/cascade_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco_20201005_113242-b9459f8f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/cascade_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco-20201122_213640.log.json) | + +### Cascade Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +|S-50-FPN | pytorch | 1x | - | - | 45.4 | 39.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/cascade_mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco_20201122_104428-99eca4c7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/cascade_mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco-20201122_104428.log.json) | +|S-101-FPN | pytorch | 1x | 10.5 | - | 47.7 | 41.4 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/cascade_mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco_20201005_113243-42607475.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/cascade_mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco-20201005_113243.log.json) | diff --git a/object_detection/configs/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py b/object_detection/configs/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..406f39db91bb5c5abacb76db969b9181df453466 --- /dev/null +++ b/object_detection/configs/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py' +model = dict( + backbone=dict( + stem_channels=128, + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='open-mmlab://resnest101'))) diff --git 
a/object_detection/configs/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py b/object_detection/configs/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..83d75372fc561935e43542743c8814ca2734414d --- /dev/null +++ b/object_detection/configs/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py @@ -0,0 +1,118 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='SyncBN', requires_grad=True) +model = dict( + backbone=dict( + type='ResNeSt', + stem_channels=64, + depth=50, + radix=2, + reduction_factor=4, + avg_down_stride=True, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='open-mmlab://resnest50')), + roi_head=dict( + bbox_head=[ + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_head=dict(norm_cfg=norm_cfg))) +# # use ResNeSt img_norm +img_norm_cfg = dict( + mean=[123.68, 116.779, 103.939], std=[58.393, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + 
test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py b/object_detection/configs/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0a7476a3748b6ce80d25188284facfec13d9f86e --- /dev/null +++ b/object_detection/configs/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py' +model = dict( + backbone=dict( + stem_channels=128, + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='open-mmlab://resnest101'))) diff --git a/object_detection/configs/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py b/object_detection/configs/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6ed7730104ca42e23a004827bb7aa0a114fa5e70 --- /dev/null +++ b/object_detection/configs/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py @@ -0,0 +1,116 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='SyncBN', requires_grad=True) +model = dict( + backbone=dict( + type='ResNeSt', + stem_channels=64, + depth=50, + radix=2, + reduction_factor=4, + avg_down_stride=True, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='open-mmlab://resnest50')), + roi_head=dict( + bbox_head=[ + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], )) +# # use ResNeSt img_norm +img_norm_cfg = dict( + mean=[123.68, 116.779, 103.939], std=[58.393, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=False, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + 
dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py b/object_detection/configs/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..40a2f1f2c9d62f173e88893e4ef809e70e2cbf5b --- /dev/null +++ b/object_detection/configs/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py' +model = dict( + backbone=dict( + stem_channels=128, + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='open-mmlab://resnest101'))) diff --git a/object_detection/configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py b/object_detection/configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..eb1ecd224cb86d6c296363ab53fb733848f6224c --- /dev/null +++ b/object_detection/configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py @@ -0,0 +1,62 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='SyncBN', requires_grad=True) +model = dict( + backbone=dict( + type='ResNeSt', + stem_channels=64, + depth=50, + radix=2, + reduction_factor=4, + avg_down_stride=True, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='open-mmlab://resnest50')), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg))) +# # use ResNeSt img_norm +img_norm_cfg = dict( + mean=[123.68, 116.779, 103.939], std=[58.393, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=False, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git 
a/object_detection/configs/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py b/object_detection/configs/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c882ba1421afdcc7100995da7ab10eb16bd3db25 --- /dev/null +++ b/object_detection/configs/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py' +model = dict( + backbone=dict( + stem_channels=128, + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='open-mmlab://resnest101'))) diff --git a/object_detection/configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py b/object_detection/configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4e50deacbdecdccace68f77636edac7a29d4ef57 --- /dev/null +++ b/object_detection/configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py @@ -0,0 +1,64 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='SyncBN', requires_grad=True) +model = dict( + backbone=dict( + type='ResNeSt', + stem_channels=64, + depth=50, + radix=2, + reduction_factor=4, + avg_down_stride=True, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='open-mmlab://resnest50')), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg), + mask_head=dict(norm_cfg=norm_cfg))) +# # use ResNeSt img_norm +img_norm_cfg = dict( + mean=[123.68, 116.779, 103.939], std=[58.393, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/resnest/metafile.yml b/object_detection/configs/resnest/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..3323fad027ae9a407c044200db2b900b294de064 --- /dev/null +++ b/object_detection/configs/resnest/metafile.yml @@ -0,0 +1,136 @@ +Collections: + - Name: ResNeSt + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - ResNeSt + Paper: + URL: https://arxiv.org/abs/2004.08955 + Title: 'ResNeSt: Split-Attention Networks' + README: configs/resnest/README.md + Code: + URL: 
https://github.com/open-mmlab/mmdetection/blob/v2.7.0/mmdet/models/backbones/resnest.py#L273 + Version: v2.7.0 + +Models: + - Name: faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco + In Collection: ResNeSt + Config: configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py + Metadata: + Training Memory (GB): 4.8 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/resnest/faster_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/faster_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco_20200926_125502-20289c16.pth + + - Name: faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco + In Collection: ResNeSt + Config: configs/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py + Metadata: + Training Memory (GB): 7.1 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/resnest/faster_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/faster_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco_20201006_021058-421517f1.pth + + - Name: mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco + In Collection: ResNeSt + Config: configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py + Metadata: + Training Memory (GB): 5.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/resnest/mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco_20200926_125503-8a2c3d47.pth + + - Name: mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco + In Collection: ResNeSt + Config: configs/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py + Metadata: + Training Memory (GB): 7.8 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/resnest/mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco_20201005_215831-af60cdf9.pth + + - Name: cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco + In Collection: ResNeSt + Config: configs/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/cascade_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco_20201122_213640-763cc7b5.pth + + - Name: cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco + In Collection: ResNeSt + Config: configs/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py + Metadata: + Training Memory (GB): 8.4 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/cascade_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco_20201005_113242-b9459f8f.pth + + - Name: 
cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco + In Collection: ResNeSt + Config: configs/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/cascade_mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco_20201122_104428-99eca4c7.pth + + - Name: cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco + In Collection: ResNeSt + Config: configs/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py + Metadata: + Training Memory (GB): 10.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/cascade_mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco_20201005_113243-42607475.pth diff --git a/object_detection/configs/retinanet/README.md b/object_detection/configs/retinanet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..34dd55ed11e4998d1d907eb97e8f71ab9d5a1e27 --- /dev/null +++ b/object_detection/configs/retinanet/README.md @@ -0,0 +1,55 @@ +# Focal Loss for Dense Object Detection + +## Abstract + + + +The highest accuracy object detectors to date are based on a two-stage approach popularized by R-CNN, where a classifier is applied to a sparse set of candidate object locations. In contrast, one-stage detectors that are applied over a regular, dense sampling of possible object locations have the potential to be faster and simpler, but have trailed the accuracy of two-stage detectors thus far. In this paper, we investigate why this is the case. We discover that the extreme foreground-background class imbalance encountered during training of dense detectors is the central cause. We propose to address this class imbalance by reshaping the standard cross entropy loss such that it down-weights the loss assigned to well-classified examples. Our novel Focal Loss focuses training on a sparse set of hard examples and prevents the vast number of easy negatives from overwhelming the detector during training. To evaluate the effectiveness of our loss, we design and train a simple dense detector we call RetinaNet. Our results show that when trained with the focal loss, RetinaNet is able to match the speed of previous one-stage detectors while surpassing the accuracy of all existing state-of-the-art two-stage detectors. + + +
+ +
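The key idea above is that the focal loss multiplies the standard cross-entropy term by a factor that shrinks as an example becomes well classified, so the flood of easy negatives no longer dominates training. A minimal sketch of the per-element sigmoid focal loss, assuming the paper's usual defaults (gamma = 2.0, alpha = 0.25) rather than the exact loss module wired into the configs below:

```python
import torch
import torch.nn.functional as F

def sigmoid_focal_loss(logits, targets, gamma=2.0, alpha=0.25):
    """Illustrative focal loss; `targets` are float 0/1 labels with the same shape as `logits`."""
    p = torch.sigmoid(logits)
    ce = F.binary_cross_entropy_with_logits(logits, targets, reduction='none')
    # Probability assigned to the ground-truth class.
    p_t = p * targets + (1 - p) * (1 - targets)
    # (1 - p_t)^gamma down-weights already well-classified examples.
    modulator = (1 - p_t) ** gamma
    # alpha balances the contribution of positives vs. negatives.
    alpha_t = alpha * targets + (1 - alpha) * (1 - targets)
    return (alpha_t * modulator * ce).mean()

# Example: 8 anchors, 80 classes, random labels.
loss = sigmoid_focal_loss(torch.randn(8, 80), torch.randint(0, 2, (8, 80)).float())
```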
+ + + + +## Citation + + + +```latex +@inproceedings{lin2017focal, + title={Focal loss for dense object detection}, + author={Lin, Tsung-Yi and Goyal, Priya and Girshick, Ross and He, Kaiming and Doll{\'a}r, Piotr}, + booktitle={Proceedings of the IEEE international conference on computer vision}, + year={2017} +} +``` + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 3.5 | 18.6 | 36.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_caffe_fpn_1x_coco/retinanet_r50_caffe_fpn_1x_coco_20200531-f11027c5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_caffe_fpn_1x_coco/retinanet_r50_caffe_fpn_1x_coco_20200531_012518.log.json) | +| R-50-FPN | pytorch | 1x | 3.8 | 19.0 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_1x_coco/retinanet_r50_fpn_1x_coco_20200130-c2398f9e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_1x_coco/retinanet_r50_fpn_1x_coco_20200130_002941.log.json) | +| R-50-FPN (FP16) | pytorch | 1x | 2.8 | 31.6 | 36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fp16/retinanet_r50_fpn_fp16_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fp16/retinanet_r50_fpn_fp16_1x_coco/retinanet_r50_fpn_fp16_1x_coco_20200702-0dbfb212.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fp16/retinanet_r50_fpn_fp16_1x_coco/retinanet_r50_fpn_fp16_1x_coco_20200702_020127.log.json) | +| R-50-FPN | pytorch | 2x | - | - | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r50_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_2x_coco/retinanet_r50_fpn_2x_coco_20200131-fdb43119.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_2x_coco/retinanet_r50_fpn_2x_coco_20200131_114738.log.json) | +| R-101-FPN | caffe | 1x | 5.5 | 14.7 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_caffe_fpn_1x_coco/retinanet_r101_caffe_fpn_1x_coco_20200531-b428fa0f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_caffe_fpn_1x_coco/retinanet_r101_caffe_fpn_1x_coco_20200531_012536.log.json) | +| R-101-FPN | pytorch | 1x | 5.7 | 15.0 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_1x_coco/retinanet_r101_fpn_1x_coco_20200130-7a93545f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_1x_coco/retinanet_r101_fpn_1x_coco_20200130_003055.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 38.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r101_fpn_2x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_2x_coco/retinanet_r101_fpn_2x_coco_20200131-5560aee8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_2x_coco/retinanet_r101_fpn_2x_coco_20200131_114859.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.0 | 12.1 | 39.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_32x4d_fpn_1x_coco/retinanet_x101_32x4d_fpn_1x_coco_20200130-5c8b7ec4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_32x4d_fpn_1x_coco/retinanet_x101_32x4d_fpn_1x_coco_20200130_003004.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 40.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_x101_32x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_32x4d_fpn_2x_coco/retinanet_x101_32x4d_fpn_2x_coco_20200131-237fc5e1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_32x4d_fpn_2x_coco/retinanet_x101_32x4d_fpn_2x_coco_20200131_114812.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.0 | 8.7 | 41.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_1x_coco/retinanet_x101_64x4d_fpn_1x_coco_20200130-366f5af1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_1x_coco/retinanet_x101_64x4d_fpn_1x_coco_20200130_003008.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 40.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_x101_64x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_2x_coco/retinanet_x101_64x4d_fpn_2x_coco_20200131-bca068ab.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_2x_coco/retinanet_x101_64x4d_fpn_2x_coco_20200131_114833.log.json) | + +## Pre-trained Models + +We also train some models with longer schedules and multi-scale training. The users could finetune them for downstream tasks. 
+ +| Backbone | Style | Lr schd | Mem (GB) | box AP | Config | Download | +| :----------------: | :-----: | :-----: | :------: | :----: | :------: | :--------: | +| R-50-FPN | pytorch| 3x | 3.5 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r50_fpn_mstrain_640-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_mstrain_3x_coco/retinanet_r50_fpn_mstrain_3x_coco_20210718_220633-88476508.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_mstrain_3x_coco/retinanet_r50_fpn_mstrain_3x_coco_20210718_220633-88476508.log.json) +| R-101-FPN | caffe | 3x | 5.4 | 40.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r101_caffe_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_caffe_fpn_mstrain_3x_coco/retinanet_r101_caffe_fpn_mstrain_3x_coco_20210721_063439-88a8a944.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_caffe_fpn_mstrain_3x_coco/retinanet_r101_caffe_fpn_mstrain_3x_coco_20210721_063439-88a8a944.log.json) +| R-101-FPN | pytorch| 3x | 5.4 | 41 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r101_fpn_mstrain_640-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_mstrain_3x_coco/retinanet_r101_fpn_mstrain_3x_coco_20210720_214650-7ee888e0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_mstrain_3x_coco/retinanet_r101_fpn_mstrain_3x_coco_20210720_214650-7ee888e0.log.json) +| X-101-64x4d-FPN | pytorch| 3x | 9.8 | 41.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_x101_64x4d_fpn_mstrain_640-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_mstrain_3x_coco/retinanet_x101_64x4d_fpn_mstrain_3x_coco_20210719_051838-022c2187.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_mstrain_3x_coco/retinanet_x101_64x4d_fpn_mstrain_3x_coco_20210719_051838-022c2187.log.json) diff --git a/object_detection/configs/retinanet/metafile.yml b/object_detection/configs/retinanet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..b04a06c8567e0ad36d009a1c10a0e853f825f76d --- /dev/null +++ b/object_detection/configs/retinanet/metafile.yml @@ -0,0 +1,284 @@ +Collections: + - Name: RetinaNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Focal Loss + - FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/1708.02002 + Title: "Focal Loss for Dense Object Detection" + README: configs/retinanet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/retinanet.py#L6 + Version: v2.0.0 + +Models: + - Name: retinanet_r50_caffe_fpn_1x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.5 + inference time (ms/im): + - value: 53.76 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.3 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_caffe_fpn_1x_coco/retinanet_r50_caffe_fpn_1x_coco_20200531-f11027c5.pth + + - Name: retinanet_r50_fpn_1x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.8 + inference time (ms/im): + - value: 52.63 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_1x_coco/retinanet_r50_fpn_1x_coco_20200130-c2398f9e.pth + + - Name: retinanet_r50_fpn_fp16_1x_coco + In Collection: RetinaNet + Config: configs/fp16/retinanet_r50_fpn_fp16_1x_coco.py + Metadata: + Training Memory (GB): 2.8 + Training Techniques: + - SGD with Momentum + - Weight Decay + - Mixed Precision Training + inference time (ms/im): + - value: 31.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP16 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fp16/retinanet_r50_fpn_fp16_1x_coco/retinanet_r50_fpn_fp16_1x_coco_20200702-0dbfb212.pth + + - Name: retinanet_r50_fpn_2x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_r50_fpn_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_2x_coco/retinanet_r50_fpn_2x_coco_20200131-fdb43119.pth + + - Name: retinanet_r50_fpn_mstrain_3x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_r50_fpn_mstrain_640-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_mstrain_3x_coco/retinanet_r50_fpn_mstrain_3x_coco_20210718_220633-88476508.pth + + - Name: retinanet_r101_caffe_fpn_1x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.5 + inference time (ms/im): + - value: 68.03 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_caffe_fpn_1x_coco/retinanet_r101_caffe_fpn_1x_coco_20200531-b428fa0f.pth + + - Name: retinanet_r101_caffe_fpn_mstrain_3x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_r101_caffe_fpn_1x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_caffe_fpn_mstrain_3x_coco/retinanet_r101_caffe_fpn_mstrain_3x_coco_20210721_063439-88a8a944.pth + + - Name: retinanet_r101_fpn_1x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.7 + inference time (ms/im): + - value: 66.67 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.5 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_1x_coco/retinanet_r101_fpn_1x_coco_20200130-7a93545f.pth + + - Name: retinanet_r101_fpn_2x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_r101_fpn_2x_coco.py + Metadata: + Training Memory (GB): 5.7 + inference time (ms/im): + - value: 66.67 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_2x_coco/retinanet_r101_fpn_2x_coco_20200131-5560aee8.pth + + - Name: retinanet_r101_fpn_mstrain_3x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_r101_fpn_2x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_mstrain_3x_coco/retinanet_r101_fpn_mstrain_3x_coco_20210720_214650-7ee888e0.pth + + - Name: retinanet_x101_32x4d_fpn_1x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 82.64 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_32x4d_fpn_1x_coco/retinanet_x101_32x4d_fpn_1x_coco_20200130-5c8b7ec4.pth + + - Name: retinanet_x101_32x4d_fpn_2x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_x101_32x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 82.64 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_32x4d_fpn_2x_coco/retinanet_x101_32x4d_fpn_2x_coco_20200131-237fc5e1.pth + + - Name: retinanet_x101_64x4d_fpn_1x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.0 + inference time (ms/im): + - value: 114.94 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_1x_coco/retinanet_x101_64x4d_fpn_1x_coco_20200130-366f5af1.pth + + - Name: retinanet_x101_64x4d_fpn_2x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_x101_64x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 10.0 + inference time (ms/im): + - value: 114.94 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_2x_coco/retinanet_x101_64x4d_fpn_2x_coco_20200131-bca068ab.pth + + - Name: retinanet_x101_64x4d_fpn_mstrain_3x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_x101_64x4d_fpn_mstrain_640-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: 
COCO + Metrics: + box AP: 41.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_mstrain_3x_coco/retinanet_x101_64x4d_fpn_mstrain_3x_coco_20210719_051838-022c2187.pth diff --git a/object_detection/configs/retinanet/retinanet_r101_caffe_fpn_1x_coco.py b/object_detection/configs/retinanet/retinanet_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..56eaae200fb839eddabc95f18a7a6889cb830100 --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './retinanet_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/object_detection/configs/retinanet/retinanet_r101_caffe_fpn_mstrain_3x_coco.py b/object_detection/configs/retinanet/retinanet_r101_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b87295e69d113105cb85d388e7cf5abc9f9af217 --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_r101_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,7 @@ +_base_ = './retinanet_r50_caffe_fpn_mstrain_1x_coco.py' +# learning policy +model = dict( + pretrained='open-mmlab://detectron2/resnet101_caffe', + backbone=dict(depth=101)) +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/object_detection/configs/retinanet/retinanet_r101_fpn_1x_coco.py b/object_detection/configs/retinanet/retinanet_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a7f06002413dcdf2716975655a582a3eefaf007a --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/retinanet/retinanet_r101_fpn_2x_coco.py b/object_detection/configs/retinanet/retinanet_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..721112a221953bb86dc3259e3991d7f0f740b26c --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_r101_fpn_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './retinanet_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/retinanet/retinanet_r101_fpn_mstrain_640-800_3x_coco.py b/object_detection/configs/retinanet/retinanet_r101_fpn_mstrain_640-800_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6bbcac4fa4f50f6e40372c672fdc6bd1075ec5c4 --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_r101_fpn_mstrain_640-800_3x_coco.py @@ -0,0 +1,6 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', '../common/mstrain_3x_coco.py' +] +# optimizer +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py b/object_detection/configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..04c9af5898971b4a13c46d71362c111e8cabbbaf --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,41 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' 
+model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_1x_coco.py b/object_detection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4d7b8f2bd04598d64f1cf24cfaf9c155f9b21e87 --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_1x_coco.py @@ -0,0 +1,46 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_2x_coco.py b/object_detection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..eea9690eb159fe03865825bb9f9ca5fd6ff99d70 --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './retinanet_r50_caffe_fpn_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git 
a/object_detection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_3x_coco.py b/object_detection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8057650736eaab0b7b01a7957339124f73d6d6b0 --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = './retinanet_r50_caffe_fpn_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/object_detection/configs/retinanet/retinanet_r50_fpn_1x_coco.py b/object_detection/configs/retinanet/retinanet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..04bd696b9589e37ad34c9fdd035b97e271d3b214 --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/retinanet/retinanet_r50_fpn_2x_coco.py b/object_detection/configs/retinanet/retinanet_r50_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..927915fa8c63d380cc4bd62a580ffaad8b1ce386 --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_r50_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/retinanet/retinanet_r50_fpn_90k_coco.py b/object_detection/configs/retinanet/retinanet_r50_fpn_90k_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ceda32798840bf653bf83ab506ddd80f59e3a355 --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_r50_fpn_90k_coco.py @@ -0,0 +1,15 @@ +_base_ = 'retinanet_r50_fpn_1x_coco.py' + +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[60000, 80000]) + +# Runner type +runner = dict(_delete_=True, type='IterBasedRunner', max_iters=90000) + +checkpoint_config = dict(interval=10000) +evaluation = dict(interval=10000, metric='bbox') diff --git a/object_detection/configs/retinanet/retinanet_r50_fpn_fp16_1x_coco.py b/object_detection/configs/retinanet/retinanet_r50_fpn_fp16_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6b6cebe48a166155c24918d4504acebcd104d672 --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_r50_fpn_fp16_1x_coco.py @@ -0,0 +1,3 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +# fp16 settings +fp16 = dict(loss_scale=512.) 
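The `fp16 = dict(loss_scale=512.)` setting above turns on mixed-precision training with a fixed loss scale: the loss is multiplied by the scale before the backward pass so small half-precision gradients do not underflow, and the gradients are divided by the same factor before the optimizer update. A rough sketch of that idea in plain PyTorch (illustration only; the actual training loop relies on mmcv's FP16 utilities, not this helper):

```python
import torch

def step_with_static_loss_scale(model, loss, optimizer, loss_scale=512.0):
    # Scale the loss so tiny FP16 gradients survive the backward pass.
    optimizer.zero_grad()
    (loss * loss_scale).backward()
    # Unscale the accumulated gradients before the weight update.
    for param in model.parameters():
        if param.grad is not None:
            param.grad.div_(loss_scale)
    optimizer.step()
```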
diff --git a/object_detection/configs/retinanet/retinanet_r50_fpn_mstrain_640-800_3x_coco.py b/object_detection/configs/retinanet/retinanet_r50_fpn_mstrain_640-800_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..02a2c291631838781d63f06b286a4c5dd6a009fe --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_r50_fpn_mstrain_640-800_3x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', '../common/mstrain_3x_coco.py' +] +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/retinanet/retinanet_x101_32x4d_fpn_1x_coco.py b/object_detection/configs/retinanet/retinanet_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..765a4c2cc0f69bf13891bf371c94c17b6cd5f30c --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/retinanet/retinanet_x101_32x4d_fpn_2x_coco.py b/object_detection/configs/retinanet/retinanet_x101_32x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..14de96faf70180d7828a670630a8f48a3cd1081d --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_x101_32x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './retinanet_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/retinanet/retinanet_x101_64x4d_fpn_1x_coco.py b/object_detection/configs/retinanet/retinanet_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..948cd18e4d995d18d947b345ba7229b5cad60eb1 --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/retinanet/retinanet_x101_64x4d_fpn_2x_coco.py b/object_detection/configs/retinanet/retinanet_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ad04b6eea793add40c81d1d7096481597357d5bd --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './retinanet_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git 
a/object_detection/configs/retinanet/retinanet_x101_64x4d_fpn_mstrain_640-800_3x_coco.py b/object_detection/configs/retinanet/retinanet_x101_64x4d_fpn_mstrain_640-800_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f6ab512f182a003d246fc11b3caba0a0161d25d0 --- /dev/null +++ b/object_detection/configs/retinanet/retinanet_x101_64x4d_fpn_mstrain_640-800_3x_coco.py @@ -0,0 +1,8 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', '../common/mstrain_3x_coco.py' +] +# optimizer +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict(type='ResNeXt', depth=101, groups=64, base_width=4)) +optimizer = dict(type='SGD', lr=0.01) diff --git a/object_detection/configs/rpn/README.md b/object_detection/configs/rpn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d1bf67494678306039d3cd438151687fe59c4f01 --- /dev/null +++ b/object_detection/configs/rpn/README.md @@ -0,0 +1,43 @@ +# Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks + +## Abstract + + + +State-of-the-art object detection networks depend on region proposal algorithms to hypothesize object locations. Advances like SPPnet and Fast R-CNN have reduced the running time of these detection networks, exposing region proposal computation as a bottleneck. In this work, we introduce a Region Proposal Network (RPN) that shares full-image convolutional features with the detection network, thus enabling nearly cost-free region proposals. An RPN is a fully convolutional network that simultaneously predicts object bounds and objectness scores at each position. The RPN is trained end-to-end to generate high-quality region proposals, which are used by Fast R-CNN for detection. We further merge RPN and Fast R-CNN into a single network by sharing their convolutional features---using the recently popular terminology of neural networks with 'attention' mechanisms, the RPN component tells the unified network where to look. For the very deep VGG-16 model, our detection system has a frame rate of 5fps (including all steps) on a GPU, while achieving state-of-the-art object detection accuracy on PASCAL VOC 2007, 2012, and MS COCO datasets with only 300 proposals per image. In ILSVRC and COCO 2015 competitions, Faster R-CNN and RPN are the foundations of the 1st-place winning entries in several tracks. + + +
+ +
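As the abstract describes, an RPN is a small fully convolutional head that, at every position of a shared feature map, emits an objectness score and four box deltas for each of k anchors. A toy version of such a head (an assumed 3x3 shared conv followed by two 1x1 sibling branches, for illustration only, not the mmdetection `RPNHead` used by the configs below):

```python
import torch
import torch.nn as nn

class TinyRPNHead(nn.Module):
    """Illustrative RPN head: shared 3x3 conv, then 1x1 objectness and box-delta branches."""

    def __init__(self, in_channels=256, num_anchors=3):
        super().__init__()
        self.shared_conv = nn.Conv2d(in_channels, in_channels, kernel_size=3, padding=1)
        self.cls_logits = nn.Conv2d(in_channels, num_anchors, kernel_size=1)       # objectness per anchor
        self.bbox_deltas = nn.Conv2d(in_channels, num_anchors * 4, kernel_size=1)  # (dx, dy, dw, dh) per anchor

    def forward(self, feature):
        x = torch.relu(self.shared_conv(feature))
        return self.cls_logits(x), self.bbox_deltas(x)

# One FPN level: predictions keep the spatial layout of the feature map.
scores, deltas = TinyRPNHead()(torch.randn(1, 256, 50, 50))
```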
+ + + + +## Citation + + + +```latex +@inproceedings{ren2015faster, + title={Faster r-cnn: Towards real-time object detection with region proposal networks}, + author={Ren, Shaoqing and He, Kaiming and Girshick, Ross and Sun, Jian}, + booktitle={Advances in neural information processing systems}, + year={2015} +} +``` + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | AR1000 | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 3.5 | 22.6 | 58.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_caffe_fpn_1x_coco/rpn_r50_caffe_fpn_1x_coco_20200531-5b903a37.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_caffe_fpn_1x_coco/rpn_r50_caffe_fpn_1x_coco_20200531_012334.log.json) | +| R-50-FPN | pytorch | 1x | 3.8 | 22.3 | 58.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_fpn_1x_coco/rpn_r50_fpn_1x_coco_20200218-5525fa2e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_fpn_1x_coco/rpn_r50_fpn_1x_coco_20200218_151240.log.json) | +| R-50-FPN | pytorch | 2x | - | - | 58.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r50_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_fpn_2x_coco/rpn_r50_fpn_2x_coco_20200131-0728c9b3.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_fpn_2x_coco/rpn_r50_fpn_2x_coco_20200131_190631.log.json) | +| R-101-FPN | caffe | 1x | 5.4 | 17.3 | 60.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_caffe_fpn_1x_coco/rpn_r101_caffe_fpn_1x_coco_20200531-0629a2e2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_caffe_fpn_1x_coco/rpn_r101_caffe_fpn_1x_coco_20200531_012345.log.json) | +| R-101-FPN | pytorch | 1x | 5.8 | 16.5 | 59.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_fpn_1x_coco/rpn_r101_fpn_1x_coco_20200131-2ace2249.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_fpn_1x_coco/rpn_r101_fpn_1x_coco_20200131_191000.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 60.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r101_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_fpn_2x_coco/rpn_r101_fpn_2x_coco_20200131-24e3db1a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_fpn_2x_coco/rpn_r101_fpn_2x_coco_20200131_191106.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.0 | 13.0 | 60.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_32x4d_fpn_1x_coco/rpn_x101_32x4d_fpn_1x_coco_20200219-b02646c6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_32x4d_fpn_1x_coco/rpn_x101_32x4d_fpn_1x_coco_20200219_012037.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 61.1 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_x101_32x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_32x4d_fpn_2x_coco/rpn_x101_32x4d_fpn_2x_coco_20200208-d22bd0bb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_32x4d_fpn_2x_coco/rpn_x101_32x4d_fpn_2x_coco_20200208_200752.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.1 | 9.1 | 61.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_64x4d_fpn_1x_coco/rpn_x101_64x4d_fpn_1x_coco_20200208-cde6f7dd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_64x4d_fpn_1x_coco/rpn_x101_64x4d_fpn_1x_coco_20200208_200752.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 61.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_x101_64x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_64x4d_fpn_2x_coco/rpn_x101_64x4d_fpn_2x_coco_20200208-c65f524f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_64x4d_fpn_2x_coco/rpn_x101_64x4d_fpn_2x_coco_20200208_200752.log.json) | diff --git a/object_detection/configs/rpn/rpn_r101_caffe_fpn_1x_coco.py b/object_detection/configs/rpn/rpn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..27be94638a989f238972e85f9c14e1bcba0d09ac --- /dev/null +++ b/object_detection/configs/rpn/rpn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './rpn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/object_detection/configs/rpn/rpn_r101_fpn_1x_coco.py b/object_detection/configs/rpn/rpn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..962728ff08abb4652c617a085649575b6cfdcbf8 --- /dev/null +++ b/object_detection/configs/rpn/rpn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './rpn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/rpn/rpn_r101_fpn_2x_coco.py b/object_detection/configs/rpn/rpn_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ac7671c1c2421c0caa7b42d012cc3a2edc068934 --- /dev/null +++ b/object_detection/configs/rpn/rpn_r101_fpn_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './rpn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/rpn/rpn_r50_caffe_c4_1x_coco.py b/object_detection/configs/rpn/rpn_r50_caffe_c4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6da0ee94906fd8febaf69786976e478ef8f35c9e --- /dev/null +++ b/object_detection/configs/rpn/rpn_r50_caffe_c4_1x_coco.py @@ -0,0 +1,38 @@ +_base_ = [ + '../_base_/models/rpn_r50_caffe_c4.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# dataset settings +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_label=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + 
dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='proposal_fast') diff --git a/object_detection/configs/rpn/rpn_r50_caffe_fpn_1x_coco.py b/object_detection/configs/rpn/rpn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..68c36fa8caa0d0715128b02da03d14e7f5b27862 --- /dev/null +++ b/object_detection/configs/rpn/rpn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,41 @@ +_base_ = './rpn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_label=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/rpn/rpn_r50_fpn_1x_coco.py b/object_detection/configs/rpn/rpn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..26f95a3402f9fd2d54c5919484e2f4958beb8a34 --- /dev/null +++ b/object_detection/configs/rpn/rpn_r50_fpn_1x_coco.py @@ -0,0 +1,18 @@ +_base_ = [ + '../_base_/models/rpn_r50_fpn.py', '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_label=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes']), +] +data = dict(train=dict(pipeline=train_pipeline)) +evaluation = dict(interval=1, metric='proposal_fast') diff --git a/object_detection/configs/rpn/rpn_r50_fpn_2x_coco.py 
b/object_detection/configs/rpn/rpn_r50_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2f264bfe4234c870839ee77e3a671464aacc7813 --- /dev/null +++ b/object_detection/configs/rpn/rpn_r50_fpn_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = './rpn_r50_fpn_1x_coco.py' + +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/rpn/rpn_x101_32x4d_fpn_1x_coco.py b/object_detection/configs/rpn/rpn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d0c73948ac56afa34b9d6c8d22d6158271306b8c --- /dev/null +++ b/object_detection/configs/rpn/rpn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './rpn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/rpn/rpn_x101_32x4d_fpn_2x_coco.py b/object_detection/configs/rpn/rpn_x101_32x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c6880b762abc8f5d3bf12f278054d76958756fb2 --- /dev/null +++ b/object_detection/configs/rpn/rpn_x101_32x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './rpn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/rpn/rpn_x101_64x4d_fpn_1x_coco.py b/object_detection/configs/rpn/rpn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..96e691a912c424f09add038c75631a2e1fefeffc --- /dev/null +++ b/object_detection/configs/rpn/rpn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './rpn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/rpn/rpn_x101_64x4d_fpn_2x_coco.py b/object_detection/configs/rpn/rpn_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4182a39667c47d774a1df9d34a1bc2fe60b45538 --- /dev/null +++ b/object_detection/configs/rpn/rpn_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './rpn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/sabl/README.md b/object_detection/configs/sabl/README.md new file mode 100644 index 0000000000000000000000000000000000000000..c090d00829d7f211c5c3de0df848969c9378829a --- /dev/null +++ b/object_detection/configs/sabl/README.md @@ -0,0 +1,52 @@ +# Side-Aware Boundary Localization for More Precise Object Detection + +## Abstract + + + +Current object detection frameworks mainly rely on 
bounding box regression to localize objects. Despite the remarkable progress in recent years, the precision of bounding box regression remains unsatisfactory, hence limiting performance in object detection. We observe that precise localization requires careful placement of each side of the bounding box. However, the mainstream approach, which focuses on predicting centers and sizes, is not the most effective way to accomplish this task, especially when there exists displacements with large variance between the anchors and the targets. In this paper, we propose an alternative approach, named as Side-Aware Boundary Localization (SABL), where each side of the bounding box is respectively localized with a dedicated network branch. To tackle the difficulty of precise localization in the presence of displacements with large variance, we further propose a two-step localization scheme, which first predicts a range of movement through bucket prediction and then pinpoints the precise position within the predicted bucket. We test the proposed method on both two-stage and single-stage detection frameworks. Replacing the standard bounding box regression branch with the proposed design leads to significant improvements on Faster R-CNN, RetinaNet, and Cascade R-CNN, by 3.0%, 1.7%, and 0.9%, respectively. + + +
+ +
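SABL localizes each side of the box in two steps: a bucketing branch first picks the coarse bin the boundary falls into, and a regression branch then predicts a fine offset inside that bin. A toy decode for a single side (illustration only, with an assumed bucket layout and offset normalization; the configs below use the actual `BucketingBBoxCoder` with `num_buckets=14`):

```python
import torch

def decode_side(bucket_logits, offsets, side_min, side_max):
    """Illustrative two-step decode: coarse bucket choice, then fine offset within it."""
    num_buckets = bucket_logits.numel()
    bucket_width = (side_max - side_min) / num_buckets
    # Step 1: bucket classification picks the coarse bin for this side.
    idx = bucket_logits.argmax()
    bucket_center = side_min + (idx.float() + 0.5) * bucket_width
    # Step 2: the regressed offset (in bucket widths) pinpoints the position inside the bin.
    return bucket_center + offsets[idx] * bucket_width

# Example: 14 buckets spanning a 224-pixel search range for one side.
x_left = decode_side(torch.randn(14), 0.1 * torch.randn(14), side_min=0.0, side_max=224.0)
```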
+ + + + +## Citation + + + +We provide config files to reproduce the object detection results in the ECCV 2020 Spotlight paper for [Side-Aware Boundary Localization for More Precise Object Detection](https://arxiv.org/abs/1912.04260). + +```latex +@inproceedings{Wang_2020_ECCV, + title = {Side-Aware Boundary Localization for More Precise Object Detection}, + author = {Jiaqi Wang and Wenwei Zhang and Yuhang Cao and Kai Chen and Jiangmiao Pang and Tao Gong and Jianping Shi and Chen Change Loy and Dahua Lin}, + booktitle = {ECCV}, + year = {2020} +} +``` + +## Results and Models + +The results on COCO 2017 val is shown in the below table. (results on test-dev are usually slightly higher than val). +Single-scale testing (1333x800) is adopted in all results. + +| Method | Backbone | Lr schd | ms-train | box AP | Config | Download | +| :----------------: | :-------: | :-----: | :------: | :----: | :----------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| SABL Faster R-CNN | R-50-FPN | 1x | N | 39.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r50_fpn_1x_coco/sabl_faster_rcnn_r50_fpn_1x_coco-e867595b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r50_fpn_1x_coco/20200830_130324.log.json) | +| SABL Faster R-CNN | R-101-FPN | 1x | N | 41.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r101_fpn_1x_coco/sabl_faster_rcnn_r101_fpn_1x_coco-f804c6c1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r101_fpn_1x_coco/20200830_183949.log.json) | +| SABL Cascade R-CNN | R-50-FPN | 1x | N | 41.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco/sabl_cascade_rcnn_r50_fpn_1x_coco-e1748e5e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco/20200831_033726.log.json) | +| SABL Cascade R-CNN | R-101-FPN | 1x | N | 43.0 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco/sabl_cascade_rcnn_r101_fpn_1x_coco-2b83e87c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco/20200831_141745.log.json) | + +| Method | Backbone | GN | Lr schd | ms-train | box AP | Config | Download | +| :------------: | :-------: | :---: | :-----: | :---------: | :----: | :---------------------------------------------------------------------------------------------------------------------------: | 
:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| SABL RetinaNet | R-50-FPN | N | 1x | N | 37.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_1x_coco/sabl_retinanet_r50_fpn_1x_coco-6c54fd4f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_1x_coco/20200830_053451.log.json) | +| SABL RetinaNet | R-50-FPN | Y | 1x | N | 38.8 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_gn_1x_coco/sabl_retinanet_r50_fpn_gn_1x_coco-e16dfcf1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_gn_1x_coco/20200831_141955.log.json) | +| SABL RetinaNet | R-101-FPN | N | 1x | N | 39.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_1x_coco/sabl_retinanet_r101_fpn_1x_coco-42026904.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_1x_coco/20200831_034256.log.json) | +| SABL RetinaNet | R-101-FPN | Y | 1x | N | 40.5 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r101_fpn_gn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_1x_coco/sabl_retinanet_r101_fpn_gn_1x_coco-40a893e8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_1x_coco/20200830_201422.log.json) | +| SABL RetinaNet | R-101-FPN | Y | 2x | Y (640~800) | 42.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco-1e63382c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco/20200830_144807.log.json) | +| SABL RetinaNet | R-101-FPN | Y | 2x | Y (480~960) | 43.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco-5342f857.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco/20200830_164537.log.json) | diff --git a/object_detection/configs/sabl/metafile.yml b/object_detection/configs/sabl/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..23c51cffb574519f4983edc0b510b3dd7f5dd6fa --- /dev/null +++ b/object_detection/configs/sabl/metafile.yml @@ -0,0 +1,140 @@ +Collections: + - Name: SABL + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - ResNet + - SABL + Paper: + URL: https://arxiv.org/abs/1912.04260 + 
Title: 'Side-Aware Boundary Localization for More Precise Object Detection' + README: configs/sabl/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.4.0/mmdet/models/roi_heads/bbox_heads/sabl_head.py#L14 + Version: v2.4.0 + +Models: + - Name: sabl_faster_rcnn_r50_fpn_1x_coco + In Collection: SABL + Config: configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r50_fpn_1x_coco/sabl_faster_rcnn_r50_fpn_1x_coco-e867595b.pth + + - Name: sabl_faster_rcnn_r101_fpn_1x_coco + In Collection: SABL + Config: configs/sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r101_fpn_1x_coco/sabl_faster_rcnn_r101_fpn_1x_coco-f804c6c1.pth + + - Name: sabl_cascade_rcnn_r50_fpn_1x_coco + In Collection: SABL + Config: configs/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco/sabl_cascade_rcnn_r50_fpn_1x_coco-e1748e5e.pth + + - Name: sabl_cascade_rcnn_r101_fpn_1x_coco + In Collection: SABL + Config: configs/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco/sabl_cascade_rcnn_r101_fpn_1x_coco-2b83e87c.pth + + - Name: sabl_retinanet_r50_fpn_1x_coco + In Collection: SABL + Config: configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_1x_coco/sabl_retinanet_r50_fpn_1x_coco-6c54fd4f.pth + + - Name: sabl_retinanet_r50_fpn_gn_1x_coco + In Collection: SABL + Config: configs/sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_gn_1x_coco/sabl_retinanet_r50_fpn_gn_1x_coco-e16dfcf1.pth + + - Name: sabl_retinanet_r101_fpn_1x_coco + In Collection: SABL + Config: configs/sabl/sabl_retinanet_r101_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_1x_coco/sabl_retinanet_r101_fpn_1x_coco-42026904.pth + + - Name: sabl_retinanet_r101_fpn_gn_1x_coco + In Collection: SABL + Config: configs/sabl/sabl_retinanet_r101_fpn_gn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_1x_coco/sabl_retinanet_r101_fpn_gn_1x_coco-40a893e8.pth + + - Name: sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco + In Collection: SABL + Config: configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco-1e63382c.pth + + - Name: sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco + In Collection: SABL + Config: configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco-5342f857.pth diff --git a/object_detection/configs/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco.py b/object_detection/configs/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..64fe2304c0f34c366ff443d4531ae07c48d915d8 --- /dev/null +++ b/object_detection/configs/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,90 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + roi_head=dict(bbox_head=[ + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.7), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)), + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.5), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)), + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.3), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, loss_weight=1.0)) + ])) diff --git a/object_detection/configs/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py b/object_detection/configs/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..4b28a59280e6701d31afeeaae7ae12cdbd4fb95e --- /dev/null +++ b/object_detection/configs/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,86 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + roi_head=dict(bbox_head=[ + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.7), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)), + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.5), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)), + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.3), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, loss_weight=1.0)) + ])) diff --git a/object_detection/configs/sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py b/object_detection/configs/sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e48d4259b78aa4494a9de1deabdf40c0d37d9816 --- /dev/null +++ b/object_detection/configs/sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,38 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + roi_head=dict( + bbox_head=dict( + _delete_=True, + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + 
reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.7), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)))) diff --git a/object_detection/configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py b/object_detection/configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..732c7ba3f607e2ac68f16acceddd16b1269aa2cf --- /dev/null +++ b/object_detection/configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,34 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict( + _delete_=True, + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.7), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)))) diff --git a/object_detection/configs/sabl/sabl_retinanet_r101_fpn_1x_coco.py b/object_detection/configs/sabl/sabl_retinanet_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b08e916c9f9d158dd89a3a13418cc51bd25ef953 --- /dev/null +++ b/object_detection/configs/sabl/sabl_retinanet_r101_fpn_1x_coco.py @@ -0,0 +1,54 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/sabl/sabl_retinanet_r101_fpn_gn_1x_coco.py 
b/object_detection/configs/sabl/sabl_retinanet_r101_fpn_gn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fc30d63dc58b44deda01790e6f432db0fe957a1e --- /dev/null +++ b/object_detection/configs/sabl/sabl_retinanet_r101_fpn_gn_1x_coco.py @@ -0,0 +1,56 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + norm_cfg=norm_cfg, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco.py b/object_detection/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e8fe16646278fba3aba64742bb9912984720489b --- /dev/null +++ b/object_detection/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco.py @@ -0,0 +1,73 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +# model settings +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + norm_cfg=norm_cfg, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + 
pos_weight=-1, + debug=False)) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 960)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco.py b/object_detection/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..30c43399f7bf2ec1f67aee3265565a8067fe2b6a --- /dev/null +++ b/object_detection/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco.py @@ -0,0 +1,73 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +# model settings +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + norm_cfg=norm_cfg, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py b/object_detection/configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6fe6bd660230eedf70f87072e5abec66036d865f --- /dev/null +++ b/object_detection/configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,50 @@ +_base_ = [ + 
'../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py b/object_detection/configs/sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6acf080afe1b04e50467b16b60700feb5c12e886 --- /dev/null +++ b/object_detection/configs/sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py @@ -0,0 +1,52 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + norm_cfg=norm_cfg, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/scnet/README.md b/object_detection/configs/scnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..52f3b4934c506816ccd703916c33b3874a6dc81f --- /dev/null +++ b/object_detection/configs/scnet/README.md @@ -0,0 +1,65 @@ +# SCNet: Training Inference Sample Consistency for Instance Segmentation + +## Abstract + + + +Cascaded architectures have brought significant performance improvement in object detection and instance segmentation. 
However, there are lingering issues regarding the disparity in the Intersection-over-Union (IoU) distribution of the samples between training and inference. This disparity can potentially degrade detection accuracy. This paper proposes an architecture referred to as Sample Consistency Network (SCNet) to ensure that the IoU distribution of the samples at training time is close to that at inference time. Furthermore, SCNet incorporates feature relay and utilizes global contextual information to further reinforce the reciprocal relationships among classifying, detecting, and segmenting sub-tasks. Extensive experiments on the standard COCO dataset reveal the effectiveness of the proposed method over multiple evaluation metrics, including box AP, mask AP, and inference speed. In particular, while running 38% faster, the proposed SCNet improves box and mask AP by 1.3 and 2.3 points, respectively, compared to the strong Cascade Mask R-CNN baseline. + + +
+ + + + +## Citation + + + +We provide the code for reproducing experiment results of [SCNet](https://arxiv.org/abs/2012.10150). + +``` +@inproceedings{vu2019cascade, + title={SCNet: Training Inference Sample Consistency for Instance Segmentation}, + author={Vu, Thang and Haeyong, Kang and Yoo, Chang D}, + booktitle={AAAI}, + year={2021} +} +``` + +## Dataset + +SCNet requires COCO and [COCO-stuff](http://calvin.inf.ed.ac.uk/wp-content/uploads/data/cocostuffdataset/stuffthingmaps_trainval2017.zip) dataset for training. You need to download and extract it in the COCO dataset path. +The directory should be like this. + +```none +mmdetection +├── mmdet +├── tools +├── configs +├── data +│ ├── coco +│ │ ├── annotations +│ │ ├── train2017 +│ │ ├── val2017 +│ │ ├── test2017 +| | ├── stuffthingmaps +``` + +## Results and Models + +The results on COCO 2017val are shown in the below table. (results on test-dev are usually slightly higher than val) + +| Backbone | Style | Lr schd | Mem (GB) | Inf speed (fps) | box AP | mask AP | TTA box AP | TTA mask AP | Config | Download | +|:---------------:|:-------:|:-------:|:--------:|:---------------:|:------:|:-------:|:----------:|:-----------:|:------:|:------------:| +| R-50-FPN | pytorch | 1x | 7.0 | 6.2 | 43.5 | 39.2 | 44.8 | 40.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/scnet/scnet_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/scnet/scnet_r50_fpn_1x_coco/scnet_r50_fpn_1x_coco-c3f09857.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/scnet/scnet_r50_fpn_1x_coco/scnet_r50_fpn_1x_coco_20210117_192725.log.json) | +| R-50-FPN | pytorch | 20e | 7.0 | 6.2 | 44.5 | 40.0 | 45.8 | 41.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/scnet/scnet_r50_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/scnet/scnet_r50_fpn_20e_coco/scnet_r50_fpn_20e_coco-a569f645.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/scnet/scnet_r50_fpn_20e_coco/scnet_r50_fpn_20e_coco_20210116_060148.log.json) | +| R-101-FPN | pytorch | 20e | 8.9 | 5.8 | 45.8 | 40.9 | 47.3 | 42.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/scnet/scnet_r101_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/scnet/scnet_r101_fpn_20e_coco/scnet_r101_fpn_20e_coco-294e312c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/scnet/scnet_r101_fpn_20e_coco/scnet_r101_fpn_20e_coco_20210118_175824.log.json) | +| X-101-64x4d-FPN | pytorch | 20e | 13.2 | 4.9 | 47.5 | 42.3 | 48.9 | 44.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/scnet/scnet_x101_64x4d_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/scnet/scnet_x101_64x4d_fpn_20e_coco/scnet_x101_64x4d_fpn_20e_coco-fb09dec9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/scnet/scnet_x101_64x4d_fpn_20e_coco/scnet_x101_64x4d_fpn_20e_coco_20210120_045959.log.json) | + +### Notes + +- Training hyper-parameters are identical to those of [HTC](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc). +- TTA means Test Time Augmentation, which applies horizontal flip and multi-scale testing. Refer to [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/scnet/scnet_r50_fpn_1x_coco.py). 
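As a quick sanity check against the directory layout in the Dataset section above, the short sketch below verifies that the COCO and COCO-stuff folders are in place before training. It assumes mmdetection's default `data/coco` root as shown in the tree; adjust the path to your setup.

```python
from pathlib import Path

# Expected layout from the Dataset section above; data/coco is an assumed default root.
coco = Path("data/coco")
required = ["annotations", "train2017", "val2017", "stuffthingmaps"]
missing = [name for name in required if not (coco / name).is_dir()]
if missing:
    print(f"missing under {coco}: {missing}")
else:
    print("COCO + COCO-stuff layout looks complete")
```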
diff --git a/object_detection/configs/scnet/metafile.yml b/object_detection/configs/scnet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..15eaebfa80f5594357d825316e969e8afdfa9c1e --- /dev/null +++ b/object_detection/configs/scnet/metafile.yml @@ -0,0 +1,116 @@ +Collections: + - Name: SCNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - ResNet + - SCNet + Paper: + URL: https://arxiv.org/abs/2012.10150 + Title: 'SCNet: Training Inference Sample Consistency for Instance Segmentation' + README: configs/scnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.9.0/mmdet/models/detectors/scnet.py#L6 + Version: v2.9.0 + +Models: + - Name: scnet_r50_fpn_1x_coco + In Collection: SCNet + Config: configs/scnet/scnet_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 161.29 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/scnet/scnet_r50_fpn_1x_coco/scnet_r50_fpn_1x_coco-c3f09857.pth + + - Name: scnet_r50_fpn_20e_coco + In Collection: SCNet + Config: configs/scnet/scnet_r50_fpn_20e_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 161.29 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/scnet/scnet_r50_fpn_20e_coco/scnet_r50_fpn_20e_coco-a569f645.pth + + - Name: scnet_r101_fpn_20e_coco + In Collection: SCNet + Config: configs/scnet/scnet_r101_fpn_20e_coco.py + Metadata: + Training Memory (GB): 8.9 + inference time (ms/im): + - value: 172.41 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/scnet/scnet_r101_fpn_20e_coco/scnet_r101_fpn_20e_coco-294e312c.pth + + - Name: scnet_x101_64x4d_fpn_20e_coco + In Collection: SCNet + Config: configs/scnet/scnet_x101_64x4d_fpn_20e_coco.py + Metadata: + Training Memory (GB): 13.2 + inference time (ms/im): + - value: 204.08 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 42.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/scnet/scnet_x101_64x4d_fpn_20e_coco/scnet_x101_64x4d_fpn_20e_coco-fb09dec9.pth diff --git a/object_detection/configs/scnet/scnet_r101_fpn_20e_coco.py b/object_detection/configs/scnet/scnet_r101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ebba52978b23c07a68e3563033c860a95dd515b6 --- /dev/null +++ b/object_detection/configs/scnet/scnet_r101_fpn_20e_coco.py @@ -0,0 +1,6 @@ +_base_ = './scnet_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + depth=101, + 
init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/scnet/scnet_r50_fpn_1x_coco.py b/object_detection/configs/scnet/scnet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fe03b0d4d7c4556a486a13a1d543a668b5d3fcab --- /dev/null +++ b/object_detection/configs/scnet/scnet_r50_fpn_1x_coco.py @@ -0,0 +1,136 @@ +_base_ = '../htc/htc_r50_fpn_1x_coco.py' +# model settings +model = dict( + type='SCNet', + roi_head=dict( + _delete_=True, + type='SCNetRoIHead', + num_stages=3, + stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='SCNetBBoxHead', + num_shared_fcs=2, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='SCNetBBoxHead', + num_shared_fcs=2, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='SCNetBBoxHead', + num_shared_fcs=2, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=dict( + type='SCNetMaskHead', + num_convs=12, + in_channels=256, + conv_out_channels=256, + num_classes=80, + conv_to_res=True, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)), + semantic_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[8]), + semantic_head=dict( + type='SCNetSemanticHead', + num_ins=5, + fusion_level=1, + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=183, + loss_seg=dict( + type='CrossEntropyLoss', ignore_index=255, loss_weight=0.2), + conv_to_res=True), + glbctx_head=dict( + type='GlobalContextHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_weight=3.0, + conv_to_res=True), + feat_relay_head=dict( + type='FeatureRelayHead', + in_channels=1024, + out_conv_channels=256, + roi_feat_size=7, + scale_factor=2))) + +# uncomment below code to enable test time augmentations +# img_norm_cfg = dict( +# mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +# test_pipeline = [ +# dict(type='LoadImageFromFile'), +# dict( +# type='MultiScaleFlipAug', +# img_scale=[(600, 900), (800, 1200), (1000, 1500), (1200, 
1800), +# (1400, 2100)], +# flip=True, +# transforms=[ +# dict(type='Resize', keep_ratio=True), +# dict(type='RandomFlip', flip_ratio=0.5), +# dict(type='Normalize', **img_norm_cfg), +# dict(type='Pad', size_divisor=32), +# dict(type='ImageToTensor', keys=['img']), +# dict(type='Collect', keys=['img']), +# ]) +# ] +# data = dict( +# val=dict(pipeline=test_pipeline), +# test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/scnet/scnet_r50_fpn_20e_coco.py b/object_detection/configs/scnet/scnet_r50_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3b121a6a2836ac7626f7b383ada9508f8b9d972d --- /dev/null +++ b/object_detection/configs/scnet/scnet_r50_fpn_20e_coco.py @@ -0,0 +1,4 @@ +_base_ = './scnet_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/object_detection/configs/scnet/scnet_x101_64x4d_fpn_20e_coco.py b/object_detection/configs/scnet/scnet_x101_64x4d_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1e54b030fa68f76f22edf66e3594d66a13c2c672 --- /dev/null +++ b/object_detection/configs/scnet/scnet_x101_64x4d_fpn_20e_coco.py @@ -0,0 +1,15 @@ +_base_ = './scnet_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/scnet/scnet_x101_64x4d_fpn_8x1_20e_coco.py b/object_detection/configs/scnet/scnet_x101_64x4d_fpn_8x1_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9f3ce6d14e6b3474d78c8de3f3565b0029dc067e --- /dev/null +++ b/object_detection/configs/scnet/scnet_x101_64x4d_fpn_8x1_20e_coco.py @@ -0,0 +1,3 @@ +_base_ = './scnet_x101_64x4d_fpn_20e_coco.py' +data = dict(samples_per_gpu=1, workers_per_gpu=1) +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/scratch/README.md b/object_detection/configs/scratch/README.md new file mode 100644 index 0000000000000000000000000000000000000000..68d3a72fd651463055987f035036828985c6bb32 --- /dev/null +++ b/object_detection/configs/scratch/README.md @@ -0,0 +1,39 @@ +# Rethinking ImageNet Pre-training + +## Abstract + + + +We report competitive results on object detection and instance segmentation on the COCO dataset using standard models trained from random initialization. The results are no worse than their ImageNet pre-training counterparts even when using the hyper-parameters of the baseline system (Mask R-CNN) that were optimized for fine-tuning pre-trained models, with the sole exception of increasing the number of training iterations so the randomly initialized models may converge. Training from random initialization is surprisingly robust; our results hold even when: (i) using only 10% of the training data, (ii) for deeper and wider models, and (iii) for multiple tasks and metrics. Experiments show that ImageNet pre-training speeds up convergence early in training, but does not necessarily provide regularization or improve final target task accuracy. To push the envelope we demonstrate 50.9 AP on COCO object detection without using any external data---a result on par with the top COCO 2017 competition results that used ImageNet pre-training. 
These observations challenge the conventional wisdom of ImageNet pre-training for dependent tasks, and we expect these discoveries will encourage people to rethink the current de facto paradigm of 'pre-training and fine-tuning' in computer vision. + + +
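The from-scratch configs added later in this diff implement this recipe through a handful of settings. The sketch below is a hypothetical inspection snippet, assuming mmcv 1.x is installed and that it is run from the directory containing `configs/` so the relative `_base_` paths resolve; it only prints the fields that turn off pre-training.

```python
from mmcv import Config  # mmcv 1.x, as used by mmdetection v2.x configs

# Assumed to be run from the folder that contains configs/ (e.g. object_detection/).
cfg = Config.fromfile("configs/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py")

print(cfg.model.backbone.init_cfg)       # None -> random initialization, no ImageNet weights
print(cfg.model.backbone.frozen_stages)  # -1   -> no backbone stages are frozen
print(cfg.model.backbone.norm_cfg)       # GroupNorm instead of (frozen) BatchNorm
print(cfg.runner.max_epochs)             # 73   -> roughly a 6x schedule vs. the 12-epoch 1x
```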
+ + + + +## Citation + + + +```latex +@article{he2018rethinking, + title={Rethinking imagenet pre-training}, + author={He, Kaiming and Girshick, Ross and Doll{\'a}r, Piotr}, + journal={arXiv preprint arXiv:1811.08883}, + year={2018} +} +``` + +## Results and Models + +| Model | Backbone | Style | Lr schd | box AP | mask AP | Config | Download | +|:------------:|:---------:|:-------:|:-------:|:------:|:-------:|:------:|:--------:| +| Faster R-CNN | R-50-FPN | pytorch | 6x | 40.7 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco/scratch_faster_rcnn_r50_fpn_gn_6x_bbox_mAP-0.407_20200201_193013-90813d01.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco/scratch_faster_rcnn_r50_fpn_gn_6x_20200201_193013.log.json) | +| Mask R-CNN | R-50-FPN | pytorch | 6x | 41.2 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco/scratch_mask_rcnn_r50_fpn_gn_6x_bbox_mAP-0.412__segm_mAP-0.374_20200201_193051-1e190a40.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco/scratch_mask_rcnn_r50_fpn_gn_6x_20200201_193051.log.json) | + +Note: + +- The above models are trained with 16 GPUs. diff --git a/object_detection/configs/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py b/object_detection/configs/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..55aa3a6e73b9e56fb1d285272b3011fad8e9e11f --- /dev/null +++ b/object_detection/configs/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py @@ -0,0 +1,24 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + frozen_stages=-1, + zero_init_residual=False, + norm_cfg=norm_cfg, + init_cfg=None), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg))) +# optimizer +optimizer = dict(paramwise_cfg=dict(norm_decay_mult=0)) +optimizer_config = dict(_delete_=True, grad_clip=None) +# learning policy +lr_config = dict(warmup_ratio=0.1, step=[65, 71]) +runner = dict(type='EpochBasedRunner', max_epochs=73) diff --git a/object_detection/configs/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py b/object_detection/configs/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cc52cb8f7618f57f280f4e5d640f99839bf66278 --- /dev/null +++ b/object_detection/configs/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py @@ -0,0 +1,25 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + frozen_stages=-1, + zero_init_residual=False, + norm_cfg=norm_cfg, + init_cfg=None), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + 
type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg), + mask_head=dict(norm_cfg=norm_cfg))) +# optimizer +optimizer = dict(paramwise_cfg=dict(norm_decay_mult=0)) +optimizer_config = dict(_delete_=True, grad_clip=None) +# learning policy +lr_config = dict(warmup_ratio=0.1, step=[65, 71]) +runner = dict(type='EpochBasedRunner', max_epochs=73) diff --git a/object_detection/configs/scratch/metafile.yml b/object_detection/configs/scratch/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..65025fac3927a138597e5947c1af0c213c3503fb --- /dev/null +++ b/object_detection/configs/scratch/metafile.yml @@ -0,0 +1,48 @@ +Collections: + - Name: Rethinking ImageNet Pre-training + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - RPN + - ResNet + Paper: + URL: https://arxiv.org/abs/1811.08883 + Title: 'Rethinking ImageNet Pre-training' + README: configs/scratch/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/configs/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py + Version: v2.0.0 + +Models: + - Name: faster_rcnn_r50_fpn_gn-all_scratch_6x_coco + In Collection: Rethinking ImageNet Pre-training + Config: configs/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py + Metadata: + Epochs: 72 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco/scratch_faster_rcnn_r50_fpn_gn_6x_bbox_mAP-0.407_20200201_193013-90813d01.pth + + - Name: mask_rcnn_r50_fpn_gn-all_scratch_6x_coco + In Collection: Rethinking ImageNet Pre-training + Config: configs/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py + Metadata: + Epochs: 72 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco/scratch_mask_rcnn_r50_fpn_gn_6x_bbox_mAP-0.412__segm_mAP-0.374_20200201_193051-1e190a40.pth diff --git a/object_detection/configs/seesaw_loss/README.md b/object_detection/configs/seesaw_loss/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9c1fcb0c7699f7fc8a2e301d567d7b2a6f30b203 --- /dev/null +++ b/object_detection/configs/seesaw_loss/README.md @@ -0,0 +1,53 @@ +# Seesaw Loss for Long-Tailed Instance Segmentation + +## Abstract + + + +Instance segmentation has witnessed a remarkable progress on class-balanced benchmarks. However, they fail to perform as accurately in real-world scenarios, where the category distribution of objects naturally comes with a long tail. Instances of head classes dominate a long-tailed dataset and they serve as negative samples of tail categories. The overwhelming gradients of negative samples on tail classes lead to a biased learning process for classifiers. Consequently, objects of tail categories are more likely to be misclassified as backgrounds or head categories. To tackle this problem, we propose Seesaw Loss to dynamically re-balance gradients of positive and negative samples for each category, with two complementary factors, i.e., mitigation factor and compensation factor. The mitigation factor reduces punishments to tail categories w.r.t. the ratio of cumulative training instances between different categories. 
Meanwhile, the compensation factor increases the penalty on misclassified instances to avoid false positives of tail categories. We conduct extensive experiments on Seesaw Loss with mainstream frameworks and different data sampling strategies. With a simple end-to-end training pipeline, Seesaw Loss obtains significant gains over Cross-Entropy Loss, and achieves state-of-the-art performance on the LVIS dataset without bells and whistles. + + +
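Before the configs, a small numeric sketch of the two factors described above may help. This is only a toy, single-sample illustration of the re-weighting idea, using the p=0.8, q=2.0 values from the configs below; it is not mmdetection's `SeesawLoss` implementation.

```python
import math

def seesaw_factors(counts, probs, gt, p=0.8, q=2.0):
    """Per-class weights S_ij for one sample of ground-truth class `gt`.

    counts: cumulative training instances seen per class (N in the paper)
    probs:  this sample's softmax probabilities (sigma in the paper)
    """
    n_i, sigma_i = counts[gt], probs[gt]
    weights = []
    for j, (n_j, sigma_j) in enumerate(zip(counts, probs)):
        if j == gt:
            weights.append(1.0)  # the positive class itself is not re-weighted
            continue
        mitigation = min(1.0, n_j / n_i) ** p            # shrink gradients pushed onto rarer classes
        compensation = max(1.0, sigma_j / sigma_i) ** q  # re-penalize confident false positives
        weights.append(mitigation * compensation)
    return weights

def seesaw_ce(logits, counts, gt, p=0.8, q=2.0):
    """Cross-entropy for one sample with the negative terms scaled by S_ij."""
    exps = [math.exp(z) for z in logits]
    probs = [e / sum(exps) for e in exps]
    s = seesaw_factors(counts, probs, gt, p, q)
    denom = exps[gt] + sum(s[j] * exps[j] for j in range(len(logits)) if j != gt)
    return -math.log(exps[gt] / denom)

# Sample of a tail class (index 2, ~1e3 instances) competing with a head class (index 0, ~1e6).
print(seesaw_ce(logits=[2.0, 0.5, 1.0], counts=[1e6, 1e5, 1e3], gt=2))
```

Here the head class keeps its full negative weight for this tail-class sample, while a rarer class would have its negative gradient shrunk by the mitigation factor; the compensation factor raises the weight again whenever a class is confidently misclassified.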
+ + + + +## Citation + + + +We provide config files to reproduce the instance segmentation performance in the CVPR 2021 paper for [Seesaw Loss for Long-Tailed Instance Segmentation](https://arxiv.org/abs/2008.10032). + +```latex +@inproceedings{wang2021seesaw, + title={Seesaw Loss for Long-Tailed Instance Segmentation}, + author={Jiaqi Wang and Wenwei Zhang and Yuhang Zang and Yuhang Cao and Jiangmiao Pang and Tao Gong and Kai Chen and Ziwei Liu and Chen Change Loy and Dahua Lin}, + booktitle={Proceedings of the {IEEE} Conference on Computer Vision and Pattern Recognition}, + year={2021} +} +``` + + +* Please setup [LVIS dataset](../lvis/README.md) for MMDetection. + +* RFS indicates to use oversample strategy [here](../../docs/tutorials/customize_dataset.md#class-balanced-dataset) with oversample threshold `1e-3`. + +## Results and models of Seasaw Loss on LVIS v1 dataset + + +| Method | Backbone | Style | Lr schd | Data Sampler | Norm Mask | box AP | mask AP | Config | Download | +| :----------------: | :-------: | :-----: | :-----: | :----------: | :-------: | :----: | :-----: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| Mask R-CNN | R-50-FPN | pytorch | 2x | random | N | 25.6 | 25.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1-a698dd3d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.log.json) | +| Mask R-CNN | R-50-FPN | pytorch | 2x | random | Y | 25.6 | 25.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-a1c11314.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.log.json) | +| Mask R-CNN | R-101-FPN | pytorch | 2x | random | N | 27.4 | 26.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1-8e6e6dd5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.log.json) | +| Mask R-CNN | R-101-FPN | pytorch | 2x | random | Y | 27.2 | 27.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-a0b59c42.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.log.json) | +| Mask R-CNN | R-50-FPN | pytorch | 2x | RFS | N | 27.6 | 26.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1-392a804b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.log.json) | +| Mask R-CNN | R-50-FPN | pytorch | 2x | RFS | Y | 27.6 | 26.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-cd0f6a12.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.log.json) | +| Mask R-CNN | R-101-FPN | pytorch | 2x | RFS | N | 28.9 | 27.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1-e68eb464.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.log.json) | +| Mask R-CNN | R-101-FPN | pytorch | 2x | RFS | Y | 28.9 | 28.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-1d817139.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.log.json) | +| Cascade Mask R-CNN | R-101-FPN | pytorch | 2x | random | N | 33.1 | 29.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1-71e2215e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.log.json) | +| Cascade Mask R-CNN | R-101-FPN | pytorch | 2x | random | Y | 33.0 | 30.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-8b5a6745.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.log.json) | +| Cascade Mask R-CNN | R-101-FPN | pytorch | 2x | RFS | N | 30.0 | 29.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1-5d8ca2a4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.log.json) | +| Cascade Mask R-CNN | R-101-FPN | pytorch | 2x | RFS | Y | 32.8 | 30.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-c8551505.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.log.json) | diff --git a/object_detection/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py b/object_detection/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..beeb0d1e5cd221c822641a1f64a4f27ad0cf25e5 --- /dev/null +++ b/object_detection/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py @@ -0,0 +1,132 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + roi_head=dict( + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=1203, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + cls_predictor_cfg=dict(type='NormedLinear', tempearture=20), + loss_cls=dict( + type='SeesawLoss', + p=0.8, + q=2.0, + num_classes=1203, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=1203, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + cls_predictor_cfg=dict(type='NormedLinear', tempearture=20), + loss_cls=dict( + type='SeesawLoss', + p=0.8, + q=2.0, + num_classes=1203, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=1203, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + cls_predictor_cfg=dict(type='NormedLinear', tempearture=20), + loss_cls=dict( + type='SeesawLoss', + p=0.8, + q=2.0, + num_classes=1203, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_head=dict(num_classes=1203)), + test_cfg=dict( + rcnn=dict( + score_thr=0.0001, + # LVIS allows up to 300 + max_per_img=300))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + 
img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +dataset_type = 'LVISV1Dataset' +data_root = 'data/lvis_v1/' +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_train.json', + img_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_val.json', + img_prefix=data_root, + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_val.json', + img_prefix=data_root, + pipeline=test_pipeline)) +evaluation = dict(interval=24, metric=['bbox', 'segm']) diff --git a/object_detection/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py b/object_detection/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..0f299484940db4ee1a1edd55006e2e145d99af2b --- /dev/null +++ b/object_detection/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py @@ -0,0 +1,5 @@ +_base_ = './cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py' # noqa: E501 +model = dict( + roi_head=dict( + mask_head=dict( + predictor_cfg=dict(type='NormedConv2d', tempearture=20)))) diff --git a/object_detection/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py b/object_detection/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..bb88750fe5f4d8cefb81222d65de6ce8e4c7dcc9 --- /dev/null +++ b/object_detection/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py @@ -0,0 +1,98 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_r50_fpn.py', + '../_base_/datasets/lvis_v1_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + roi_head=dict( + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=1203, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + cls_predictor_cfg=dict(type='NormedLinear', tempearture=20), + loss_cls=dict( + type='SeesawLoss', + p=0.8, + q=2.0, + num_classes=1203, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=1203, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + 
target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + cls_predictor_cfg=dict(type='NormedLinear', tempearture=20), + loss_cls=dict( + type='SeesawLoss', + p=0.8, + q=2.0, + num_classes=1203, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=1203, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + cls_predictor_cfg=dict(type='NormedLinear', tempearture=20), + loss_cls=dict( + type='SeesawLoss', + p=0.8, + q=2.0, + num_classes=1203, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_head=dict(num_classes=1203)), + test_cfg=dict( + rcnn=dict( + score_thr=0.0001, + # LVIS allows up to 300 + max_per_img=300))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(dataset=dict(pipeline=train_pipeline))) +evaluation = dict(interval=24, metric=['bbox', 'segm']) diff --git a/object_detection/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py b/object_detection/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..262e76bdd5e26091670f33534b43172e0664d3ba --- /dev/null +++ b/object_detection/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py @@ -0,0 +1,5 @@ +_base_ = './cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py' # noqa: E501 +model = dict( + roi_head=dict( + mask_head=dict( + predictor_cfg=dict(type='NormedConv2d', tempearture=20)))) diff --git a/object_detection/configs/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py b/object_detection/configs/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..57deab10863a0d375e4393e051abad96545c73d7 --- /dev/null +++ b/object_detection/configs/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py b/object_detection/configs/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..a539929252c0b760a13a208883b867d085ba8821 --- /dev/null +++ b/object_detection/configs/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py 
@@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py' # noqa: E501 +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py b/object_detection/configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..1f5065e799a90e1458da2db737bd496d9dc11349 --- /dev/null +++ b/object_detection/configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py b/object_detection/configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..13d0b5f2304fdc8af9d65cdb591c5dc6ee035097 --- /dev/null +++ b/object_detection/configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py' # noqa: E501 +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py b/object_detection/configs/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..743f5f2617d01639cbcf855abb59e9cd94ed3c8a --- /dev/null +++ b/object_detection/configs/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py @@ -0,0 +1,75 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict( + num_classes=1203, + cls_predictor_cfg=dict(type='NormedLinear', tempearture=20), + loss_cls=dict( + type='SeesawLoss', + p=0.8, + q=2.0, + num_classes=1203, + loss_weight=1.0)), + mask_head=dict(num_classes=1203)), + test_cfg=dict( + rcnn=dict( + score_thr=0.0001, + # LVIS allows up to 300 + max_per_img=300))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', 
keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +dataset_type = 'LVISV1Dataset' +data_root = 'data/lvis_v1/' +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_train.json', + img_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_val.json', + img_prefix=data_root, + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_val.json', + img_prefix=data_root, + pipeline=test_pipeline)) +evaluation = dict(interval=24, metric=['bbox', 'segm']) diff --git a/object_detection/configs/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py b/object_detection/configs/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..0af89210777d31e1ebf8c2852669fd397ab8c8bc --- /dev/null +++ b/object_detection/configs/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py @@ -0,0 +1,5 @@ +_base_ = './mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py' +model = dict( + roi_head=dict( + mask_head=dict( + predictor_cfg=dict(type='NormedConv2d', tempearture=20)))) diff --git a/object_detection/configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py b/object_detection/configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..4fc15049c6c6184506095483c1c16aabc5e55328 --- /dev/null +++ b/object_detection/configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py @@ -0,0 +1,41 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/lvis_v1_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict( + num_classes=1203, + cls_predictor_cfg=dict(type='NormedLinear', tempearture=20), + loss_cls=dict( + type='SeesawLoss', + p=0.8, + q=2.0, + num_classes=1203, + loss_weight=1.0)), + mask_head=dict(num_classes=1203)), + test_cfg=dict( + rcnn=dict( + score_thr=0.0001, + # LVIS allows up to 300 + max_per_img=300))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(dataset=dict(pipeline=train_pipeline))) +evaluation = dict(interval=12, metric=['bbox', 'segm']) diff --git a/object_detection/configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py b/object_detection/configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..0ef6bd2ce4301287cba1b48d89efbbcccecfe3bc --- /dev/null +++ b/object_detection/configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py @@ -0,0 
+1,5 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py' +model = dict( + roi_head=dict( + mask_head=dict( + predictor_cfg=dict(type='NormedConv2d', tempearture=20)))) diff --git a/object_detection/configs/seesaw_loss/metafile.yml b/object_detection/configs/seesaw_loss/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..70dd2fe681ee2cbefeb2b6a52c6eb789c811af43 --- /dev/null +++ b/object_detection/configs/seesaw_loss/metafile.yml @@ -0,0 +1,203 @@ +Collections: + - Name: Seesaw Loss + Metadata: + Training Data: LVIS + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Softmax + - RPN + - Convolution + - Dense Connections + - FPN + - ResNet + - RoIAlign + - Seesaw Loss + Paper: + URL: https://arxiv.org/abs/2008.10032 + Title: 'Seesaw Loss for Long-Tailed Instance Segmentation' + README: configs/seesaw_loss/README.md + +Models: + - Name: mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1 + In Collection: Seesaw Loss + Config: seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: LVIS v1 + Metrics: + box AP: 25.6 + - Task: Instance Segmentation + Dataset: LVIS v1 + Metrics: + mask AP: 25.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1-a698dd3d.pth + - Name: mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1 + In Collection: Seesaw Loss + Config: seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: LVIS v1 + Metrics: + box AP: 25.6 + - Task: Instance Segmentation + Dataset: LVIS v1 + Metrics: + mask AP: 25.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-a1c11314.pth + - Name: mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1 + In Collection: Seesaw Loss + Config: seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: LVIS v1 + Metrics: + box AP: 27.4 + - Task: Instance Segmentation + Dataset: LVIS v1 + Metrics: + mask AP: 26.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1-8e6e6dd5.pth + - Name: mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1 + In Collection: Seesaw Loss + Config: seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: LVIS v1 + Metrics: + box AP: 27.2 + - Task: Instance Segmentation + Dataset: LVIS v1 + Metrics: + mask AP: 27.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-a0b59c42.pth + - Name: mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1 + In Collection: Seesaw Loss + Config: configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: LVIS v1 + Metrics: + box AP: 27.6 + - Task: Instance Segmentation + Dataset: LVIS v1 + Metrics: + mask AP: 26.4 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1-392a804b.pth + - Name: mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1 + In Collection: Seesaw Loss + Config: configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: LVIS v1 + Metrics: + box AP: 27.6 + - Task: Instance Segmentation + Dataset: LVIS v1 + Metrics: + mask AP: 26.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-cd0f6a12.pth + - Name: mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1 + In Collection: Seesaw Loss + Config: configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: LVIS v1 + Metrics: + box AP: 28.9 + - Task: Instance Segmentation + Dataset: LVIS v1 + Metrics: + mask AP: 27.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1-e68eb464.pth + - Name: mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1 + In Collection: Seesaw Loss + Config: configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: LVIS v1 + Metrics: + box AP: 28.9 + - Task: Instance Segmentation + Dataset: LVIS v1 + Metrics: + mask AP: 28.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-1d817139.pth + - Name: cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1 + In Collection: Seesaw Loss + Config: configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: LVIS v1 + Metrics: + box AP: 33.1 + - Task: Instance Segmentation + Dataset: LVIS v1 + Metrics: + mask AP: 29.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1-71e2215e.pth + - Name: cascade_mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1 + In Collection: Seesaw Loss + Config: configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: LVIS v1 + Metrics: + box AP: 33.0 + - Task: Instance Segmentation + Dataset: LVIS v1 + Metrics: + mask AP: 30.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-8b5a6745.pth + - Name: cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1 + In Collection: Seesaw Loss + Config: configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: LVIS v1 + Metrics: + box AP: 30.0 + - Task: Instance Segmentation + Dataset: LVIS v1 + Metrics: + mask AP: 29.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1-5d8ca2a4.pth + - Name: cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1 + In Collection: Seesaw 
Loss + Config: configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: LVIS v1 + Metrics: + box AP: 32.8 + - Task: Instance Segmentation + Dataset: LVIS v1 + Metrics: + mask AP: 30.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-c8551505.pth diff --git a/object_detection/configs/selfsup_pretrain/README.md b/object_detection/configs/selfsup_pretrain/README.md new file mode 100644 index 0000000000000000000000000000000000000000..2c61bd6dee1d2e0284717994f32af124ba4467df --- /dev/null +++ b/object_detection/configs/selfsup_pretrain/README.md @@ -0,0 +1,116 @@ +# Backbones Trained by Self-Supervise Algorithms + +## Abstract + + + +Unsupervised image representations have significantly reduced the gap with supervised pretraining, notably with the recent achievements of contrastive learning methods. These contrastive methods typically work online and rely on a large number of explicit pairwise feature comparisons, which is computationally challenging. In this paper, we propose an online algorithm, SwAV, that takes advantage of contrastive methods without requiring to compute pairwise comparisons. Specifically, our method simultaneously clusters the data while enforcing consistency between cluster assignments produced for different augmentations (or views) of the same image, instead of comparing features directly as in contrastive learning. Simply put, we use a swapped prediction mechanism where we predict the cluster assignment of a view from the representation of another view. Our method can be trained with large and small batches and can scale to unlimited amounts of data. Compared to previous contrastive methods, our method is more memory efficient since it does not require a large memory bank or a special momentum network. In addition, we also propose a new data augmentation strategy, multi-crop, that uses a mix of views with different resolutions in place of two full-resolution views, without increasing the memory or compute requirements much. We validate our findings by achieving 75.3% top-1 accuracy on ImageNet with ResNet-50, as well as surpassing supervised pretraining on all the considered transfer tasks. + + +
+ +
+ +We present Momentum Contrast (MoCo) for unsupervised visual representation learning. From a perspective on contrastive learning as dictionary look-up, we build a dynamic dictionary with a queue and a moving-averaged encoder. This enables building a large and consistent dictionary on-the-fly that facilitates contrastive unsupervised learning. MoCo provides competitive results under the common linear protocol on ImageNet classification. More importantly, the representations learned by MoCo transfer well to downstream tasks. MoCo can outperform its supervised pre-training counterpart in 7 detection/segmentation tasks on PASCAL VOC, COCO, and other datasets, sometimes surpassing it by large margins. This suggests that the gap between unsupervised and supervised representation learning has been largely closed in many vision tasks. + + +
+ +
+ + + +## Citation + + + +We support applying backbone models pre-trained by different self-supervised methods in detection systems and provide their results on Mask R-CNN. + +The pre-trained models are converted from [MoCo](https://github.com/facebookresearch/moco) and downloaded from [SwAV](https://github.com/facebookresearch/swav). + +For SwAV, please cite + +```latex +@article{caron2020unsupervised, + title={Unsupervised Learning of Visual Features by Contrasting Cluster Assignments}, + author={Caron, Mathilde and Misra, Ishan and Mairal, Julien and Goyal, Priya and Bojanowski, Piotr and Joulin, Armand}, + booktitle={Proceedings of Advances in Neural Information Processing Systems (NeurIPS)}, + year={2020} +} +``` + +For MoCo, please cite + +```latex +@Article{he2019moco, + author = {Kaiming He and Haoqi Fan and Yuxin Wu and Saining Xie and Ross Girshick}, + title = {Momentum Contrast for Unsupervised Visual Representation Learning}, + journal = {arXiv preprint arXiv:1911.05722}, + year = {2019}, +} +@Article{chen2020mocov2, + author = {Xinlei Chen and Haoqi Fan and Ross Girshick and Kaiming He}, + title = {Improved Baselines with Momentum Contrastive Learning}, + journal = {arXiv preprint arXiv:2003.04297}, + year = {2020}, +} +``` + +## Usage + +To use a self-supervised pretrained backbone, there are two steps: + +1. Download and convert the model to a PyTorch-style checkpoint supported by MMDetection +2. Modify the config and change the training settings accordingly + +### Convert model + +For more general usage, we also provide the script `selfsup2mmdet.py` in the tools directory to convert the keys of models pretrained by different self-supervised methods to PyTorch-style checkpoints used in MMDetection. + +```bash +python -u tools/model_converters/selfsup2mmdet.py ${PRETRAIN_PATH} ${STORE_PATH} --selfsup ${method} +``` + +This script converts the model from `PRETRAIN_PATH` and stores the converted model in `STORE_PATH`. + +For example, to use a ResNet-50 backbone released by MoCo, you can download it from [here](https://dl.fbaipublicfiles.com/moco/moco_checkpoints/moco_v2_800ep/moco_v2_800ep_pretrain.pth.tar) and use the following command: + +```bash +python -u tools/model_converters/selfsup2mmdet.py ./moco_v2_800ep_pretrain.pth.tar mocov2_r50_800ep_pretrain.pth --selfsup moco +``` + +To use the ResNet-50 backbone released by SwAV, you can download it from [here](https://dl.fbaipublicfiles.com/deepcluster/swav_800ep_pretrain.pth.tar). + +### Modify config + +The backbone requires SyncBN, and `frozen_stages` needs to be changed. 
A config that use the moco backbone is as below + +```python +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + pretrained='./mocov2_r50_800ep_pretrain.pth', + backbone=dict( + frozen_stages=0, + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False)) + +``` + +## Results + +| Method | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-----: | :-----: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +|Mask RCNN |[R50 by MoCo v2](./mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco.py)| pytorch |1x|| |38.0|34.3|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco_20210604_114614-a8b63483.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco_20210604_114614.log.json)| +|Mask RCNN |[R50 by MoCo v2](./mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco.py)| pytorch | multi-scale 2x || |40.8|36.8|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco_20210605_163717-d95df20a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco_20210605_163717.log.json)| +|Mask RCNN |[R50 by SwAV](./mask_rcnn_r50_fpn_swav-pretrain_1x_coco.py)| pytorch | 1x || |39.1 | 35.7|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_1x_coco/mask_rcnn_r50_fpn_swav-pretrain_1x_coco_20210604_114640-7b9baf28.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_1x_coco/mask_rcnn_r50_fpn_swav-pretrain_1x_coco_20210604_114640.log.json)| +|Mask RCNN |[R50 by SwAV](./mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco.py)| pytorch | multi-scale 2x || |41.3|37.3|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco_20210605_163717-08e26fca.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco_20210605_163717.log.json)| + +### Notice + +1. We only provide single-scale 1x and multi-scale 2x configs as examples to show how to use backbones trained by self-supervised algorithms. We will try to reproduce the results in their corresponding paper using the released backbone in the future. Please stay tuned. 
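To make the conversion step in the Usage section above more concrete, the sketch below shows the kind of key renaming such a converter performs for a MoCo v2 checkpoint. It is only a minimal illustration, not the actual `tools/model_converters/selfsup2mmdet.py` script, and it assumes the layout of the official MoCo v2 release, where the backbone sits under `state_dict` with a `module.encoder_q.` prefix and the MLP head uses `fc.*` keys.

```python
import torch

def convert_moco_to_mmdet(src_path: str, dst_path: str) -> None:
    """Minimal sketch of a MoCo v2 -> MMDetection key conversion (illustrative only)."""
    ckpt = torch.load(src_path, map_location='cpu')
    state_dict = ckpt.get('state_dict', ckpt)

    prefix = 'module.encoder_q.'
    converted = {}
    for key, value in state_dict.items():
        # keep only the query-encoder backbone and drop the projection/MLP head
        if key.startswith(prefix) and not key.startswith(prefix + 'fc'):
            converted[key[len(prefix):]] = value

    # the remaining keys follow torchvision's ResNet naming (conv1.*, layer1.*, ...),
    # which is what the MMDetection ResNet backbone expects
    torch.save(dict(state_dict=converted), dst_path)

# convert_moco_to_mmdet('./moco_v2_800ep_pretrain.pth.tar', 'mocov2_r50_800ep_pretrain.pth')
```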
diff --git a/object_detection/configs/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco.py b/object_detection/configs/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f1e061524e656409e37d3ae80b290c368a47d6a6 --- /dev/null +++ b/object_detection/configs/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + frozen_stages=0, + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + init_cfg=dict( + type='Pretrained', checkpoint='./mocov2_r50_800ep_pretrain.pth'))) diff --git a/object_detection/configs/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco.py b/object_detection/configs/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..09aa15608decb610a2f0b1181e50cbe1b8c6387a --- /dev/null +++ b/object_detection/configs/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco.py @@ -0,0 +1,32 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + frozen_stages=0, + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + init_cfg=dict( + type='Pretrained', checkpoint='./mocov2_r50_800ep_pretrain.pth'))) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']) +] + +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/object_detection/configs/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_1x_coco.py b/object_detection/configs/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f92a3453dd1d5e8460a4279764845ce3e9c3ed81 --- /dev/null +++ b/object_detection/configs/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + frozen_stages=0, + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + init_cfg=dict( + type='Pretrained', checkpoint='./swav_800ep_pretrain.pth.tar'))) diff --git a/object_detection/configs/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco.py b/object_detection/configs/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fe473613492b5388ceb50b1669317539360b8e2f --- /dev/null +++ b/object_detection/configs/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco.py @@ -0,0 +1,32 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + 
'../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + frozen_stages=0, + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + init_cfg=dict( + type='Pretrained', checkpoint='./swav_800ep_pretrain.pth.tar'))) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']) +] + +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/object_detection/configs/solo/README.md b/object_detection/configs/solo/README.md new file mode 100644 index 0000000000000000000000000000000000000000..76664b7e4a6fcc3f01e9e674af28c3b4db05fca7 --- /dev/null +++ b/object_detection/configs/solo/README.md @@ -0,0 +1,56 @@ +# SOLO: Segmenting Objects by Locations + +## Abstract + + + +We present a new, embarrassingly simple approach to instance segmentation in images. Compared to many other dense prediction tasks, e.g., semantic segmentation, it is the arbitrary number of instances that have made instance segmentation much more challenging. In order to predict a mask for each instance, mainstream approaches either follow the 'detect-thensegment' strategy as used by Mask R-CNN, or predict category masks first then use clustering techniques to group pixels into individual instances. We view the task of instance segmentation from a completely new perspective by introducing the notion of "instance categories", which assigns categories to each pixel within an instance according to the instance's location and size, thus nicely converting instance mask segmentation into a classification-solvable problem. Now instance segmentation is decomposed into two classification tasks. We demonstrate a much simpler and flexible instance segmentation framework with strong performance, achieving on par accuracy with Mask R-CNN and outperforming recent singleshot instance segmenters in accuracy. We hope that this very simple and strong framework can serve as a baseline for many instance-level recognition tasks besides instance segmentation. + + +
+ +
+ + + + +## Citation + +``` +@inproceedings{wang2020solo, + title = {{SOLO}: Segmenting Objects by Locations}, + author = {Wang, Xinlong and Kong, Tao and Shen, Chunhua and Jiang, Yuning and Li, Lei}, + booktitle = {Proc. Eur. Conf. Computer Vision (ECCV)}, + year = {2020} +} +``` + +## Results and Models + +### SOLO + +| Backbone | Style | MS train | Lr schd | Mem (GB) | Inf time (fps) | mask AP | Download | +|:---------:|:-------:|:--------:|:-------:|:--------:|:--------------:|:------:|:--------:| +| R-50 | pytorch | N | 1x | 8.0 | 14.0 | 33.1 | [model](https://download.openmmlab.com/mmdetection/v2.0/solo/solo_r50_fpn_1x_coco/solo_r50_fpn_1x_coco_20210821_035055-2290a6b8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/solo/solo_r50_fpn_1x_coco/solo_r50_fpn_1x_coco_20210821_035055.log.json) | +| R-50 | pytorch | Y | 3x | 7.4 | 14.0 | 35.9 | [model](https://download.openmmlab.com/mmdetection/v2.0/solo/solo_r50_fpn_3x_coco/solo_r50_fpn_3x_coco_20210901_012353-11d224d7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/solo/solo_r50_fpn_3x_coco/solo_r50_fpn_3x_coco_20210901_012353.log.json) | + +### Decoupled SOLO + +| Backbone | Style | MS train | Lr schd | Mem (GB) | Inf time (fps) | mask AP | Download | +|:---------:|:-------:|:--------:|:-------:|:--------:|:--------------:|:-------:|:--------:| +| R-50 | pytorch | N | 1x | 7.8 | 12.5 | 33.9 | [model](https://download.openmmlab.com/mmdetection/v2.0/solo/decoupled_solo_r50_fpn_1x_coco/decoupled_solo_r50_fpn_1x_coco_20210820_233348-6337c589.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/solo/decoupled_solo_r50_fpn_1x_coco/decoupled_solo_r50_fpn_1x_coco_20210820_233348.log.json) | +| R-50 | pytorch | Y | 3x | 7.9 | 12.5 | 36.7 | [model](https://download.openmmlab.com/mmdetection/v2.0/solo/decoupled_solo_r50_fpn_3x_coco/decoupled_solo_r50_fpn_3x_coco_20210821_042504-7b3301ec.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/solo/decoupled_solo_r50_fpn_3x_coco/decoupled_solo_r50_fpn_3x_coco_20210821_042504.log.json) | + +- Decoupled SOLO has a decoupled head which is different from SOLO head. +Decoupled SOLO serves as an efficient and equivalent variant in accuracy +of SOLO. Please refer to the corresponding config files for details. + +### Decoupled Light SOLO + +| Backbone | Style | MS train | Lr schd | Mem (GB) | Inf time (fps) | mask AP | Download | +|:---------:|:-------:|:--------:|:-------:|:--------:|:--------------:|:------:|:--------:| +| R-50 | pytorch | Y | 3x | 2.2 | 31.2 | 32.9 | [model](https://download.openmmlab.com/mmdetection/v2.0/solo/decoupled_solo_light_r50_fpn_3x_coco/decoupled_solo_light_r50_fpn_3x_coco_20210906_142703-e70e226f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/solo/decoupled_solo_light_r50_fpn_3x_coco/decoupled_solo_light_r50_fpn_3x_coco_20210906_142703.log.json) | + +- Decoupled Light SOLO using decoupled structure similar to Decoupled +SOLO head, with light-weight head and smaller input size, Please refer +to the corresponding config files for details. 
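To make the "instance categories" idea from the abstract above concrete, here is a deliberately simplified sketch of how an instance can be assigned to a cell of an S×S grid by its location. It is illustrative only and does not reproduce the actual `SOLOHead` label assignment in MMDetection, which uses a centre region rather than a single point and selects the FPN level through `scale_ranges`.

```python
import numpy as np

def assign_grid_cell(mask: np.ndarray, num_grids: int = 40):
    """Map a binary instance mask (H, W) to the grid cell that 'owns' the instance."""
    ys, xs = np.nonzero(mask)
    cy, cx = ys.mean(), xs.mean()          # mass centre of the instance
    h, w = mask.shape
    i = min(int(cy / h * num_grids), num_grids - 1)
    j = min(int(cx / w * num_grids), num_grids - 1)
    return i, j                            # this cell predicts the category and the mask

# example: a 20x20 object in a 128x128 image
mask = np.zeros((128, 128), dtype=bool)
mask[20:40, 90:110] = True
print(assign_grid_cell(mask))              # -> (9, 31)
```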
diff --git a/object_detection/configs/solo/decoupled_solo_light_r50_fpn_3x_coco.py b/object_detection/configs/solo/decoupled_solo_light_r50_fpn_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..101f8f1d376f8a574cc2b35d17498097891fa2c7 --- /dev/null +++ b/object_detection/configs/solo/decoupled_solo_light_r50_fpn_3x_coco.py @@ -0,0 +1,63 @@ +_base_ = './decoupled_solo_r50_fpn_3x_coco.py' + +# model settings +model = dict( + mask_head=dict( + type='DecoupledSOLOLightHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 8, 16, 32, 32], + scale_ranges=((1, 64), (32, 128), (64, 256), (128, 512), (256, 2048)), + pos_scale=0.2, + num_grids=[40, 36, 24, 16, 12], + cls_down_index=0, + loss_mask=dict( + type='DiceLoss', use_sigmoid=True, activate=False, + loss_weight=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(852, 512), (852, 480), (852, 448), (852, 416), (852, 384), + (852, 352)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(852, 512), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/solo/decoupled_solo_r50_fpn_1x_coco.py b/object_detection/configs/solo/decoupled_solo_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b611cdf4d05fde5f76901c85b7ba55405a5190d0 --- /dev/null +++ b/object_detection/configs/solo/decoupled_solo_r50_fpn_1x_coco.py @@ -0,0 +1,28 @@ +_base_ = [ + './solo_r50_fpn_1x_coco.py', +] +# model settings +model = dict( + mask_head=dict( + type='DecoupledSOLOHead', + num_classes=80, + in_channels=256, + stacked_convs=7, + feat_channels=256, + strides=[8, 8, 16, 32, 32], + scale_ranges=((1, 96), (48, 192), (96, 384), (192, 768), (384, 2048)), + pos_scale=0.2, + num_grids=[40, 36, 24, 16, 12], + cls_down_index=0, + loss_mask=dict( + type='DiceLoss', use_sigmoid=True, activate=False, + loss_weight=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) + +optimizer = dict(type='SGD', lr=0.01) diff --git a/object_detection/configs/solo/decoupled_solo_r50_fpn_3x_coco.py b/object_detection/configs/solo/decoupled_solo_r50_fpn_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4a8c19decb72a3d904a277faac06670999f6b322 --- /dev/null +++ b/object_detection/configs/solo/decoupled_solo_r50_fpn_3x_coco.py @@ -0,0 +1,25 @@ +_base_ = 
'./solo_r50_fpn_3x_coco.py' + +# model settings +model = dict( + mask_head=dict( + type='DecoupledSOLOHead', + num_classes=80, + in_channels=256, + stacked_convs=7, + feat_channels=256, + strides=[8, 8, 16, 32, 32], + scale_ranges=((1, 96), (48, 192), (96, 384), (192, 768), (384, 2048)), + pos_scale=0.2, + num_grids=[40, 36, 24, 16, 12], + cls_down_index=0, + loss_mask=dict( + type='DiceLoss', use_sigmoid=True, activate=False, + loss_weight=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) diff --git a/object_detection/configs/solo/metafile.yml b/object_detection/configs/solo/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..b6244e80f1a53503d36b20d3d589476ad3930a42 --- /dev/null +++ b/object_detection/configs/solo/metafile.yml @@ -0,0 +1,115 @@ +Collections: + - Name: SOLO + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - Convolution + - ResNet + Paper: https://arxiv.org/abs/1912.04488 + README: configs/solo/README.md + +Models: + - Name: decoupled_solo_r50_fpn_1x_coco + In Collection: SOLO + Config: configs/solo/decoupled_solo_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.8 + Epochs: 12 + inference time (ms/im): + - value: 116.4 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (1333, 800) + Results: + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 33.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/solo/decoupled_solo_r50_fpn_1x_coco/decoupled_solo_r50_fpn_1x_coco_20210820_233348-6337c589.pth + + - Name: decoupled_solo_r50_fpn_3x_coco + In Collection: SOLO + Config: configs/solo/decoupled_solo_r50_fpn_3x_coco.py + Metadata: + Training Memory (GB): 7.9 + Epochs: 36 + inference time (ms/im): + - value: 117.2 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (1333, 800) + Results: + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/solo/decoupled_solo_r50_fpn_3x_coco/decoupled_solo_r50_fpn_3x_coco_20210821_042504-7b3301ec.pth + + - Name: decoupled_solo_light_r50_fpn_3x_coco + In Collection: SOLO + Config: configs/solo/decoupled_solo_light_r50_fpn_3x_coco.py + Metadata: + Training Memory (GB): 2.2 + Epochs: 36 + inference time (ms/im): + - value: 35.0 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (852, 512) + Results: + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 32.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/solo/decoupled_solo_light_r50_fpn_3x_coco/decoupled_solo_light_r50_fpn_3x_coco_20210906_142703-e70e226f.pth + + - Name: solo_r50_fpn_3x_coco + In Collection: SOLO + Config: configs/solo/solo_r50_fpn_3x_coco.py + Metadata: + Training Memory (GB): 7.4 + Epochs: 36 + inference time (ms/im): + - value: 94.2 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (1333, 800) + Results: + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 35.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/solo/solo_r50_fpn_3x_coco/solo_r50_fpn_3x_coco_20210901_012353-11d224d7.pth + + - Name: solo_r50_fpn_1x_coco + In Collection: SOLO + Config: configs/solo/solo_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 8.0 + Epochs: 12 + inference 
time (ms/im): + - value: 95.1 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (1333, 800) + Results: + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 33.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/solo/solo_r50_fpn_1x_coco/solo_r50_fpn_1x_coco_20210821_035055-2290a6b8.pth diff --git a/object_detection/configs/solo/solo_r50_fpn_1x_coco.py b/object_detection/configs/solo/solo_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9093a50480096a22def41e36a9793bf765df56cc --- /dev/null +++ b/object_detection/configs/solo/solo_r50_fpn_1x_coco.py @@ -0,0 +1,53 @@ +_base_ = [ + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +# model settings +model = dict( + type='SOLO', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50'), + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=0, + num_outs=5), + mask_head=dict( + type='SOLOHead', + num_classes=80, + in_channels=256, + stacked_convs=7, + feat_channels=256, + strides=[8, 8, 16, 32, 32], + scale_ranges=((1, 96), (48, 192), (96, 384), (192, 768), (384, 2048)), + pos_scale=0.2, + num_grids=[40, 36, 24, 16, 12], + cls_down_index=0, + loss_mask=dict(type='DiceLoss', use_sigmoid=True, loss_weight=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True)), + # model training and testing settings + test_cfg=dict( + nms_pre=500, + score_thr=0.1, + mask_thr=0.5, + filter_thr=0.05, + kernel='gaussian', # gaussian/linear + sigma=2.0, + max_per_img=100)) + +# optimizer +optimizer = dict(type='SGD', lr=0.01) diff --git a/object_detection/configs/solo/solo_r50_fpn_3x_coco.py b/object_detection/configs/solo/solo_r50_fpn_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..52302cdf9dea18ef511eb854fcd3e88f22b0ed44 --- /dev/null +++ b/object_detection/configs/solo/solo_r50_fpn_3x_coco.py @@ -0,0 +1,28 @@ +_base_ = './solo_r50_fpn_1x_coco.py' + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 800), (1333, 768), (1333, 736), (1333, 704), + (1333, 672), (1333, 640)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) + +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[27, 33]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/object_detection/configs/sparse_rcnn/README.md b/object_detection/configs/sparse_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..48c8aef4340e7545f112597534f2f5fff0b1a9b3 --- /dev/null +++ b/object_detection/configs/sparse_rcnn/README.md @@ -0,0 +1,42 @@ +# Sparse R-CNN: End-to-End Object Detection with Learnable Proposals + +## Abstract + + + +We present 
Sparse R-CNN, a purely sparse method for object detection in images. Existing works on object detection heavily rely on dense object candidates, such as k anchor boxes pre-defined on all grids of image feature map of size H×W. In our method, however, a fixed sparse set of learned object proposals, total length of N, are provided to object recognition head to perform classification and location. By eliminating HWk (up to hundreds of thousands) hand-designed object candidates to N (e.g. 100) learnable proposals, Sparse R-CNN completely avoids all efforts related to object candidates design and many-to-one label assignment. More importantly, final predictions are directly output without non-maximum suppression post-procedure. Sparse R-CNN demonstrates accuracy, run-time and training convergence performance on par with the well-established detector baselines on the challenging COCO dataset, e.g., achieving 45.0 AP in standard 3× training schedule and running at 22 fps using ResNet-50 FPN model. We hope our work could inspire re-thinking the convention of dense prior in object detectors. + + +
+ +
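The "fixed sparse set of learned object proposals" mentioned above is what the `EmbeddingRPNHead` with `num_proposals=100` provides in the configs that follow. The snippet below is only a rough sketch of that idea, assuming the proposals are stored as plain embedding tables; it is not the MMDetection implementation.

```python
from torch import nn

class LearnableProposals(nn.Module):
    """Rough sketch of Sparse R-CNN's learnable proposals (illustrative only)."""

    def __init__(self, num_proposals: int = 100, feat_channels: int = 256):
        super().__init__()
        # proposal boxes in normalized (cx, cy, w, h) form, plus one feature per proposal
        self.proposal_boxes = nn.Embedding(num_proposals, 4)
        self.proposal_feats = nn.Embedding(num_proposals, feat_channels)
        nn.init.constant_(self.proposal_boxes.weight[:, :2], 0.5)  # start at the image centre
        nn.init.constant_(self.proposal_boxes.weight[:, 2:], 1.0)  # start as full-image boxes

    def forward(self, batch_size: int):
        # every image starts from the same learned proposals; no dense RPN and no NMS
        boxes = self.proposal_boxes.weight[None].expand(batch_size, -1, -1)
        feats = self.proposal_feats.weight[None].expand(batch_size, -1, -1)
        return boxes, feats

boxes, feats = LearnableProposals()(batch_size=2)
print(boxes.shape, feats.shape)  # (2, 100, 4) and (2, 100, 256)
```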
+ + + + +## Citation + + + +``` +@article{peize2020sparse, + title = {{SparseR-CNN}: End-to-End Object Detection with Learnable Proposals}, + author = {Peize Sun and Rufeng Zhang and Yi Jiang and Tao Kong and Chenfeng Xu and Wei Zhan and Masayoshi Tomizuka and Lei Li and Zehuan Yuan and Changhu Wang and Ping Luo}, + journal = {arXiv preprint arXiv:2011.12450}, + year = {2020} +} +``` + +## Results and Models + +| Model | Backbone | Style | Lr schd | Number of Proposals |Multi-Scale| RandomCrop | box AP | Config | Download | +|:------------:|:---------:|:-------:|:-------:|:-------: |:-------: |:---------:|:------:|:------:|:--------:| +| Sparse R-CNN | R-50-FPN | pytorch | 1x | 100 | False | False | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco/sparse_rcnn_r50_fpn_1x_coco_20201222_214453-dc79b137.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco/sparse_rcnn_r50_fpn_1x_coco_20201222_214453-dc79b137.log.json) | +| Sparse R-CNN | R-50-FPN | pytorch | 3x | 100 | True | False | 42.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco_20201218_154234-7bc5c054.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco_20201218_154234-7bc5c054.log.json) | +| Sparse R-CNN | R-50-FPN | pytorch | 3x | 300 | True | True | 45.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20201223_024605-9fe92701.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20201223_024605-9fe92701.log.json) | +| Sparse R-CNN | R-101-FPN | pytorch | 3x | 100 | True | False | 44.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco_20201223_121552-6c46c9d6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco_20201223_121552-6c46c9d6.log.json) | +| Sparse R-CNN | R-101-FPN | pytorch | 3x | 300 | True | True | 46.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20201223_023452-c23c3564.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20201223_023452-c23c3564.log.json) | + +### Notes + +We observe about 0.3 AP noise especially when using ResNet-101 as the backbone. diff --git a/object_detection/configs/sparse_rcnn/metafile.yml b/object_detection/configs/sparse_rcnn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..bb1273eccd7541d1fcaa958c9aae276322c4d193 --- /dev/null +++ b/object_detection/configs/sparse_rcnn/metafile.yml @@ -0,0 +1,80 @@ +Collections: + - Name: Sparse R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - ResNet + - Sparse R-CNN + Paper: + URL: https://arxiv.org/abs/2011.12450 + Title: 'Sparse R-CNN: End-to-End Object Detection with Learnable Proposals' + README: configs/sparse_rcnn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.9.0/mmdet/models/detectors/sparse_rcnn.py#L6 + Version: v2.9.0 + +Models: + - Name: sparse_rcnn_r50_fpn_1x_coco + In Collection: Sparse R-CNN + Config: configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco/sparse_rcnn_r50_fpn_1x_coco_20201222_214453-dc79b137.pth + + - Name: sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco + In Collection: Sparse R-CNN + Config: configs/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco_20201218_154234-7bc5c054.pth + + - Name: sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco + In Collection: Sparse R-CNN + Config: configs/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20201223_024605-9fe92701.pth + + - Name: sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco + In Collection: Sparse R-CNN + Config: configs/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco_20201223_121552-6c46c9d6.pth + + - Name: sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco + In Collection: Sparse R-CNN + Config: configs/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20201223_023452-c23c3564.pth diff --git 
a/object_detection/configs/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py b/object_detection/configs/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..de323bdfaad7a092373da57d8f5ce99441bd48cf --- /dev/null +++ b/object_detection/configs/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py @@ -0,0 +1,7 @@ +_base_ = './sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco.py b/object_detection/configs/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ab4c5f68178a55d89a74bfa2911d48befb8869f8 --- /dev/null +++ b/object_detection/configs/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco.py @@ -0,0 +1,7 @@ +_base_ = './sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py b/object_detection/configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b383ee48598c9ae73c6f44dbb539cdfa6c052d80 --- /dev/null +++ b/object_detection/configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,95 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +num_stages = 6 +num_proposals = 100 +model = dict( + type='SparseRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=0, + add_extra_convs='on_input', + num_outs=4), + rpn_head=dict( + type='EmbeddingRPNHead', + num_proposals=num_proposals, + proposal_feature_channel=256), + roi_head=dict( + type='SparseRoIHead', + num_stages=num_stages, + stage_loss_weights=[1] * num_stages, + proposal_feature_channel=256, + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='DIIHead', + num_classes=80, + num_ffn_fcs=2, + num_heads=8, + num_cls_fcs=1, + num_reg_fcs=3, + feedforward_channels=2048, + in_channels=256, + dropout=0.0, + ffn_act_cfg=dict(type='ReLU', inplace=True), + dynamic_conv_cfg=dict( + type='DynamicConv', + in_channels=256, + feat_channels=64, + out_channels=256, + input_feat_shape=7, + act_cfg=dict(type='ReLU', inplace=True), + norm_cfg=dict(type='LN')), + loss_bbox=dict(type='L1Loss', loss_weight=5.0), + loss_iou=dict(type='GIoULoss', loss_weight=2.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=2.0), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + clip_border=False, + target_means=[0., 0., 0., 0.], + target_stds=[0.5, 0.5, 1., 1.])) for _ in range(num_stages) + ]), + # training and testing settings + train_cfg=dict( + rpn=None, + rcnn=[ 
+ dict( + assigner=dict( + type='HungarianAssigner', + cls_cost=dict(type='FocalLossCost', weight=2.0), + reg_cost=dict(type='BBoxL1Cost', weight=5.0), + iou_cost=dict(type='IoUCost', iou_mode='giou', + weight=2.0)), + sampler=dict(type='PseudoSampler'), + pos_weight=1) for _ in range(num_stages) + ]), + test_cfg=dict(rpn=None, rcnn=dict(max_per_img=num_proposals))) + +# optimizer +optimizer = dict(_delete_=True, type='AdamW', lr=0.000025, weight_decay=0.0001) +optimizer_config = dict(_delete_=True, grad_clip=dict(max_norm=1, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[8, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/object_detection/configs/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py b/object_detection/configs/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..36f1d62eba62bb9c3266864cd4250caedea95a21 --- /dev/null +++ b/object_detection/configs/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py @@ -0,0 +1,52 @@ +_base_ = './sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco.py' +num_proposals = 300 +model = dict( + rpn_head=dict(num_proposals=num_proposals), + test_cfg=dict( + _delete_=True, rpn=None, rcnn=dict(max_per_img=num_proposals))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from DETR. +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='AutoAugment', + policies=[[ + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict( + type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ]]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/object_detection/configs/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco.py b/object_detection/configs/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2fa2a807190427c857ddbea8ed7efd9434e5ef0f --- /dev/null +++ b/object_detection/configs/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco.py @@ -0,0 +1,23 @@ +_base_ = './sparse_rcnn_r50_fpn_1x_coco.py' + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +min_values = (480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, value) for value in min_values], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', 
flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] + +data = dict(train=dict(pipeline=train_pipeline)) +lr_config = dict(policy='step', step=[27, 33]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/object_detection/configs/ssd/README.md b/object_detection/configs/ssd/README.md new file mode 100644 index 0000000000000000000000000000000000000000..f668cd5a4234c766042b8f65e13f7d7c065e7e2a --- /dev/null +++ b/object_detection/configs/ssd/README.md @@ -0,0 +1,66 @@ +# SSD: Single Shot MultiBox Detector + +## Abstract + + + +We present a method for detecting objects in images using a single deep neural network. Our approach, named SSD, discretizes the output space of bounding boxes into a set of default boxes over different aspect ratios and scales per feature map location. At prediction time, the network generates scores for the presence of each object category in each default box and produces adjustments to the box to better match the object shape. Additionally, the network combines predictions from multiple feature maps with different resolutions to naturally handle objects of various sizes. Our SSD model is simple relative to methods that require object proposals because it completely eliminates proposal generation and subsequent pixel or feature resampling stage and encapsulates all computation in a single network. This makes SSD easy to train and straightforward to integrate into systems that require a detection component. Experimental results on the PASCAL VOC, MS COCO, and ILSVRC datasets confirm that SSD has comparable accuracy to methods that utilize an additional object proposal step and is much faster, while providing a unified framework for both training and inference. Compared to other single stage methods, SSD has much better accuracy, even with a smaller input image size. For 300×300 input, SSD achieves 72.1% mAP on VOC2007 test at 58 FPS on a Nvidia Titan X and for 500×500 input, SSD achieves 75.1% mAP, outperforming a comparable state of the art Faster R-CNN model. + + +
+ +
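The abstract above leans on SSD's grid of default boxes at several scales and aspect ratios per feature-map location. As a rough illustration only (the configs later in this diff derive box sizes from `basesize_ratio_range` or explicit `min_sizes`/`max_sizes` rather than from this formula, and the s_min/s_max values here are the paper's defaults, not values taken from this repository), the paper's linear scale rule can be sketched as:

```python
# Sketch of the SSD paper's per-level default-box scales,
# s_k = s_min + (s_max - s_min) * (k - 1) / (m - 1); illustrative only.
from math import sqrt

def ssd_default_boxes(num_levels=6, s_min=0.2, s_max=0.9, aspect_ratios=(1.0, 2.0, 0.5)):
    """Return, per feature-map level, (width, height) box shapes as fractions of the input size."""
    scales = [s_min + (s_max - s_min) * k / (num_levels - 1) for k in range(num_levels)]
    levels = []
    for k, s in enumerate(scales):
        boxes = [(s * sqrt(ar), s / sqrt(ar)) for ar in aspect_ratios]
        # extra aspect-ratio-1 box with scale sqrt(s_k * s_{k+1}), as in the paper
        s_next = scales[k + 1] if k + 1 < num_levels else 1.0
        boxes.append((sqrt(s * s_next), sqrt(s * s_next)))
        levels.append(boxes)
    return levels

if __name__ == "__main__":
    for k, boxes in enumerate(ssd_default_boxes()):
        print(f"level {k}: " + ", ".join(f"({w:.2f}, {h:.2f})" for w, h in boxes))
```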
+ + + + +## Citation + + + +```latex +@inproceedings{Liu_2016, + title={SSD: Single Shot MultiBox Detector}, + booktitle={ECCV}, + author={Liu, Wei and Anguelov, Dragomir and Erhan, Dumitru and Szegedy, Christian and Reed, Scott and Fu, Cheng-Yang and Berg, Alexander C.}, + year={2016}, +} +``` + +## Results and models of SSD + +| Backbone | Size | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :------: | :---: | :---: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| VGG16 | 300 | caffe | 120e | 9.9 | 43.7 | 25.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ssd/ssd300_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ssd/ssd300_coco/ssd300_coco_20210803_015428-d231a06e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ssd/ssd300_coco/ssd300_coco_20210803_015428.log.json) | +| VGG16 | 512 | caffe | 120e | 19.4 | 30.7 | 29.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ssd/ssd512_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ssd/ssd512_coco/ssd512_coco_20210803_022849-0a47a1ca.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ssd/ssd512_coco/ssd512_coco_20210803_022849.log.json) | + +## Results and models of SSD-Lite + +| Backbone | Size | Training from scratch | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :------------: | :---: | :-------------------: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| MobileNetV2 | 320 | yes | 600e | 4.0 | 69.9 | 21.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ssd/ssdlite_mobilenetv2_scratch_600e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ssd/ssdlite_mobilenetv2_scratch_600e_coco/ssdlite_mobilenetv2_scratch_600e_coco_20210629_110627-974d9307.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ssd/ssdlite_mobilenetv2_scratch_600e_coco/ssdlite_mobilenetv2_scratch_600e_coco_20210629_110627.log.json) | + +## Notice + +### Compatibility + +In v2.14.0, [PR5291](https://github.com/open-mmlab/mmdetection/pull/5291) refactored SSD neck and head for more +flexible usage. If users want to use an SSD checkpoint trained with an older version, we provide a script +`tools/model_converters/upgrade_ssd_version.py` to convert the model weights. + +```bash +python tools/model_converters/upgrade_ssd_version.py ${OLD_MODEL_PATH} ${NEW_MODEL_PATH} + +``` + +- OLD_MODEL_PATH: the path to load the old version SSD model. +- NEW_MODEL_PATH: the path to save the converted model weights. + +### SSD-Lite training settings + +There are some differences between our implementation of MobileNetV2 SSD-Lite and the one in [TensorFlow 1.x detection model zoo](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/tf1_detection_zoo.md). + +1. Use 320x320 as the input size instead of 300x300. +2. The anchor sizes are different. +3. The C4 feature map is taken from the last layer of stage 4 instead of the middle of the block. +4. The model in TensorFlow 1.x is trained on COCO 2014 and validated on COCO minival2014, but we trained and validated the model on COCO 2017. The mAP on val2017 is usually a little lower than on minival2014 (refer to the results in TensorFlow Object Detection API, e.g., MobileNetV2 SSD gets 22 mAP on minival2014 but 20.2 mAP on val2017).
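For reference, the checkpoints listed in the tables above can be loaded through mmdetection's high-level Python API. A minimal sketch, assuming an mmdet 2.x installation; the config path, checkpoint filename, and `demo.jpg` below are illustrative and should be adjusted to your local layout:

```python
# Minimal inference sketch with mmdetection's high-level API (mmdet 2.x).
# Paths are assumptions: point them at a config in this repo and a checkpoint
# downloaded from the table above.
from mmdet.apis import init_detector, inference_detector

config_file = 'object_detection/configs/ssd/ssd300_coco.py'
checkpoint_file = 'ssd300_coco_20210803_015428-d231a06e.pth'

model = init_detector(config_file, checkpoint_file, device='cuda:0')
result = inference_detector(model, 'demo.jpg')  # per-class arrays of [x1, y1, x2, y2, score]
model.show_result('demo.jpg', result, score_thr=0.3, out_file='demo_result.jpg')
```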
diff --git a/object_detection/configs/ssd/metafile.yml b/object_detection/configs/ssd/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..b9ee79cd7b175eb4ee2c2306afd6eabea0f098f2 --- /dev/null +++ b/object_detection/configs/ssd/metafile.yml @@ -0,0 +1,78 @@ +Collections: + - Name: SSD + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - VGG + Paper: + URL: https://arxiv.org/abs/1512.02325 + Title: 'SSD: Single Shot MultiBox Detector' + README: configs/ssd/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.14.0/mmdet/models/dense_heads/ssd_head.py#L16 + Version: v2.14.0 + +Models: + - Name: ssd300_coco + In Collection: SSD + Config: configs/ssd/ssd300_coco.py + Metadata: + Training Memory (GB): 9.9 + inference time (ms/im): + - value: 22.88 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (300, 300) + Epochs: 120 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 25.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ssd/ssd300_coco/ssd300_coco_20210803_015428-d231a06e.pth + + - Name: ssd512_coco + In Collection: SSD + Config: configs/ssd/ssd512_coco.py + Metadata: + Training Memory (GB): 19.4 + inference time (ms/im): + - value: 32.57 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (512, 512) + Epochs: 120 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 29.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ssd/ssd512_coco/ssd512_coco_20210803_022849-0a47a1ca.pth + + - Name: ssdlite_mobilenetv2_scratch_600e_coco + In Collection: SSD + Config: configs/ssd/ssdlite_mobilenetv2_scratch_600e_coco.py + Metadata: + Training Memory (GB): 4.0 + inference time (ms/im): + - value: 14.3 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (320, 320) + Epochs: 600 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 21.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ssd/ssdlite_mobilenetv2_scratch_600e_coco/ssdlite_mobilenetv2_scratch_600e_coco_20210629_110627-974d9307.pth diff --git a/object_detection/configs/ssd/ssd300_coco.py b/object_detection/configs/ssd/ssd300_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2884658b493260c03f74927b87c53852f568cbe6 --- /dev/null +++ b/object_detection/configs/ssd/ssd300_coco.py @@ -0,0 +1,66 @@ +_base_ = [ + '../_base_/models/ssd300.py', '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(300, 300), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] 
+test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(300, 300), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=3, + train=dict( + _delete_=True, + type='RepeatDataset', + times=5, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=2e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict(_delete_=True) +custom_hooks = [ + dict(type='NumClassCheckHook'), + dict(type='CheckInvalidLossHook', interval=50, priority='VERY_LOW') +] diff --git a/object_detection/configs/ssd/ssd512_coco.py b/object_detection/configs/ssd/ssd512_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..82271c635ebd7822e39f54d61241f000adbcd4d2 --- /dev/null +++ b/object_detection/configs/ssd/ssd512_coco.py @@ -0,0 +1,79 @@ +_base_ = 'ssd300_coco.py' +input_size = 512 +model = dict( + neck=dict( + out_channels=(512, 1024, 512, 256, 256, 256, 256), + level_strides=(2, 2, 2, 2, 1), + level_paddings=(1, 1, 1, 1, 1), + last_kernel_size=4), + bbox_head=dict( + in_channels=(512, 1024, 512, 256, 256, 256, 256), + anchor_generator=dict( + type='SSDAnchorGenerator', + scale_major=False, + input_size=input_size, + basesize_ratio_range=(0.1, 0.9), + strides=[8, 16, 32, 64, 128, 256, 512], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2, 3], [2], [2]]))) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(512, 512), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(512, 512), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=3, + train=dict( + _delete_=True, + type='RepeatDataset', + times=5, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=2e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict(_delete_=True) +custom_hooks = [ + dict(type='NumClassCheckHook'), + dict(type='CheckInvalidLossHook', interval=50, priority='VERY_LOW') +] diff --git 
a/object_detection/configs/ssd/ssdlite_mobilenetv2_scratch_600e_coco.py b/object_detection/configs/ssd/ssdlite_mobilenetv2_scratch_600e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c22d0a79c98d77d0438c7e4b9411d5c0d78872b8 --- /dev/null +++ b/object_detection/configs/ssd/ssdlite_mobilenetv2_scratch_600e_coco.py @@ -0,0 +1,145 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] + +model = dict( + type='SingleStageDetector', + backbone=dict( + type='MobileNetV2', + out_indices=(4, 7), + norm_cfg=dict(type='BN', eps=0.001, momentum=0.03), + init_cfg=dict(type='TruncNormal', layer='Conv2d', std=0.03)), + neck=dict( + type='SSDNeck', + in_channels=(96, 1280), + out_channels=(96, 1280, 512, 256, 256, 128), + level_strides=(2, 2, 2, 2), + level_paddings=(1, 1, 1, 1), + l2_norm_scale=None, + use_depthwise=True, + norm_cfg=dict(type='BN', eps=0.001, momentum=0.03), + act_cfg=dict(type='ReLU6'), + init_cfg=dict(type='TruncNormal', layer='Conv2d', std=0.03)), + bbox_head=dict( + type='SSDHead', + in_channels=(96, 1280, 512, 256, 256, 128), + num_classes=80, + use_depthwise=True, + norm_cfg=dict(type='BN', eps=0.001, momentum=0.03), + act_cfg=dict(type='ReLU6'), + init_cfg=dict(type='Normal', layer='Conv2d', std=0.001), + + # set anchor size manually instead of using the predefined + # SSD300 setting. + anchor_generator=dict( + type='SSDAnchorGenerator', + scale_major=False, + strides=[16, 32, 64, 107, 160, 320], + ratios=[[2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2, 3]], + min_sizes=[48, 100, 150, 202, 253, 304], + max_sizes=[100, 150, 202, 253, 304, 320]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2])), + # model training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0., + ignore_iof_thr=-1, + gt_max_assign_all=False), + smoothl1_beta=1., + allowed_border=-1, + pos_weight=-1, + neg_pos_ratio=3, + debug=False), + test_cfg=dict( + nms_pre=1000, + nms=dict(type='nms', iou_threshold=0.45), + min_bbox_size=0, + score_thr=0.02, + max_per_img=200)) +cudnn_benchmark = True + +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(320, 320), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=320), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(320, 320), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=320), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=24, + workers_per_gpu=4, + train=dict( + 
_delete_=True, + type='RepeatDataset', # use RepeatDataset to speed up training + times=5, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +# optimizer +optimizer = dict(type='SGD', lr=0.015, momentum=0.9, weight_decay=4.0e-5) +optimizer_config = dict(grad_clip=None) + +# learning policy +lr_config = dict( + policy='CosineAnnealing', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + min_lr=0) +runner = dict(type='EpochBasedRunner', max_epochs=120) + +# Avoid evaluation and saving weights too frequently +evaluation = dict(interval=5, metric='bbox') +checkpoint_config = dict(interval=5) +custom_hooks = [ + dict(type='NumClassCheckHook'), + dict(type='CheckInvalidLossHook', interval=50, priority='VERY_LOW') +] diff --git a/object_detection/configs/strong_baselines/README.md b/object_detection/configs/strong_baselines/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5ada104bbe2259e61de7a86182cc91f6a95b21e1 --- /dev/null +++ b/object_detection/configs/strong_baselines/README.md @@ -0,0 +1,18 @@ +# Strong Baselines + +We train Mask R-CNN with large-scale jitter and longer schedule as strong baselines. +The modifications follow those in [Detectron2](https://github.com/facebookresearch/detectron2/tree/master/configs/new_baselines). + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | pytorch | 50e | | | | | [config](./mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_lsj_50e_coco.py) | [model]() | [log]() | +| R-50-FPN | pytorch | 100e | | | | | [config](./mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_lsj_100e_coco.py) | [model]() | [log]() | +| R-50-FPN | caffe | 100e | | | 44.7 | 40.4 | [config](./mask_rcnn_r50_caffe_fpn_syncbn-all_rpn-2conv_lsj_100e_coco.py) | [model]() | [log]() | +| R-50-FPN | caffe | 400e | | | | | [config](./mask_rcnn_r50_caffe_fpn_syncbn-all_rpn-2conv_lsj_400e_coco.py) | [model]() | [log]() | + +## Notice + +When using large-scale jittering, there are sometimes empty proposals in the box and mask heads during training. +This requires MMSyncBN that allows empty tensors. Therefore, please use mmcv-full>=1.3.14 to train models supported in this directory. diff --git a/object_detection/configs/strong_baselines/mask_rcnn_r50_caffe_fpn_syncbn-all_rpn-2conv_lsj_100e_coco.py b/object_detection/configs/strong_baselines/mask_rcnn_r50_caffe_fpn_syncbn-all_rpn-2conv_lsj_100e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a40d6a036508c2ca9188caeda94b4ee0aca6c8b9 --- /dev/null +++ b/object_detection/configs/strong_baselines/mask_rcnn_r50_caffe_fpn_syncbn-all_rpn-2conv_lsj_100e_coco.py @@ -0,0 +1,80 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../common/lsj_100e_coco_instance.py' +] + +norm_cfg = dict(type='SyncBN', requires_grad=True) +# Use MMSyncBN that handles empty tensor in head. It can be changed to +# SyncBN after https://github.com/pytorch/pytorch/issues/36530 is fixed +# Requires MMCV-full after https://github.com/open-mmlab/mmcv/pull/1205. 
+head_norm_cfg = dict(type='MMSyncBN', requires_grad=True) +model = dict( + backbone=dict( + frozen_stages=-1, + norm_eval=False, + norm_cfg=norm_cfg, + init_cfg=None, + style='caffe'), + neck=dict(norm_cfg=norm_cfg), + rpn_head=dict(num_convs=2), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=head_norm_cfg), + mask_head=dict(norm_cfg=head_norm_cfg))) + +file_client_args = dict(backend='disk') +# file_client_args = dict( +# backend='petrel', +# path_mapping=dict({ +# './data/': 's3://openmmlab/datasets/detection/', +# 'data/': 's3://openmmlab/datasets/detection/' +# })) + +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +image_size = (1024, 1024) +train_pipeline = [ + dict(type='LoadImageFromFile', file_client_args=file_client_args), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=image_size, + ratio_range=(0.1, 2.0), + multiscale_mode='range', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=image_size, + recompute_bbox=True, + allow_negative_crop=True), + dict(type='FilterAnnotations', min_gt_bbox_wh=(1e-2, 1e-2)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=image_size), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', file_client_args=file_client_args), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/strong_baselines/mask_rcnn_r50_caffe_fpn_syncbn-all_rpn-2conv_lsj_100e_fp16_coco.py b/object_detection/configs/strong_baselines/mask_rcnn_r50_caffe_fpn_syncbn-all_rpn-2conv_lsj_100e_fp16_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..31824eb50067c6b2cab49f3e6eebfa5f02fe592d --- /dev/null +++ b/object_detection/configs/strong_baselines/mask_rcnn_r50_caffe_fpn_syncbn-all_rpn-2conv_lsj_100e_fp16_coco.py @@ -0,0 +1,2 @@ +_base_ = 'mask_rcnn_r50_caffe_fpn_syncbn-all_rpn-2conv_lsj_100e_coco.py' +fp16 = dict(loss_scale=512.) 
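In the pipeline above, the `Resize` with `ratio_range=(0.1, 2.0)`, the `RandomCrop` to `image_size`, and the `Pad` to the same size together implement large-scale jittering. A stripped-down numpy sketch of just the image geometry (mmdetection's transforms additionally resample boxes and masks and use bilinear interpolation, so treat this as an illustration only):

```python
# Rough sketch of large-scale jittering on an image array of shape (H, W, 3);
# an illustration of the Resize/RandomCrop/Pad combination above, not mmdetection code.
import numpy as np

def large_scale_jitter(img, out_size=1024, ratio_range=(0.1, 2.0), rng=np.random):
    scale = rng.uniform(*ratio_range)
    h, w = img.shape[:2]
    new_h, new_w = max(1, int(h * scale)), max(1, int(w * scale))
    # nearest-neighbour resize via index sampling (real pipelines use bilinear)
    ys = (np.arange(new_h) * h / new_h).astype(int)
    xs = (np.arange(new_w) * w / new_w).astype(int)
    resized = img[ys][:, xs]
    # random crop if the jittered image is larger than the target ...
    y0 = rng.randint(0, max(new_h - out_size, 0) + 1)
    x0 = rng.randint(0, max(new_w - out_size, 0) + 1)
    cropped = resized[y0:y0 + out_size, x0:x0 + out_size]
    # ... then pad so every sample ends up exactly out_size x out_size
    canvas = np.zeros((out_size, out_size, img.shape[2]), dtype=img.dtype)
    canvas[:cropped.shape[0], :cropped.shape[1]] = cropped
    return canvas
```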
diff --git a/object_detection/configs/strong_baselines/mask_rcnn_r50_caffe_fpn_syncbn-all_rpn-2conv_lsj_400e_coco.py b/object_detection/configs/strong_baselines/mask_rcnn_r50_caffe_fpn_syncbn-all_rpn-2conv_lsj_400e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1211925dea4e27db833f06d52367791ac7125033 --- /dev/null +++ b/object_detection/configs/strong_baselines/mask_rcnn_r50_caffe_fpn_syncbn-all_rpn-2conv_lsj_400e_coco.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_caffe_fpn_syncbn-all_rpn-2conv_lsj_100e_coco.py' + +# Use RepeatDataset to speed up training +# change repeat time from 4 (for 100 epochs) to 16 (for 400 epochs) +data = dict(train=dict(times=4 * 4)) +lr_config = dict(warmup_iters=500 * 4) diff --git a/object_detection/configs/strong_baselines/mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_lsj_100e_coco.py b/object_detection/configs/strong_baselines/mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_lsj_100e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4a15d698b672da57e1bd866189e6b75785fbad8a --- /dev/null +++ b/object_detection/configs/strong_baselines/mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_lsj_100e_coco.py @@ -0,0 +1,22 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../common/lsj_100e_coco_instance.py' +] + +norm_cfg = dict(type='SyncBN', requires_grad=True) +# Use MMSyncBN that handles empty tensor in head. It can be changed to +# SyncBN after https://github.com/pytorch/pytorch/issues/36530 is fixed +# Requires MMCV-full after https://github.com/open-mmlab/mmcv/pull/1205. +head_norm_cfg = dict(type='MMSyncBN', requires_grad=True) +model = dict( + # the model is trained from scratch, so init_cfg is None + backbone=dict( + frozen_stages=-1, norm_eval=False, norm_cfg=norm_cfg, init_cfg=None), + neck=dict(norm_cfg=norm_cfg), + rpn_head=dict(num_convs=2), # leads to 0.1+ mAP + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=head_norm_cfg), + mask_head=dict(norm_cfg=head_norm_cfg))) diff --git a/object_detection/configs/strong_baselines/mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_lsj_100e_fp16_coco.py b/object_detection/configs/strong_baselines/mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_lsj_100e_fp16_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7b97960a878e4f0649c34dc7c00c99516baa731a --- /dev/null +++ b/object_detection/configs/strong_baselines/mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_lsj_100e_fp16_coco.py @@ -0,0 +1,3 @@ +_base_ = 'mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_lsj_100e_coco.py' +# use FP16 +fp16 = dict(loss_scale=512.) 
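The `fp16 = dict(loss_scale=512.)` lines above turn on mmcv's mixed-precision training with a static loss scale. The scale exists only to keep small half-precision gradients from underflowing; a bare-bones PyTorch sketch of that arithmetic (not the mmcv `Fp16OptimizerHook`, which also maintains FP32 master weights and handles casting):

```python
# Conceptual sketch of static loss scaling, as enabled by `fp16 = dict(loss_scale=512.)`.
import torch

def fp16_step(model, optimizer, loss, loss_scale=512.0):
    optimizer.zero_grad()
    (loss * loss_scale).backward()      # scale up so fp16 gradients do not underflow
    for p in model.parameters():        # unscale before the optimizer sees the gradients
        if p.grad is not None:
            p.grad.div_(loss_scale)
    optimizer.step()
```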
diff --git a/object_detection/configs/strong_baselines/mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_lsj_50e_coco.py b/object_detection/configs/strong_baselines/mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_lsj_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..922579a184a5a16c8e8263d50b39be4d99de8a90 --- /dev/null +++ b/object_detection/configs/strong_baselines/mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_lsj_50e_coco.py @@ -0,0 +1,5 @@ +_base_ = 'mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_lsj_100e_coco.py' + +# Use RepeatDataset to speed up training +# change repeat time from 4 (for 100 epochs) to 2 (for 50 epochs) +data = dict(train=dict(times=2)) diff --git a/object_detection/configs/swin/README.md b/object_detection/configs/swin/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9a4ef02dc289918da92ea3d2b6eb6da4e3deaeda --- /dev/null +++ b/object_detection/configs/swin/README.md @@ -0,0 +1,44 @@ +# Swin Transformer: Hierarchical Vision Transformer using Shifted Windows + +## Abstract + + + +This paper presents a new vision Transformer, called Swin Transformer, that capably serves as a general-purpose backbone for computer vision. Challenges in adapting Transformer from language to vision arise from differences between the two domains, such as large variations in the scale of visual entities and the high resolution of pixels in images compared to words in text. To address these differences, we propose a hierarchical Transformer whose representation is computed with Shifted windows. The shifted windowing scheme brings greater efficiency by limiting self-attention computation to non-overlapping local windows while also allowing for cross-window connection. This hierarchical architecture has the flexibility to model at various scales and has linear computational complexity with respect to image size. These qualities of Swin Transformer make it compatible with a broad range of vision tasks, including image classification (87.3 top-1 accuracy on ImageNet-1K) and dense prediction tasks such as object detection (58.7 box AP and 51.1 mask AP on COCO test-dev) and semantic segmentation (53.5 mIoU on ADE20K val). Its performance surpasses the previous state-of-the-art by a large margin of +2.7 box AP and +2.6 mask AP on COCO, and +3.2 mIoU on ADE20K, demonstrating the potential of Transformer-based models as vision backbones. The hierarchical design and the shifted window approach also prove beneficial for all-MLP architectures. + + +
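A quick sketch of the window partitioning the abstract refers to, assuming `window_size=7` as in the configs below; real Swin code works on batched tensors and adds a relative position bias, so this is an illustration only:

```python
# Non-overlapping window partitioning: a (H, W, C) map becomes (num_windows, 7, 7, C)
# blocks on which self-attention is computed locally. Shifting the map by
# window_size // 2 before partitioning gives the "shifted window" variant.
import numpy as np

def window_partition(x, window_size=7):
    H, W, C = x.shape
    assert H % window_size == 0 and W % window_size == 0
    x = x.reshape(H // window_size, window_size, W // window_size, window_size, C)
    return x.transpose(0, 2, 1, 3, 4).reshape(-1, window_size, window_size, C)

feat = np.random.rand(56, 56, 96)                     # stage-1 resolution for a 224x224 input
windows = window_partition(feat)                      # (64, 7, 7, 96)
shifted = np.roll(feat, shift=(-3, -3), axis=(0, 1))  # cyclic shift by window_size // 2
print(windows.shape, window_partition(shifted).shape)
```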
+ +
+ + + + +## Citation + + + +```latex +@article{liu2021Swin, + title={Swin Transformer: Hierarchical Vision Transformer using Shifted Windows}, + author={Liu, Ze and Lin, Yutong and Cao, Yue and Hu, Han and Wei, Yixuan and Zhang, Zheng and Lin, Stephen and Guo, Baining}, + journal={arXiv preprint arXiv:2103.14030}, + year={2021} +} +``` + +## Results and models + +### Mask R-CNN + +| Backbone | Pretrain | Lr schd | Multi-scale crop | FP16 |Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :------: | :---------: | :-----: | :-------------------:| :------: |:------: | :------------: | :----: | :-----: | :------: | :--------: | +| Swin-T | ImageNet-1K | 1x | no | no | 7.6 | | 42.7 | 39.3 | [config](./mask_rcnn_swin-t-p4-w7_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/swin/mask_rcnn_swin-t-p4-w7_fpn_1x_coco/mask_rcnn_swin-t-p4-w7_fpn_1x_coco_20210902_120937-9d6b7cfa.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/swin/mask_rcnn_swin-t-p4-w7_fpn_1x_coco/mask_rcnn_swin-t-p4-w7_fpn_1x_coco_20210902_120937.log.json) | +| Swin-T | ImageNet-1K | 3x | yes | no | 10.2 | | 46.0 | 41.6 | [config](./mask_rcnn_swin-t-p4-w7_fpn_ms-crop-3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/swin/mask_rcnn_swin-t-p4-w7_fpn_ms-crop-3x_coco/mask_rcnn_swin-t-p4-w7_fpn_ms-crop-3x_coco_20210906_131725-bacf6f7b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/swin/mask_rcnn_swin-t-p4-w7_fpn_ms-crop-3x_coco/mask_rcnn_swin-t-p4-w7_fpn_ms-crop-3x_coco_20210906_131725.log.json) | +| Swin-T | ImageNet-1K | 3x | yes | yes | 7.8 | | 46.0 | 41.7 | [config](./mask_rcnn_swin-t-p4-w7_fpn_fp16_ms-crop-3x_coco.py)| [model](https://download.openmmlab.com/mmdetection/v2.0/swin/mask_rcnn_swin-t-p4-w7_fpn_fp16_ms-crop-3x_coco/mask_rcnn_swin-t-p4-w7_fpn_fp16_ms-crop-3x_coco_20210908_165006-90a4008c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/swin/mask_rcnn_swin-t-p4-w7_fpn_fp16_ms-crop-3x_coco/mask_rcnn_swin-t-p4-w7_fpn_fp16_ms-crop-3x_coco_20210908_165006.log.json) | +| Swin-S | ImageNet-1K | 3x | yes | yes | 11.9 | | 48.2 | 43.2 | [config](./mask_rcnn_swin-s-p4-w7_fpn_fp16_ms-crop-3x_coco.py)| [model](https://download.openmmlab.com/mmdetection/v2.0/swin/mask_rcnn_swin-s-p4-w7_fpn_fp16_ms-crop-3x_coco/mask_rcnn_swin-s-p4-w7_fpn_fp16_ms-crop-3x_coco_20210903_104808-b92c91f1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/swin/mask_rcnn_swin-s-p4-w7_fpn_fp16_ms-crop-3x_coco/mask_rcnn_swin-s-p4-w7_fpn_fp16_ms-crop-3x_coco_20210903_104808.log.json) | + +### Notice +Please follow the example +of `retinanet_swin-t-p4-w7_fpn_1x_coco.py` when you want to combine Swin Transformer with +the one-stage detector. Because there is a layer norm at the outs of Swin Transformer, you must set `start_level` as 0 in FPN, so we have to set the `out_indices` of backbone as `[1,2,3]`. 
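The note above follows from Swin's hierarchical design: stage `i` outputs `embed_dims * 2**i` channels, so Swin-T with `embed_dims=96` yields 96/192/384/768-channel maps, matching the FPN `in_channels` lists in the configs below. A tiny helper (illustrative, not part of mmdetection) makes the arithmetic explicit:

```python
# Channels fed to the FPN neck for a Swin backbone: stage i carries embed_dims * 2**i.
def swin_fpn_in_channels(embed_dims=96, out_indices=(0, 1, 2, 3)):
    return [embed_dims * 2 ** i for i in out_indices]

print(swin_fpn_in_channels(96, (0, 1, 2, 3)))  # [96, 192, 384, 768] -> Mask R-CNN configs
print(swin_fpn_in_channels(96, (1, 2, 3)))     # [192, 384, 768]     -> RetinaNet config
```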
diff --git a/object_detection/configs/swin/mask_rcnn_swin-s-p4-w7_fpn_fp16_ms-crop-3x_coco.py b/object_detection/configs/swin/mask_rcnn_swin-s-p4-w7_fpn_fp16_ms-crop-3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..15d50a0228b9c4442596a440814109844ab6cfed --- /dev/null +++ b/object_detection/configs/swin/mask_rcnn_swin-s-p4-w7_fpn_fp16_ms-crop-3x_coco.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_swin-t-p4-w7_fpn_fp16_ms-crop-3x_coco.py' +pretrained = 'https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_small_patch4_window7_224.pth' # noqa +model = dict( + backbone=dict( + depths=[2, 2, 18, 2], + init_cfg=dict(type='Pretrained', checkpoint=pretrained))) diff --git a/object_detection/configs/swin/mask_rcnn_swin-t-p4-w7_fpn_1x_coco.py b/object_detection/configs/swin/mask_rcnn_swin-t-p4-w7_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..337e85818c5d3bd30147d636d4a90dc8d64184fc --- /dev/null +++ b/object_detection/configs/swin/mask_rcnn_swin-t-p4-w7_fpn_1x_coco.py @@ -0,0 +1,42 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +pretrained = 'https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_tiny_patch4_window7_224.pth' # noqa +model = dict( + type='MaskRCNN', + backbone=dict( + _delete_=True, + type='SwinTransformer', + embed_dims=96, + depths=[2, 2, 6, 2], + num_heads=[3, 6, 12, 24], + window_size=7, + mlp_ratio=4, + qkv_bias=True, + qk_scale=None, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0.2, + patch_norm=True, + out_indices=(0, 1, 2, 3), + with_cp=False, + convert_weights=True, + init_cfg=dict(type='Pretrained', checkpoint=pretrained)), + neck=dict(in_channels=[96, 192, 384, 768])) + +optimizer = dict( + _delete_=True, + type='AdamW', + lr=0.0001, + betas=(0.9, 0.999), + weight_decay=0.05, + paramwise_cfg=dict( + custom_keys={ + 'absolute_pos_embed': dict(decay_mult=0.), + 'relative_position_bias_table': dict(decay_mult=0.), + 'norm': dict(decay_mult=0.) 
+ })) +lr_config = dict(warmup_iters=1000, step=[8, 11]) +runner = dict(max_epochs=12) diff --git a/object_detection/configs/swin/mask_rcnn_swin-t-p4-w7_fpn_fp16_ms-crop-3x_coco.py b/object_detection/configs/swin/mask_rcnn_swin-t-p4-w7_fpn_fp16_ms-crop-3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2be31143df5dcfbe8a9582d556f398ccda293464 --- /dev/null +++ b/object_detection/configs/swin/mask_rcnn_swin-t-p4-w7_fpn_fp16_ms-crop-3x_coco.py @@ -0,0 +1,3 @@ +_base_ = './mask_rcnn_swin-t-p4-w7_fpn_ms-crop-3x_coco.py' +# you need to set mode='dynamic' if you are using pytorch<=1.5.0 +fp16 = dict(loss_scale=dict(init_scale=512)) diff --git a/object_detection/configs/swin/mask_rcnn_swin-t-p4-w7_fpn_ms-crop-3x_coco.py b/object_detection/configs/swin/mask_rcnn_swin-t-p4-w7_fpn_ms-crop-3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2612f6e331e4fafe87945a990801122b7e620f69 --- /dev/null +++ b/object_detection/configs/swin/mask_rcnn_swin-t-p4-w7_fpn_ms-crop-3x_coco.py @@ -0,0 +1,91 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +pretrained = 'https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_tiny_patch4_window7_224.pth' # noqa + +model = dict( + type='MaskRCNN', + backbone=dict( + _delete_=True, + type='SwinTransformer', + embed_dims=96, + depths=[2, 2, 6, 2], + num_heads=[3, 6, 12, 24], + window_size=7, + mlp_ratio=4, + qkv_bias=True, + qk_scale=None, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0.2, + patch_norm=True, + out_indices=(0, 1, 2, 3), + with_cp=False, + convert_weights=True, + init_cfg=dict(type='Pretrained', checkpoint=pretrained)), + neck=dict(in_channels=[96, 192, 384, 768])) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from DETR / Sparse RCNN +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='AutoAugment', + policies=[[ + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict( + type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ]]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) + +optimizer = dict( + _delete_=True, + type='AdamW', + lr=0.0001, + betas=(0.9, 0.999), + weight_decay=0.05, + paramwise_cfg=dict( + custom_keys={ + 'absolute_pos_embed': dict(decay_mult=0.), + 'relative_position_bias_table': dict(decay_mult=0.), + 'norm': dict(decay_mult=0.) 
+ })) +lr_config = dict(warmup_iters=1000, step=[27, 33]) +runner = dict(max_epochs=36) diff --git a/object_detection/configs/swin/metafile.yml b/object_detection/configs/swin/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..b265afe36bb8588cb6697906c550af1f6440dae8 --- /dev/null +++ b/object_detection/configs/swin/metafile.yml @@ -0,0 +1,85 @@ +Collections: + - Name: Swin Transformer + Metadata: + Training Data: COCO + Training Techniques: + - AdamW + Training Resources: 8x V100 GPUs + Architecture: + - Swin Transformer + Paper: + URL: https://arxiv.org/abs/2107.08430 + Title: 'Swin Transformer: Hierarchical Vision Transformer using Shifted Windows' + README: configs/swin/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.16.0/mmdet/models/backbones/swin.py#L465 + Version: v2.16.0 + +Models: + - Name: mask_rcnn_swin-s-p4-w7_fpn_fp16_ms-crop-3x_coco + In Collection: Swin Transformer + Config: configs/swin/mask_rcnn_swin-s-p4-w7_fpn_fp16_ms-crop-3x_coco.py + Metadata: + Training Memory (GB): 11.9 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 48.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 43.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/swin/mask_rcnn_swin-s-p4-w7_fpn_fp16_ms-crop-3x_coco/mask_rcnn_swin-s-p4-w7_fpn_fp16_ms-crop-3x_coco_20210903_104808-b92c91f1.pth + + - Name: mask_rcnn_swin-t-p4-w7_fpn_ms-crop-3x_coco + In Collection: Swin Transformer + Config: configs/swin/mask_rcnn_swin-t-p4-w7_fpn_ms-crop-3x_coco.py + Metadata: + Training Memory (GB): 10.2 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/swin/mask_rcnn_swin-t-p4-w7_fpn_ms-crop-3x_coco/mask_rcnn_swin-t-p4-w7_fpn_ms-crop-3x_coco_20210906_131725-bacf6f7b.pth + + - Name: mask_rcnn_swin-t-p4-w7_fpn_1x_coco + In Collection: Swin Transformer + Config: configs/swin/mask_rcnn_swin-t-p4-w7_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.6 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/swin/mask_rcnn_swin-t-p4-w7_fpn_1x_coco/mask_rcnn_swin-t-p4-w7_fpn_1x_coco_20210902_120937-9d6b7cfa.pth + + - Name: mask_rcnn_swin-t-p4-w7_fpn_fp16_ms-crop-3x_coco + In Collection: Swin Transformer + Config: configs/swin/mask_rcnn_swin-t-p4-w7_fpn_fp16_ms-crop-3x_coco.py + Metadata: + Training Memory (GB): 7.8 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/swin/mask_rcnn_swin-t-p4-w7_fpn_fp16_ms-crop-3x_coco/mask_rcnn_swin-t-p4-w7_fpn_fp16_ms-crop-3x_coco_20210908_165006-90a4008c.pth diff --git a/object_detection/configs/swin/retinanet_swin-t-p4-w7_fpn_1x_coco.py b/object_detection/configs/swin/retinanet_swin-t-p4-w7_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9d620fd980ad527d1a594b3e2e5f756e0b7d7071 --- /dev/null +++ b/object_detection/configs/swin/retinanet_swin-t-p4-w7_fpn_1x_coco.py @@ -0,0 +1,29 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', 
'../_base_/default_runtime.py' +] +pretrained = 'https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_tiny_patch4_window7_224.pth' # noqa +model = dict( + backbone=dict( + _delete_=True, + type='SwinTransformer', + embed_dims=96, + depths=[2, 2, 6, 2], + num_heads=[3, 6, 12, 24], + window_size=7, + mlp_ratio=4, + qkv_bias=True, + qk_scale=None, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0.2, + patch_norm=True, + out_indices=(1, 2, 3), + # Please only add indices that would be used + # in FPN, otherwise some parameter will not be used + with_cp=False, + init_cfg=dict(type='Pretrained', checkpoint=pretrained)), + neck=dict(in_channels=[192, 384, 768], start_level=0, num_outs=5)) + +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/object_detection/configs/tood/README.md b/object_detection/configs/tood/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b1522e78565969262190baba6119ba7a37aa9d41 --- /dev/null +++ b/object_detection/configs/tood/README.md @@ -0,0 +1,44 @@ +# TOOD: Task-aligned One-stage Object Detection + +## Abstract + + + +One-stage object detection is commonly implemented by optimizing two sub-tasks: object classification and localization, using heads with two parallel branches, which might lead to a certain level of spatial misalignment in predictions between the two tasks. In this work, we propose a Task-aligned One-stage Object Detection (TOOD) that explicitly aligns the two tasks in a learning-based manner. First, we design a novel Task-aligned Head (T-Head) which offers a better balance between learning task-interactive and task-specific features, as well as a greater flexibility to learn the alignment via a task-aligned predictor. Second, we propose Task Alignment Learning (TAL) to explicitly pull closer (or even unify) the optimal anchors for the two tasks during training via a designed sample assignment scheme and a task-aligned loss. Extensive experiments are conducted on MS-COCO, where TOOD achieves a 51.1 AP at single-model single-scale testing. This surpasses the recent one-stage detectors by a large margin, such as ATSS (47.7 AP), GFL (48.2 AP), and PAA (49.0 AP), with fewer parameters and FLOPs. Qualitative results also demonstrate the effectiveness of TOOD for better aligning the tasks of object classification and localization. + + +
+ +
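The Task Alignment Learning described above ranks candidate anchors by how well they serve both sub-tasks at once, via the alignment metric `t = s**alpha * u**beta`, where `s` is the classification score for the ground-truth class and `u` is the IoU with the ground-truth box. A small sketch using the values from the configs below (`alpha=1`, `beta=6`, top 13 candidates per ground truth); this is an illustration, not the mmdetection `TaskAlignedAssigner`:

```python
# Sketch of TOOD's task-alignment metric and top-k candidate selection; illustrative only.
import numpy as np

def task_alignment_topk(cls_scores, ious, alpha=1.0, beta=6.0, topk=13):
    """cls_scores, ious: arrays of shape (num_anchors,) for one ground-truth box."""
    t = (cls_scores ** alpha) * (ious ** beta)
    k = min(topk, t.size)
    return np.argsort(t)[::-1][:k]  # indices of the best-aligned candidates

# Anchors strong on only one task rank below anchors that are good at both.
scores = np.array([0.9, 0.2, 0.6])
ious = np.array([0.3, 0.8, 0.7])
print(task_alignment_topk(scores, ious, topk=2))  # -> [2 1]
```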
+ + + + +## Citation + + + +```latex +@inproceedings{feng2021tood, + title={TOOD: Task-aligned One-stage Object Detection}, + author={Feng, Chengjian and Zhong, Yujie and Gao, Yu and Scott, Matthew R and Huang, Weilin}, + booktitle={ICCV}, + year={2021} +} +``` + +## Results and Models + +| Backbone | Style | Anchor Type | Lr schd | Multi-scale Training| Mem (GB)| Inf time (fps) | box AP | Config | Download | +|:-----------------:|:-------:|:------------:|:-------:|:-------------------:|:-------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | Anchor-free | 1x | N | 4.1 | | 42.4 | [config](./tood_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/tood/tood_r50_fpn_1x_coco/tood_r50_fpn_1x_coco_20211210_103425-20e20746.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/tood/tood_r50_fpn_1x_coco/tood_r50_fpn_1x_coco_20211210_103425.log) | +| R-50 | pytorch | Anchor-based | 1x | N | 4.1 | | 42.4 | [config](./tood_r50_fpn_anchor_based_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/tood/tood_r50_fpn_anchor_based_1x_coco/tood_r50_fpn_anchor_based_1x_coco_20211214_100105-b776c134.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/tood/tood_r50_fpn_anchor_based_1x_coco/tood_r50_fpn_anchor_based_1x_coco_20211214_100105.log) | +| R-50 | pytorch | Anchor-free | 2x | Y | 4.1 | | 44.5 | [config](./tood_r50_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/tood/tood_r50_fpn_mstrain_2x_coco/tood_r50_fpn_mstrain_2x_coco_20211210_144231-3b23174c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/tood/tood_r50_fpn_mstrain_2x_coco/tood_r50_fpn_mstrain_2x_coco_20211210_144231.log) | +| R-101 | pytorch | Anchor-free | 2x | Y | 6.0 | | 46.1 | [config](./tood_r101_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/tood/tood_r101_fpn_mstrain_2x_coco/tood_r101_fpn_mstrain_2x_coco_20211210_144232-a18f53c8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/tood/tood_r101_fpn_mstrain_2x_coco/tood_r101_fpn_mstrain_2x_coco_20211210_144232.log) | +| R-101-dcnv2 | pytorch | Anchor-free | 2x | Y | 6.2 | | 49.3 | [config](./tood_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/tood/tood_r101_fpn_dconv_c3-c5_mstrain_2x_coco/tood_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20211210_213728-4a824142.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/tood/tood_r101_fpn_dconv_c3-c5_mstrain_2x_coco/tood_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20211210_213728.log) | +| X-101-64x4d | pytorch | Anchor-free | 2x | Y | 10.2 | | 47.6 | [config](./tood_x101_64x4d_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/tood/tood_x101_64x4d_fpn_mstrain_2x_coco/tood_x101_64x4d_fpn_mstrain_2x_coco_20211211_003519-a4f36113.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/tood/tood_x101_64x4d_fpn_mstrain_2x_coco/tood_x101_64x4d_fpn_mstrain_2x_coco_20211211_003519.log) | +| X-101-64x4d-dcnv2 | pytorch | Anchor-free | 2x | Y | | | | [config](./tood_x101_64x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py) | [model]() | [log]() | + +[1] *1x and 2x mean the model is trained for 90K and 180K iterations, respectively.* \ +[2] *All results are obtained with a single model and without any test time data augmentation such as multi-scale, flipping and etc..* \ +[3] *`dcnv2` denotes deformable convolutional networks v2.* \ diff --git a/object_detection/configs/tood/metafile.yml 
b/object_detection/configs/tood/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..27a0f8dbfc59614ffd39f22d49ac3eabce8e3b62 --- /dev/null +++ b/object_detection/configs/tood/metafile.yml @@ -0,0 +1,95 @@ +Collections: + - Name: TOOD + Metadata: + Training Data: COCO + Training Techniques: + - SGD + Training Resources: 8x V100 GPUs + Architecture: + - TOOD + Paper: + URL: https://arxiv.org/abs/2108.07755 + Title: 'TOOD: Task-aligned One-stage Object Detection' + README: configs/tood/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.20.0/mmdet/models/detectors/tood.py#L7 + Version: v2.20.0 + +Models: + - Name: tood_r101_fpn_mstrain_2x_coco + In Collection: TOOD + Config: configs/tood/tood_r101_fpn_mstrain_2x_coco.py + Metadata: + Training Memory (GB): 6.0 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/tood/tood_r101_fpn_mstrain_2x_coco/tood_r101_fpn_mstrain_2x_coco_20211210_144232-a18f53c8.pth + + - Name: tood_x101_64x4d_fpn_mstrain_2x_coco + In Collection: TOOD + Config: configs/tood/tood_x101_64x4d_fpn_mstrain_2x_coco.py + Metadata: + Training Memory (GB): 10.2 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/tood/tood_x101_64x4d_fpn_mstrain_2x_coco/tood_x101_64x4d_fpn_mstrain_2x_coco_20211211_003519-a4f36113.pth + + - Name: tood_r101_fpn_dconv_c3-c5_mstrain_2x_coco + In Collection: TOOD + Config: configs/tood/tood_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py + Metadata: + Training Memory (GB): 6.2 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 49.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/tood/tood_r101_fpn_dconv_c3-c5_mstrain_2x_coco/tood_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20211210_213728-4a824142.pth + + - Name: tood_r50_fpn_anchor_based_1x_coco + In Collection: TOOD + Config: configs/tood/tood_r50_fpn_anchor_based_1x_coco.py + Metadata: + Training Memory (GB): 4.1 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/tood/tood_r50_fpn_anchor_based_1x_coco/tood_r50_fpn_anchor_based_1x_coco_20211214_100105-b776c134.pth + + - Name: tood_r50_fpn_1x_coco + In Collection: TOOD + Config: configs/tood/tood_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.1 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/tood/tood_r50_fpn_1x_coco/tood_r50_fpn_1x_coco_20211210_103425-20e20746.pth + + - Name: tood_r50_fpn_mstrain_2x_coco + In Collection: TOOD + Config: configs/tood/tood_r50_fpn_mstrain_2x_coco.py + Metadata: + Training Memory (GB): 4.1 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/tood/tood_r50_fpn_mstrain_2x_coco/tood_r50_fpn_mstrain_2x_coco_20211210_144231-3b23174c.pth diff --git a/object_detection/configs/tood/tood_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py b/object_detection/configs/tood/tood_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c7f1bbcbaf17381f6917f5fe7dda8d5b40dd9170 --- /dev/null +++ b/object_detection/configs/tood/tood_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,7 @@ +_base_ = 
'./tood_r101_fpn_mstrain_2x_coco.py' + +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deformable_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True)), + bbox_head=dict(num_dcn=2)) diff --git a/object_detection/configs/tood/tood_r101_fpn_mstrain_2x_coco.py b/object_detection/configs/tood/tood_r101_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d9d2c32d8ceba33a8efa0ddd7074426480301512 --- /dev/null +++ b/object_detection/configs/tood/tood_r101_fpn_mstrain_2x_coco.py @@ -0,0 +1,7 @@ +_base_ = './tood_r50_fpn_mstrain_2x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/tood/tood_r50_fpn_1x_coco.py b/object_detection/configs/tood/tood_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..35a77a400e155c7e08253bb526b4592c2fca405c --- /dev/null +++ b/object_detection/configs/tood/tood_r50_fpn_1x_coco.py @@ -0,0 +1,74 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='TOOD', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='TOODHead', + num_classes=80, + in_channels=256, + stacked_convs=6, + feat_channels=256, + anchor_type='anchor_free', + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + initial_loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + activated=True, # use probability instead of logit as input + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_cls=dict( + type='QualityFocalLoss', + use_sigmoid=True, + activated=True, # use probability instead of logit as input + beta=2.0, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=2.0)), + train_cfg=dict( + initial_epoch=4, + initial_assigner=dict(type='ATSSAssigner', topk=9), + assigner=dict(type='TaskAlignedAssigner', topk=13), + alpha=1, + beta=6, + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) + +# custom hooks +custom_hooks = [dict(type='SetEpochInfoHook')] diff --git a/object_detection/configs/tood/tood_r50_fpn_anchor_based_1x_coco.py b/object_detection/configs/tood/tood_r50_fpn_anchor_based_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c7fbf6aff197b821de07f8d4a73f9c72e5f76288 --- /dev/null +++ b/object_detection/configs/tood/tood_r50_fpn_anchor_based_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './tood_r50_fpn_1x_coco.py' +model = dict(bbox_head=dict(anchor_type='anchor_based')) diff --git a/object_detection/configs/tood/tood_r50_fpn_mstrain_2x_coco.py b/object_detection/configs/tood/tood_r50_fpn_mstrain_2x_coco.py new file mode 100644 
index 0000000000000000000000000000000000000000..157d13a4a17b0aaae3faf23b70a5c7d64b682d32 --- /dev/null +++ b/object_detection/configs/tood/tood_r50_fpn_mstrain_2x_coco.py @@ -0,0 +1,22 @@ +_base_ = './tood_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) +# multi-scale training +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/object_detection/configs/tood/tood_x101_64x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py b/object_detection/configs/tood/tood_x101_64x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..47c92695a92dae83217eaacb9788f88e6c801272 --- /dev/null +++ b/object_detection/configs/tood/tood_x101_64x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py @@ -0,0 +1,7 @@ +_base_ = './tood_x101_64x4d_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deformable_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, False, True, True), + ), + bbox_head=dict(num_dcn=2)) diff --git a/object_detection/configs/tood/tood_x101_64x4d_fpn_mstrain_2x_coco.py b/object_detection/configs/tood/tood_x101_64x4d_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..842f320e83966e9c8dbbf337cfcef2bcb8d782db --- /dev/null +++ b/object_detection/configs/tood/tood_x101_64x4d_fpn_mstrain_2x_coco.py @@ -0,0 +1,16 @@ +_base_ = './tood_r50_fpn_mstrain_2x_coco.py' + +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/tridentnet/README.md b/object_detection/configs/tridentnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..6c43a3b0e610e9fc3ee487bd6eaf247f24bc9acb --- /dev/null +++ b/object_detection/configs/tridentnet/README.md @@ -0,0 +1,42 @@ +# Scale-Aware Trident Networks for Object Detection + +## Abstract + + + +Scale variation is one of the key challenges in object detection. In this work, we first present a controlled experiment to investigate the effect of receptive fields for scale variation in object detection. Based on the findings from the exploration experiments, we propose a novel Trident Network (TridentNet) aiming to generate scale-specific feature maps with a uniform representational power. We construct a parallel multi-branch architecture in which each branch shares the same transformation parameters but with different receptive fields. Then, we adopt a scale-aware training scheme to specialize each branch by sampling object instances of proper scales for training. 
As a bonus, a fast approximation version of TridentNet could achieve significant improvements without any additional parameters and computational cost compared with the vanilla detector. On the COCO dataset, our TridentNet with ResNet-101 backbone achieves state-of-the-art single-model results of 48.4 mAP. + + +
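The branch design described in this abstract can be illustrated with a short PyTorch sketch: one shared 3x3 weight is applied several times with different dilation rates, so every branch has identical parameters but a different receptive field. This is only an illustrative sketch (the class name `TridentConv` and the channel count are assumptions), not the `TridentResNet` implementation used by the configs below.

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

class TridentConv(nn.Module):
    """Sketch of a trident branch block: a single shared 3x3 weight reused
    with several dilations (same parameters, different receptive fields)."""

    def __init__(self, channels, dilations=(1, 2, 3)):
        super().__init__()
        self.dilations = dilations
        self.weight = nn.Parameter(torch.empty(channels, channels, 3, 3))
        self.bias = nn.Parameter(torch.zeros(channels))
        nn.init.kaiming_normal_(self.weight, mode='fan_out', nonlinearity='relu')

    def forward(self, x):
        # One output per branch; padding == dilation keeps the spatial size.
        return [
            F.conv2d(x, self.weight, self.bias, padding=d, dilation=d)
            for d in self.dilations
        ]

branch_feats = TridentConv(channels=256)(torch.randn(1, 256, 32, 32))
print([f.shape for f in branch_feats])  # three maps of shape (1, 256, 32, 32)
```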
+ +
+ + + + + +## Citation + + + +``` +@InProceedings{li2019scale, + title={Scale-Aware Trident Networks for Object Detection}, + author={Li, Yanghao and Chen, Yuntao and Wang, Naiyan and Zhang, Zhaoxiang}, + booktitle={The International Conference on Computer Vision (ICCV)}, + year={2019} +} +``` + +## Results and models + +We report the test results using only one branch for inference. + +| Backbone | Style | mstrain | Lr schd | Mem (GB) | Inf time (fps) | box AP | Download | +| :-------------: | :-----: | :-----: | :-----: | :------: | :------------: | :----: | :------: | +| R-50 | caffe | N | 1x | | | 37.7 | [model](https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_1x_coco/tridentnet_r50_caffe_1x_coco_20201230_141838-2ec0b530.pth) \| [log](https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_1x_coco/tridentnet_r50_caffe_1x_coco_20201230_141838.log.json) | +| R-50 | caffe | Y | 1x | | | 37.6 | [model](https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_mstrain_1x_coco/tridentnet_r50_caffe_mstrain_1x_coco_20201230_141839-6ce55ccb.pth) \| [log](https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_mstrain_1x_coco/tridentnet_r50_caffe_mstrain_1x_coco_20201230_141839.log.json) | +| R-50 | caffe | Y | 3x | | | 40.3 | [model](https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_mstrain_3x_coco/tridentnet_r50_caffe_mstrain_3x_coco_20201130_100539-46d227ba.pth) \| [log](https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_mstrain_3x_coco/tridentnet_r50_caffe_mstrain_3x_coco_20201130_100539.log.json) | + +**Note** + +Similar to [Detectron2](https://github.com/facebookresearch/detectron2/tree/master/projects/TridentNet), we haven't implemented the Scale-aware Training Scheme in section 4.2 of the paper.
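For reference, running inference with one of the checkpoints listed above uses the standard MMDetection 2.x high-level API; a minimal sketch (the local file names and the test image are assumptions):

```python
from mmdet.apis import inference_detector, init_detector

# Config added in this diff plus a checkpoint downloaded from the table above.
config_file = 'object_detection/configs/tridentnet/tridentnet_r50_caffe_1x_coco.py'
checkpoint_file = 'tridentnet_r50_caffe_1x_coco_20201230_141838-2ec0b530.pth'

model = init_detector(config_file, checkpoint_file, device='cuda:0')
result = inference_detector(model, 'demo.jpg')  # per-class lists of detected boxes
```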
diff --git a/object_detection/configs/tridentnet/metafile.yml b/object_detection/configs/tridentnet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..2536f976fcf9d75744332b0040792f2e3b65b4cb --- /dev/null +++ b/object_detection/configs/tridentnet/metafile.yml @@ -0,0 +1,55 @@ +Collections: + - Name: TridentNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - ResNet + - TridentNet Block + Paper: + URL: https://arxiv.org/abs/1901.01892 + Title: 'Scale-Aware Trident Networks for Object Detection' + README: configs/tridentnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.8.0/mmdet/models/detectors/trident_faster_rcnn.py#L6 + Version: v2.8.0 + +Models: + - Name: tridentnet_r50_caffe_1x_coco + In Collection: TridentNet + Config: configs/tridentnet/tridentnet_r50_caffe_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_1x_coco/tridentnet_r50_caffe_1x_coco_20201230_141838-2ec0b530.pth + + - Name: tridentnet_r50_caffe_mstrain_1x_coco + In Collection: TridentNet + Config: configs/tridentnet/tridentnet_r50_caffe_mstrain_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_mstrain_1x_coco/tridentnet_r50_caffe_mstrain_1x_coco_20201230_141839-6ce55ccb.pth + + - Name: tridentnet_r50_caffe_mstrain_3x_coco + In Collection: TridentNet + Config: configs/tridentnet/tridentnet_r50_caffe_mstrain_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_mstrain_3x_coco/tridentnet_r50_caffe_mstrain_3x_coco_20201130_100539-46d227ba.pth diff --git a/object_detection/configs/tridentnet/tridentnet_r50_caffe_1x_coco.py b/object_detection/configs/tridentnet/tridentnet_r50_caffe_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d779f75f8395c9d25345b936029ffc1628b5d4cb --- /dev/null +++ b/object_detection/configs/tridentnet/tridentnet_r50_caffe_1x_coco.py @@ -0,0 +1,55 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_caffe_c4.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='TridentFasterRCNN', + backbone=dict( + type='TridentResNet', + trident_dilations=(1, 2, 3), + num_branch=3, + test_branch_idx=1, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + roi_head=dict(type='TridentRoIHead', num_branch=3, test_branch_idx=1), + train_cfg=dict( + rpn_proposal=dict(max_per_img=500), + rcnn=dict( + sampler=dict(num=128, pos_fraction=0.5, + add_gt_as_proposals=False)))) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 
'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/tridentnet/tridentnet_r50_caffe_mstrain_1x_coco.py b/object_detection/configs/tridentnet/tridentnet_r50_caffe_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c73d9eaa96c7f88dd33eb55f21848db2421bea1e --- /dev/null +++ b/object_detection/configs/tridentnet/tridentnet_r50_caffe_mstrain_1x_coco.py @@ -0,0 +1,22 @@ +_base_ = 'tridentnet_r50_caffe_1x_coco.py' + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] + +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/object_detection/configs/tridentnet/tridentnet_r50_caffe_mstrain_3x_coco.py b/object_detection/configs/tridentnet/tridentnet_r50_caffe_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0f402826d3a22714078d8c50ed6bd8959018e4e7 --- /dev/null +++ b/object_detection/configs/tridentnet/tridentnet_r50_caffe_mstrain_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = 'tridentnet_r50_caffe_mstrain_1x_coco.py' + +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/object_detection/configs/vfnet/README.md b/object_detection/configs/vfnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e57a1beb3cf815f168bbfe977aac0b28e9331234 --- /dev/null +++ b/object_detection/configs/vfnet/README.md @@ -0,0 +1,53 @@ +# VarifocalNet: An IoU-aware Dense Object Detector + +## Abstract + + + +Accurately ranking the vast number of candidate detections is crucial for dense object detectors to achieve high performance. Prior work uses the classification score or a combination of classification and predicted localization scores to rank candidates. However, neither option results in a reliable ranking, thus degrading detection performance. In this paper, we propose to learn an Iou-aware Classification Score (IACS) as a joint representation of object presence confidence and localization accuracy. We show that dense object detectors can achieve a more accurate ranking of candidate detections based on the IACS. We design a new loss function, named Varifocal Loss, to train a dense object detector to predict the IACS, and propose a new star-shaped bounding box feature representation for IACS prediction and bounding box refinement. Combining these two new components and a bounding box refinement branch, we build an IoU-aware dense object detector based on the FCOS+ATSS architecture, that we call VarifocalNet or VFNet for short. 
Extensive experiments on MS COCO show that our VFNet consistently surpasses the strong baseline by ∼2.0 AP with different backbones. Our best model VFNet-X-1200 with Res2Net-101-DCN achieves a single-model single-scale AP of 55.1 on COCO test-dev, which is state-of-the-art among various object detectors. + + +
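The Varifocal Loss mentioned above treats the two sides of the classification target asymmetrically: positives are trained against their IoU-aware score q and weighted by q itself, while negatives are down-weighted by a focal factor on the predicted score. A minimal sketch of that reading of the paper (the function name is an assumption, and this is not mmdet's `VarifocalLoss` module; alpha and gamma follow the configs below):

```python
import torch
import torch.nn.functional as F

def varifocal_loss_sketch(pred_logits, target_score, alpha=0.75, gamma=2.0):
    """Illustrative sketch of the Varifocal Loss described in the abstract.

    pred_logits:  raw classification logits for one class.
    target_score: IoU-aware target q (IoU with the matched GT for positives,
                  0 for negatives).
    """
    p = pred_logits.sigmoid()
    bce = F.binary_cross_entropy_with_logits(
        pred_logits, target_score, reduction='none')
    pos = (target_score > 0).float()
    # Positives weighted by q, negatives by alpha * p**gamma (focal term).
    weight = pos * target_score + (1 - pos) * alpha * p.pow(gamma)
    return (weight * bce).sum()
```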
+ +
+ + + + +## Introduction + + + +**VarifocalNet (VFNet)** learns to predict the IoU-aware classification score which mixes the object presence confidence and localization accuracy together as the detection score for a bounding box. The learning is supervised by the proposed Varifocal Loss (VFL), based on a new star-shaped bounding box feature representation (the features at nine yellow sampling points). Given the new representation, the object localization accuracy is further improved by refining the initially regressed bounding box. The full paper is available at: [https://arxiv.org/abs/2008.13367](https://arxiv.org/abs/2008.13367). + + +## Citation + +```latex +@article{zhang2020varifocalnet, + title={VarifocalNet: An IoU-aware Dense Object Detector}, + author={Zhang, Haoyang and Wang, Ying and Dayoub, Feras and S{\"u}nderhauf, Niko}, + journal={arXiv preprint arXiv:2008.13367}, + year={2020} +} +``` + +## Results and Models + +| Backbone | Style | DCN | MS train | Lr schd |Inf time (fps) | box AP (val) | box AP (test-dev) | Config | Download | +|:------------:|:---------:|:-------:|:--------:|:-------:|:-------------:|:------------:|:-----------------:|:------:|:--------:| +| R-50 | pytorch | N | N | 1x | - | 41.6 | 41.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_1x_coco/vfnet_r50_fpn_1x_coco_20201027-38db6f58.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_1x_coco/vfnet_r50_fpn_1x_coco.json)| +| R-50 | pytorch | N | Y | 2x | - | 44.5 | 44.8 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r50_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_mstrain_2x_coco/vfnet_r50_fpn_mstrain_2x_coco_20201027-7cc75bd2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_mstrain_2x_coco/vfnet_r50_fpn_mstrain_2x_coco.json)| +| R-50 | pytorch | Y | Y | 2x | - | 47.8 | 48.0 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-6879c318.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.json)| +| R-101 | pytorch | N | N | 1x | - | 43.0 | 43.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_1x_coco/vfnet_r101_fpn_1x_coco_20201027pth-c831ece7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_1x_coco/vfnet_r101_fpn_1x_coco.json)| +| R-101 | pytorch | N | Y | 2x | - | 46.2 | 46.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_mstrain_2x_coco/vfnet_r101_fpn_mstrain_2x_coco_20201027pth-4a5d53f1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_mstrain_2x_coco/vfnet_r101_fpn_mstrain_2x_coco.json)| +| R-101 | pytorch | Y | Y | 2x | - | 49.0 | 49.2 | 
[config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-7729adb5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.json)| +| X-101-32x4d | pytorch | Y | Y | 2x | - | 49.7 | 50.0 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-d300a6fc.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.json)| +| X-101-64x4d | pytorch | Y | Y | 2x | - | 50.4 | 50.8 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-b5f6da5e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.json)| + +**Notes:** + +- The MS-train scale range is 1333x[480:960] (`range` mode) and the inference scale keeps 1333x800. +- DCN means using `DCNv2` in both backbone and head. +- Inference time will be updated soon. +- More results and pre-trained models can be found in [VarifocalNet-Github](https://github.com/hyz-xmaster/VarifocalNet) diff --git a/object_detection/configs/vfnet/metafile.yml b/object_detection/configs/vfnet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..bcbe576fa6f229d04ebcf15e391782bedbbc8310 --- /dev/null +++ b/object_detection/configs/vfnet/metafile.yml @@ -0,0 +1,116 @@ +Collections: + - Name: VFNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - ResNet + - Varifocal Loss + Paper: + URL: https://arxiv.org/abs/2008.13367 + Title: 'VarifocalNet: An IoU-aware Dense Object Detector' + README: configs/vfnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.6.0/mmdet/models/detectors/vfnet.py#L6 + Version: v2.6.0 + +Models: + - Name: vfnet_r50_fpn_1x_coco + In Collection: VFNet + Config: configs/vfnet/vfnet_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_1x_coco/vfnet_r50_fpn_1x_coco_20201027-38db6f58.pth + + - Name: vfnet_r50_fpn_mstrain_2x_coco + In Collection: VFNet + Config: configs/vfnet/vfnet_r50_fpn_mstrain_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_mstrain_2x_coco/vfnet_r50_fpn_mstrain_2x_coco_20201027-7cc75bd2.pth + + - Name: vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco + In Collection: VFNet + Config: configs/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py + Metadata: + 
Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 48.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-6879c318.pth + + - Name: vfnet_r101_fpn_1x_coco + In Collection: VFNet + Config: configs/vfnet/vfnet_r101_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_1x_coco/vfnet_r101_fpn_1x_coco_20201027pth-c831ece7.pth + + - Name: vfnet_r101_fpn_mstrain_2x_coco + In Collection: VFNet + Config: configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_mstrain_2x_coco/vfnet_r101_fpn_mstrain_2x_coco_20201027pth-4a5d53f1.pth + + - Name: vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco + In Collection: VFNet + Config: configs/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 49.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-7729adb5.pth + + - Name: vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco + In Collection: VFNet + Config: configs/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 50.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-d300a6fc.pth + + - Name: vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco + In Collection: VFNet + Config: configs/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 50.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-b5f6da5e.pth diff --git a/object_detection/configs/vfnet/vfnet_r101_fpn_1x_coco.py b/object_detection/configs/vfnet/vfnet_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b296a07959e43517d792f36f356404a232fb0dc3 --- /dev/null +++ b/object_detection/configs/vfnet/vfnet_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './vfnet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/vfnet/vfnet_r101_fpn_2x_coco.py b/object_detection/configs/vfnet/vfnet_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..27962f3a88d850edb38360b6988584f7438691b7 --- /dev/null +++ b/object_detection/configs/vfnet/vfnet_r101_fpn_2x_coco.py @@ -0,0 +1,8 @@ +_base_ = './vfnet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.py 
b/object_detection/configs/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e438c247cf4c9c5b2b5aabffda535bec61d4a21e --- /dev/null +++ b/object_detection/configs/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,15 @@ +_base_ = './vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py b/object_detection/configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..eae69a01e801ae0422cdb8f8e58fd02a1720fee9 --- /dev/null +++ b/object_detection/configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './vfnet_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/vfnet/vfnet_r2_101_fpn_mdconv_c3-c5_mstrain_2x_coco.py b/object_detection/configs/vfnet/vfnet_r2_101_fpn_mdconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..815a36e079111ee605c46d27bda9962dabdd6cdd --- /dev/null +++ b/object_detection/configs/vfnet/vfnet_r2_101_fpn_mdconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,18 @@ +_base_ = './vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='Res2Net', + depth=101, + scales=4, + base_width=26, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://res2net101_v1d_26w_4s'))) diff --git a/object_detection/configs/vfnet/vfnet_r2_101_fpn_mstrain_2x_coco.py b/object_detection/configs/vfnet/vfnet_r2_101_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..58022e0eeac5fba20b2360e0578aa9b9c781f287 --- /dev/null +++ b/object_detection/configs/vfnet/vfnet_r2_101_fpn_mstrain_2x_coco.py @@ -0,0 +1,16 @@ +_base_ = './vfnet_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='Res2Net', + depth=101, + scales=4, + base_width=26, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://res2net101_v1d_26w_4s'))) diff --git a/object_detection/configs/vfnet/vfnet_r50_fpn_1x_coco.py b/object_detection/configs/vfnet/vfnet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7de64296cd78ce12a1d3df281bdffb8c393543be --- /dev/null +++ b/object_detection/configs/vfnet/vfnet_r50_fpn_1x_coco.py @@ -0,0 +1,107 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='VFNet', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, 
+ norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', # use P5 + num_outs=5, + relu_before_extra_convs=True), + bbox_head=dict( + type='VFNetHead', + num_classes=80, + in_channels=256, + stacked_convs=3, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + center_sampling=False, + dcn_on_last_conv=False, + use_atss=True, + use_vfl=True, + loss_cls=dict( + type='VarifocalLoss', + use_sigmoid=True, + alpha=0.75, + gamma=2.0, + iou_weighted=True, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.5), + loss_bbox_refine=dict(type='GIoULoss', loss_weight=2.0)), + # training and testing settings + train_cfg=dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) + +# data setting +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +# optimizer +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.1, + step=[8, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/object_detection/configs/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py b/object_detection/configs/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..24d2093b8b537a365c3e07261921b120b422918c --- /dev/null +++ b/object_detection/configs/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './vfnet_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True)), + bbox_head=dict(dcn_on_last_conv=True)) diff --git a/object_detection/configs/vfnet/vfnet_r50_fpn_mstrain_2x_coco.py b/object_detection/configs/vfnet/vfnet_r50_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6078bb98cacc04da23dcb7a661047902e0adefb3 --- /dev/null +++ b/object_detection/configs/vfnet/vfnet_r50_fpn_mstrain_2x_coco.py @@ -0,0 +1,39 @@ +_base_ = './vfnet_r50_fpn_1x_coco.py' 
+img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 960)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/object_detection/configs/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py b/object_detection/configs/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7efa0517eb72395a2ff24992318fcb4667fc033d --- /dev/null +++ b/object_detection/configs/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,17 @@ +_base_ = './vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/vfnet/vfnet_x101_32x4d_fpn_mstrain_2x_coco.py b/object_detection/configs/vfnet/vfnet_x101_32x4d_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..49a4312107d9ff045bc626802fa23cf01f54d10e --- /dev/null +++ b/object_detection/configs/vfnet/vfnet_x101_32x4d_fpn_mstrain_2x_coco.py @@ -0,0 +1,15 @@ +_base_ = './vfnet_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/object_detection/configs/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py b/object_detection/configs/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7e1ee429f3dbaa895018a1b280ff312d01965e03 --- /dev/null +++ b/object_detection/configs/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,17 @@ +_base_ = './vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + dcn=dict(type='DCNv2', deform_groups=1, 
fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/vfnet/vfnet_x101_64x4d_fpn_mstrain_2x_coco.py b/object_detection/configs/vfnet/vfnet_x101_64x4d_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e51064e7ec003604edb99c2759b3f5fe4b95423e --- /dev/null +++ b/object_detection/configs/vfnet/vfnet_x101_64x4d_fpn_mstrain_2x_coco.py @@ -0,0 +1,15 @@ +_base_ = './vfnet_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/object_detection/configs/wider_face/README.md b/object_detection/configs/wider_face/README.md new file mode 100644 index 0000000000000000000000000000000000000000..2e6124c8b818cfc1fba3fc8f074425fb4038f19b --- /dev/null +++ b/object_detection/configs/wider_face/README.md @@ -0,0 +1,61 @@ +# WIDER FACE: A Face Detection Benchmark + +## Abstract + + + +Face detection is one of the most studied topics in the computer vision community. Much of the progresses have been made by the availability of face detection benchmark datasets. We show that there is a gap between current face detection performance and the real world requirements. To facilitate future face detection research, we introduce the WIDER FACE dataset, which is 10 times larger than existing datasets. The dataset contains rich annotations, including occlusions, poses, event categories, and face bounding boxes. Faces in the proposed dataset are extremely challenging due to large variations in scale, pose and occlusion, as shown in Fig. 1. Furthermore, we show that WIDER FACE dataset is an effective training source for face detection. We benchmark several representative detection systems, providing an overview of state-of-the-art performance and propose a solution to deal with large scale variation. Finally, we discuss common failure cases that worth to be further investigated. + + +
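Because WIDER FACE is a single-class benchmark, adapting a generic COCO detector to it is mostly a matter of overriding the head's number of classes; the SSD300 config introduced below does exactly that on top of the shared `ssd300.py` and `wider_face.py` base files:

```python
# Single-class override for WIDER FACE (mirrors the ssd300_wider_face.py
# config added later in this diff): only the face category is predicted.
_base_ = [
    '../_base_/models/ssd300.py', '../_base_/datasets/wider_face.py',
    '../_base_/default_runtime.py'
]
model = dict(bbox_head=dict(num_classes=1))
```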
+ +
+ + + + +## Introduction + + + +To use the WIDER Face dataset you need to download it +and extract to the `data/WIDERFace` folder. Annotation in the VOC format +can be found in this [repo](https://github.com/sovrasov/wider-face-pascal-voc-annotations.git). +You should move the annotation files from `WIDER_train_annotations` and `WIDER_val_annotations` folders +to the `Annotation` folders inside the corresponding directories `WIDER_train` and `WIDER_val`. +Also annotation lists `val.txt` and `train.txt` should be copied to `data/WIDERFace` from `WIDER_train_annotations` and `WIDER_val_annotations`. +The directory should be like this: + +``` +mmdetection +├── mmdet +├── tools +├── configs +├── data +│ ├── WIDERFace +│ │ ├── WIDER_train +│ | │ ├──0--Parade +│ | │ ├── ... +│ | │ ├── Annotations +│ │ ├── WIDER_val +│ | │ ├──0--Parade +│ | │ ├── ... +│ | │ ├── Annotations +│ │ ├── val.txt +│ │ ├── train.txt + +``` + +After that you can train the SSD300 on WIDER by launching training with the `ssd300_wider_face.py` config or +create your own config based on the presented one. + +## Citation + +``` +@inproceedings{yang2016wider, + Author = {Yang, Shuo and Luo, Ping and Loy, Chen Change and Tang, Xiaoou}, + Booktitle = {IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + Title = {WIDER FACE: A Face Detection Benchmark}, + Year = {2016} +} +``` diff --git a/object_detection/configs/wider_face/ssd300_wider_face.py b/object_detection/configs/wider_face/ssd300_wider_face.py new file mode 100644 index 0000000000000000000000000000000000000000..5a3eb38df3dc75af176cc6972af88e76124ba4dc --- /dev/null +++ b/object_detection/configs/wider_face/ssd300_wider_face.py @@ -0,0 +1,18 @@ +_base_ = [ + '../_base_/models/ssd300.py', '../_base_/datasets/wider_face.py', + '../_base_/default_runtime.py' +] +model = dict(bbox_head=dict(num_classes=1)) +# optimizer +optimizer = dict(type='SGD', lr=0.012, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict() +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.001, + step=[16, 20]) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=24) +log_config = dict(interval=1) diff --git a/object_detection/configs/yolact/README.md b/object_detection/configs/yolact/README.md new file mode 100644 index 0000000000000000000000000000000000000000..c4e390302f11427d3d7562c41895b94bfeff64d3 --- /dev/null +++ b/object_detection/configs/yolact/README.md @@ -0,0 +1,78 @@ +# **Y**ou **O**nly **L**ook **A**t **C**oefficien**T**s + +## Abstract + + + +We present a simple, fully-convolutional model for real-time instance segmentation that achieves 29.8 mAP on MS COCO at 33.5 fps evaluated on a single Titan Xp, which is significantly faster than any previous competitive approach. Moreover, we obtain this result after training on only one GPU. We accomplish this by breaking instance segmentation into two parallel subtasks: (1) generating a set of prototype masks and (2) predicting per-instance mask coefficients. Then we produce instance masks by linearly combining the prototypes with the mask coefficients. We find that because this process doesn't depend on repooling, this approach produces very high-quality masks and exhibits temporal stability for free. Furthermore, we analyze the emergent behavior of our prototypes and show they learn to localize instances on their own in a translation variant manner, despite being fully-convolutional. 
Finally, we also propose Fast NMS, a drop-in 12 ms faster replacement for standard NMS that only has a marginal performance penalty. + + +
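The "linear combination" step in the abstract boils down to a single matrix product between the per-instance coefficient vectors and the prototype masks, followed by a sigmoid; a minimal sketch (shapes and the 138x138 prototype resolution are assumptions, and cropping/thresholding are omitted):

```python
import torch

def assemble_masks_sketch(prototypes, coefficients):
    """Sketch of YOLACT's mask assembly: prototypes (H, W, k) from the
    protonet are linearly combined with one k-dim coefficient vector per
    detection, then squashed with a sigmoid."""
    h, w, k = prototypes.shape                             # e.g. 138 x 138 x 32
    n = coefficients.shape[0]                              # number of detections
    masks = prototypes.reshape(-1, k) @ coefficients.t()   # (H*W, n)
    return masks.t().reshape(n, h, w).sigmoid()

protos = torch.randn(138, 138, 32)   # k=32 matches num_protos in the configs below
coeffs = torch.randn(5, 32)          # 5 detections
print(assemble_masks_sketch(protos, coeffs).shape)  # torch.Size([5, 138, 138])
```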
+ +
+ + + + +## Introduction + + + +A simple, fully convolutional model for real-time instance segmentation. This is the code for our paper: + +- [YOLACT: Real-time Instance Segmentation](https://arxiv.org/abs/1904.02689) + + +For a real-time demo, check out our ICCV video: +[![IMAGE ALT TEXT HERE](https://img.youtube.com/vi/0pMfmo8qfpQ/0.jpg)](https://www.youtube.com/watch?v=0pMfmo8qfpQ) + +## Evaluation + +Here are our YOLACT models along with their FPS on a Titan Xp and mAP on COCO's `val`: + +| Image Size | GPU x BS | Backbone | *FPS | mAP | Weights | Configs | Download | +|:----------:|:--------:|:-------------:|:-----:|:----:|:-------:|:------:|:--------:| +| 550 | 1x8 | Resnet50-FPN | 42.5 | 29.0 | | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/yolact/yolact_r50_1x8_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/yolact/yolact_r50_1x8_coco/yolact_r50_1x8_coco_20200908-f38d58df.pth) | +| 550 | 8x8 | Resnet50-FPN | 42.5 | 28.4 | | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/yolact/yolact_r50_8x8_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/yolact/yolact_r50_8x8_coco/yolact_r50_8x8_coco_20200908-ca34f5db.pth) | +| 550 | 1x8 | Resnet101-FPN | 33.5 | 30.4 | | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/yolact/yolact_r101_1x8_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/yolact/yolact_r101_1x8_coco/yolact_r101_1x8_coco_20200908-4cbe9101.pth) | + +*Note: The FPS is evaluated by the [original implementation](https://github.com/dbolya/yolact). When calculating FPS, only the model inference time is taken into account. Data loading and post-processing operations such as converting masks to RLE code, generating COCO JSON results, image rendering are not included. + +## Training + +All the aforementioned models are trained with a single GPU. It typically takes ~12GB VRAM when using resnet-101 as the backbone. If you want to try multiple GPUs training, you may have to modify the configuration files accordingly, such as adjusting the training schedule and freezing batch norm. + +```Shell +# Trains using the resnet-101 backbone with a batch size of 8 on a single GPU. +./tools/dist_train.sh configs/yolact/yolact_r101.py 1 +``` + +## Testing + +Please refer to [mmdetection/docs/getting_started.md](https://github.com/open-mmlab/mmdetection/blob/master/docs/getting_started.md#inference-with-pretrained-models). 
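For the multi-GPU case mentioned in the Training section above, the adjustment amounts to a small config override: the `yolact_r50_8x8_coco.py` file added later in this diff scales the learning rate with the total batch size and tweaks warmup and gradient clipping. For reference, the override looks like this:

```python
# Override on top of the single-GPU config, mirroring yolact_r50_8x8_coco.py
# added later in this diff (8 GPUs x 8 images per GPU).
_base_ = 'yolact_r50_1x8_coco.py'

optimizer = dict(type='SGD', lr=8e-3, momentum=0.9, weight_decay=5e-4)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=1000,
    warmup_ratio=0.1,
    step=[20, 42, 49, 52])
```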
+ +## Citation + +If you use YOLACT or this code base in your work, please cite + +```latex +@inproceedings{yolact-iccv2019, + author = {Daniel Bolya and Chong Zhou and Fanyi Xiao and Yong Jae Lee}, + title = {YOLACT: {Real-time} Instance Segmentation}, + booktitle = {ICCV}, + year = {2019}, +} +``` + + diff --git a/object_detection/configs/yolact/metafile.yml b/object_detection/configs/yolact/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..e7019ae62ce981eaf7c4e4704ea223f48b464ead --- /dev/null +++ b/object_detection/configs/yolact/metafile.yml @@ -0,0 +1,78 @@ +Collections: + - Name: YOLACT + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/1904.02689 + Title: 'YOLACT: Real-time Instance Segmentation' + README: configs/yolact/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.5.0/mmdet/models/detectors/yolact.py#L9 + Version: v2.5.0 + +Models: + - Name: yolact_r50_1x8_coco + In Collection: YOLACT + Config: configs/yolact/yolact_r50_1x8_coco.py + Metadata: + Training Resources: 1x V100 GPU + Batch Size: 8 + inference time (ms/im): + - value: 23.53 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (550, 550) + Results: + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 29.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolact/yolact_r50_1x8_coco/yolact_r50_1x8_coco_20200908-f38d58df.pth + + - Name: yolact_r50_8x8_coco + In Collection: YOLACT + Config: configs/yolact/yolact_r50_8x8_coco.py + Metadata: + Batch Size: 64 + inference time (ms/im): + - value: 23.53 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (550, 550) + Results: + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 28.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolact/yolact_r50_8x8_coco/yolact_r50_8x8_coco_20200908-ca34f5db.pth + + - Name: yolact_r101_1x8_coco + In Collection: YOLACT + Config: configs/yolact/yolact_r101_1x8_coco.py + Metadata: + Training Resources: 1x V100 GPU + Batch Size: 8 + inference time (ms/im): + - value: 29.85 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (550, 550) + Results: + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 30.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolact/yolact_r101_1x8_coco/yolact_r101_1x8_coco_20200908-4cbe9101.pth diff --git a/object_detection/configs/yolact/yolact_r101_1x8_coco.py b/object_detection/configs/yolact/yolact_r101_1x8_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..532631dd5f8483dfb61488e4f445f1f50a71fbde --- /dev/null +++ b/object_detection/configs/yolact/yolact_r101_1x8_coco.py @@ -0,0 +1,7 @@ +_base_ = './yolact_r50_1x8_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/object_detection/configs/yolact/yolact_r50_1x8_coco.py b/object_detection/configs/yolact/yolact_r50_1x8_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ceed0caafe3b4ed9cfd59487c09eea60fd67546b --- /dev/null +++ b/object_detection/configs/yolact/yolact_r50_1x8_coco.py @@ -0,0 +1,160 @@ +_base_ = '../_base_/default_runtime.py' + +# model settings +img_size = 550 +model = dict( + type='YOLACT', + backbone=dict( + type='ResNet', + depth=50, + 
num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=-1, # do not freeze stem + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=False, # update the statistics of bn + zero_init_residual=False, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_input', + num_outs=5, + upsample_cfg=dict(mode='bilinear')), + bbox_head=dict( + type='YOLACTHead', + num_classes=80, + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=3, + scales_per_octave=1, + base_sizes=[8, 16, 32, 64, 128], + ratios=[0.5, 1.0, 2.0], + strides=[550.0 / x for x in [69, 35, 18, 9, 5]], + centers=[(550 * 0.5 / x, 550 * 0.5 / x) + for x in [69, 35, 18, 9, 5]]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + reduction='none', + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.5), + num_head_convs=1, + num_protos=32, + use_ohem=True), + mask_head=dict( + type='YOLACTProtonet', + in_channels=256, + num_protos=32, + num_classes=80, + max_masks_to_train=100, + loss_mask_weight=6.125), + segm_head=dict( + type='YOLACTSegmHead', + num_classes=80, + in_channels=256, + loss_segm=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0., + ignore_iof_thr=-1, + gt_max_assign_all=False), + # smoothl1_beta=1., + allowed_border=-1, + pos_weight=-1, + neg_pos_ratio=3, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + iou_thr=0.5, + top_k=200, + max_per_img=100)) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.68, 116.78, 103.94], std=[58.40, 57.12, 57.38], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='FilterAnnotations', min_gt_bbox_wh=(4.0, 4.0)), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(img_size, img_size), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(img_size, img_size), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + 
img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=1e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict() +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.1, + step=[20, 42, 49, 52]) +runner = dict(type='EpochBasedRunner', max_epochs=55) +cudnn_benchmark = True +evaluation = dict(metric=['bbox', 'segm']) diff --git a/object_detection/configs/yolact/yolact_r50_8x8_coco.py b/object_detection/configs/yolact/yolact_r50_8x8_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b3adcb74a6155a0ab7303ab9ae90ee120f3eb4ad --- /dev/null +++ b/object_detection/configs/yolact/yolact_r50_8x8_coco.py @@ -0,0 +1,11 @@ +_base_ = 'yolact_r50_1x8_coco.py' + +optimizer = dict(type='SGD', lr=8e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[20, 42, 49, 52]) diff --git a/object_detection/configs/yolo/README.md b/object_detection/configs/yolo/README.md new file mode 100644 index 0000000000000000000000000000000000000000..93d14bf6c9a5a5ca1ce180ea13d12d87801b8e0c --- /dev/null +++ b/object_detection/configs/yolo/README.md @@ -0,0 +1,59 @@ +# YOLOv3: An Incremental Improvement + +## Abstract + + + +We present some updates to YOLO! We made a bunch of little design changes to make it better. We also trained this new network that's pretty swell. It's a little bigger than last time but more accurate. It's still fast though, don't worry. At 320x320 YOLOv3 runs in 22 ms at 28.2 mAP, as accurate as SSD but three times faster. When we look at the old .5 IOU mAP detection metric YOLOv3 is quite good. It achieves 57.9 mAP@50 in 51 ms on a Titan X, compared to 57.5 mAP@50 in 198 ms by RetinaNet, similar performance but 3.8x faster. + + +
+ +
+ + + + +## Citation + + + +```latex +@misc{redmon2018yolov3, + title={YOLOv3: An Incremental Improvement}, + author={Joseph Redmon and Ali Farhadi}, + year={2018}, + eprint={1804.02767}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + +## Results and Models + +| Backbone | Scale | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| DarkNet-53 | 320 | 273e | 2.7 | 63.9 | 27.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolo/yolov3_d53_320_273e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_320_273e_coco/yolov3_d53_320_273e_coco-421362b6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_320_273e_coco/yolov3_d53_320_273e_coco-20200819_172101.log.json) | +| DarkNet-53 | 416 | 273e | 3.8 | 61.2 | 30.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolo/yolov3_d53_mstrain-416_273e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-416_273e_coco/yolov3_d53_mstrain-416_273e_coco-2b60fcd9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-416_273e_coco/yolov3_d53_mstrain-416_273e_coco-20200819_173424.log.json) | +| DarkNet-53 | 608 | 273e | 7.4 | 48.1 | 33.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolo/yolov3_d53_mstrain-608_273e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-608_273e_coco/yolov3_d53_mstrain-608_273e_coco_20210518_115020-a2c3acb8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-608_273e_coco/yolov3_d53_mstrain-608_273e_coco_20210518_115020.log.json) | + +## Mixed Precision Training + +We also train YOLOv3 with mixed precision training. 
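Enabling it is a one-line config override: the `yolov3_d53_fp16_mstrain-608_273e_coco.py` file added later in this diff simply turns on dynamic loss scaling on top of the 608 schedule, and MMDetection's training script picks up this `fp16` field to build the FP16 optimizer hook.

```python
# Contents of the fp16 config added later in this diff.
_base_ = './yolov3_d53_mstrain-608_273e_coco.py'
# fp16 settings
fp16 = dict(loss_scale='dynamic')
```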
+ +| Backbone | Scale | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| DarkNet-53 | 608 | 273e | 4.7 | 48.1 | 33.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolo/yolov3_d53_fp16_mstrain-608_273e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_fp16_mstrain-608_273e_coco/yolov3_d53_fp16_mstrain-608_273e_coco_20210517_213542-4bc34944.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_fp16_mstrain-608_273e_coco/yolov3_d53_fp16_mstrain-608_273e_coco_20210517_213542.log.json) | + +## Lightweight models + +| Backbone | Scale | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| MobileNetV2 | 416 | 300e | 5.3 | | 23.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolo/yolov3_mobilenetv2_mstrain-416_300e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_mobilenetv2_mstrain-416_300e_coco/yolov3_mobilenetv2_mstrain-416_300e_coco_20210718_010823-f68a07b3.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_mobilenetv2_mstrain-416_300e_coco/yolov3_mobilenetv2_mstrain-416_300e_coco_20210718_010823.log.json) | +| MobileNetV2 | 320 | 300e | 3.2 | | 22.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolo/yolov3_mobilenetv2_320_300e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_mobilenetv2_320_300e_coco/yolov3_mobilenetv2_320_300e_coco_20210719_215349-d18dff72.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_mobilenetv2_320_300e_coco/yolov3_mobilenetv2_320_300e_coco_20210719_215349.log.json) | + +Notice: We reduce the number of channels to 96 in both head and neck. It can reduce the flops and parameters, which makes these models more suitable for edge devices. + +## Credit + +This implementation originates from the project of Haoyu Wu(@wuhy08) at Western Digital. 
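The channel reduction mentioned in the note on the lightweight models corresponds to narrowing the YOLOv3 neck and head around the MobileNetV2 backbone. Schematically, the override looks like the sketch below; the 96-channel widths are taken from the note above, while the authoritative values are in `yolov3_mobilenetv2_mstrain-416_300e_coco.py` added at the end of this diff.

```python
# Sketch only: lightweight neck/head widths for the MobileNetV2 variants.
model = dict(
    neck=dict(
        type='YOLOV3Neck',
        num_scales=3,
        in_channels=[320, 96, 32],   # MobileNetV2 feature channels
        out_channels=[96, 96, 96]),  # reduced from [512, 256, 128]
    bbox_head=dict(
        type='YOLOV3Head',
        in_channels=[96, 96, 96],
        out_channels=[96, 96, 96]))
```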
diff --git a/object_detection/configs/yolo/metafile.yml b/object_detection/configs/yolo/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..22c35da550e2bfbdd645f3fee40428137a1d8534 --- /dev/null +++ b/object_detection/configs/yolo/metafile.yml @@ -0,0 +1,124 @@ +Collections: + - Name: YOLOv3 + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - DarkNet + Paper: + URL: https://arxiv.org/abs/1804.02767 + Title: 'YOLOv3: An Incremental Improvement' + README: configs/yolo/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.4.0/mmdet/models/detectors/yolo.py#L8 + Version: v2.4.0 + +Models: + - Name: yolov3_d53_320_273e_coco + In Collection: YOLOv3 + Config: configs/yolo/yolov3_d53_320_273e_coco.py + Metadata: + Training Memory (GB): 2.7 + inference time (ms/im): + - value: 15.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (320, 320) + Epochs: 273 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 27.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_320_273e_coco/yolov3_d53_320_273e_coco-421362b6.pth + + - Name: yolov3_d53_mstrain-416_273e_coco + In Collection: YOLOv3 + Config: configs/yolo/yolov3_d53_mstrain-416_273e_coco.py + Metadata: + Training Memory (GB): 3.8 + inference time (ms/im): + - value: 16.34 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (416, 416) + Epochs: 273 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 30.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-416_273e_coco/yolov3_d53_mstrain-416_273e_coco-2b60fcd9.pth + + - Name: yolov3_d53_mstrain-608_273e_coco + In Collection: YOLOv3 + Config: configs/yolo/yolov3_d53_mstrain-608_273e_coco.py + Metadata: + Training Memory (GB): 7.4 + inference time (ms/im): + - value: 20.79 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (608, 608) + Epochs: 273 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 33.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-608_273e_coco/yolov3_d53_mstrain-608_273e_coco_20210518_115020-a2c3acb8.pth + + - Name: yolov3_d53_fp16_mstrain-608_273e_coco + In Collection: YOLOv3 + Config: configs/yolo/yolov3_d53_fp16_mstrain-608_273e_coco.py + Metadata: + Training Memory (GB): 4.7 + inference time (ms/im): + - value: 20.79 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP16 + resolution: (608, 608) + Epochs: 273 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 33.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_fp16_mstrain-608_273e_coco/yolov3_d53_fp16_mstrain-608_273e_coco_20210517_213542-4bc34944.pth + + - Name: yolov3_mobilenetv2_320_300e_coco + In Collection: YOLOv3 + Config: configs/yolo/yolov3_mobilenetv2_320_300e_coco.py + Metadata: + Training Memory (GB): 3.2 + Epochs: 300 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 22.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_mobilenetv2_320_300e_coco/yolov3_mobilenetv2_320_300e_coco_20210719_215349-d18dff72.pth + + - Name: yolov3_mobilenetv2_mstrain-416_300e_coco + In Collection: YOLOv3 + Config: configs/yolo/yolov3_mobilenetv2_mstrain-416_300e_coco.py + Metadata: + Training Memory (GB): 5.3 + Epochs: 300 + Results: + - Task: 
Object Detection + Dataset: COCO + Metrics: + box AP: 23.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_mobilenetv2_mstrain-416_300e_coco/yolov3_mobilenetv2_mstrain-416_300e_coco_20210718_010823-f68a07b3.pth diff --git a/object_detection/configs/yolo/yolov3_d53_320_273e_coco.py b/object_detection/configs/yolo/yolov3_d53_320_273e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d4785e3133c91a8d11b7a6ac6f7106a9310af65e --- /dev/null +++ b/object_detection/configs/yolo/yolov3_d53_320_273e_coco.py @@ -0,0 +1,42 @@ +_base_ = './yolov3_d53_mstrain-608_273e_coco.py' +# dataset settings +img_norm_cfg = dict(mean=[0, 0, 0], std=[255., 255., 255.], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 2)), + dict( + type='MinIoURandomCrop', + min_ious=(0.4, 0.5, 0.6, 0.7, 0.8, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(320, 320), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='PhotoMetricDistortion'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(320, 320), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/yolo/yolov3_d53_fp16_mstrain-608_273e_coco.py b/object_detection/configs/yolo/yolov3_d53_fp16_mstrain-608_273e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4ef2422dada278c1e28b48d333437c7994832eba --- /dev/null +++ b/object_detection/configs/yolo/yolov3_d53_fp16_mstrain-608_273e_coco.py @@ -0,0 +1,3 @@ +_base_ = './yolov3_d53_mstrain-608_273e_coco.py' +# fp16 settings +fp16 = dict(loss_scale='dynamic') diff --git a/object_detection/configs/yolo/yolov3_d53_mstrain-416_273e_coco.py b/object_detection/configs/yolo/yolov3_d53_mstrain-416_273e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..94325c5a18a0b78788c1bdcccb68c179297bc084 --- /dev/null +++ b/object_detection/configs/yolo/yolov3_d53_mstrain-416_273e_coco.py @@ -0,0 +1,42 @@ +_base_ = './yolov3_d53_mstrain-608_273e_coco.py' +# dataset settings +img_norm_cfg = dict(mean=[0, 0, 0], std=[255., 255., 255.], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 2)), + dict( + type='MinIoURandomCrop', + min_ious=(0.4, 0.5, 0.6, 0.7, 0.8, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=[(320, 320), (416, 416)], keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='PhotoMetricDistortion'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + 
type='MultiScaleFlipAug', + img_scale=(416, 416), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/yolo/yolov3_d53_mstrain-608_273e_coco.py b/object_detection/configs/yolo/yolov3_d53_mstrain-608_273e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..601f0fa33db9e5f0ec958c0381ae0ba4a5158913 --- /dev/null +++ b/object_detection/configs/yolo/yolov3_d53_mstrain-608_273e_coco.py @@ -0,0 +1,127 @@ +_base_ = '../_base_/default_runtime.py' +# model settings +model = dict( + type='YOLOV3', + backbone=dict( + type='Darknet', + depth=53, + out_indices=(3, 4, 5), + init_cfg=dict(type='Pretrained', checkpoint='open-mmlab://darknet53')), + neck=dict( + type='YOLOV3Neck', + num_scales=3, + in_channels=[1024, 512, 256], + out_channels=[512, 256, 128]), + bbox_head=dict( + type='YOLOV3Head', + num_classes=80, + in_channels=[512, 256, 128], + out_channels=[1024, 512, 256], + anchor_generator=dict( + type='YOLOAnchorGenerator', + base_sizes=[[(116, 90), (156, 198), (373, 326)], + [(30, 61), (62, 45), (59, 119)], + [(10, 13), (16, 30), (33, 23)]], + strides=[32, 16, 8]), + bbox_coder=dict(type='YOLOBBoxCoder'), + featmap_strides=[32, 16, 8], + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0, + reduction='sum'), + loss_conf=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0, + reduction='sum'), + loss_xy=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=2.0, + reduction='sum'), + loss_wh=dict(type='MSELoss', loss_weight=2.0, reduction='sum')), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='GridAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0)), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + conf_thr=0.005, + nms=dict(type='nms', iou_threshold=0.45), + max_per_img=100)) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict(mean=[0, 0, 0], std=[255., 255., 255.], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 2)), + dict( + type='MinIoURandomCrop', + min_ious=(0.4, 0.5, 0.6, 0.7, 0.8, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=[(320, 320), (608, 608)], keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='PhotoMetricDistortion'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(608, 608), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=data_root + 
'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.001, momentum=0.9, weight_decay=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=2000, # same as burn-in in darknet + warmup_ratio=0.1, + step=[218, 246]) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=273) +evaluation = dict(interval=1, metric=['bbox']) diff --git a/object_detection/configs/yolo/yolov3_mobilenetv2_320_300e_coco.py b/object_detection/configs/yolo/yolov3_mobilenetv2_320_300e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..477d2530ac255e9fff4deabc650e26b326f14af4 --- /dev/null +++ b/object_detection/configs/yolo/yolov3_mobilenetv2_320_300e_coco.py @@ -0,0 +1,53 @@ +_base_ = ['./yolov3_mobilenetv2_mstrain-416_300e_coco.py'] + +# yapf:disable +model = dict( + bbox_head=dict( + anchor_generator=dict( + base_sizes=[[(220, 125), (128, 222), (264, 266)], + [(35, 87), (102, 96), (60, 170)], + [(10, 15), (24, 36), (72, 42)]]))) +# yapf:enable + +# dataset settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 2)), + dict( + type='MinIoURandomCrop', + min_ious=(0.4, 0.5, 0.6, 0.7, 0.8, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(320, 320), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='PhotoMetricDistortion'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(320, 320), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/yolo/yolov3_mobilenetv2_mstrain-416_300e_coco.py b/object_detection/configs/yolo/yolov3_mobilenetv2_mstrain-416_300e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fc3663e87b12b753d08f1f5124d6d3cb28f42263 --- /dev/null +++ b/object_detection/configs/yolo/yolov3_mobilenetv2_mstrain-416_300e_coco.py @@ -0,0 +1,137 @@ +_base_ = '../_base_/default_runtime.py' +# model settings +model = dict( + type='YOLOV3', + backbone=dict( + type='MobileNetV2', + out_indices=(2, 4, 6), + act_cfg=dict(type='LeakyReLU', negative_slope=0.1), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://mmdet/mobilenet_v2')), + neck=dict( + type='YOLOV3Neck', + num_scales=3, + in_channels=[320, 96, 32], + out_channels=[96, 96, 
96]), + bbox_head=dict( + type='YOLOV3Head', + num_classes=80, + in_channels=[96, 96, 96], + out_channels=[96, 96, 96], + anchor_generator=dict( + type='YOLOAnchorGenerator', + base_sizes=[[(116, 90), (156, 198), (373, 326)], + [(30, 61), (62, 45), (59, 119)], + [(10, 13), (16, 30), (33, 23)]], + strides=[32, 16, 8]), + bbox_coder=dict(type='YOLOBBoxCoder'), + featmap_strides=[32, 16, 8], + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0, + reduction='sum'), + loss_conf=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0, + reduction='sum'), + loss_xy=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=2.0, + reduction='sum'), + loss_wh=dict(type='MSELoss', loss_weight=2.0, reduction='sum')), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='GridAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0)), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + conf_thr=0.005, + nms=dict(type='nms', iou_threshold=0.45), + max_per_img=100)) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 2)), + dict( + type='MinIoURandomCrop', + min_ious=(0.4, 0.5, 0.6, 0.7, 0.8, 0.9), + min_crop_size=0.3), + dict( + type='Resize', + img_scale=[(320, 320), (416, 416)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='PhotoMetricDistortion'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(416, 416), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + samples_per_gpu=24, + workers_per_gpu=4, + train=dict( + type='RepeatDataset', # use RepeatDataset to speed up training + times=10, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.003, momentum=0.9, weight_decay=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=4000, + warmup_ratio=0.0001, + step=[24, 28]) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=30) +evaluation = dict(interval=1, metric=['bbox']) +find_unused_parameters = True diff --git a/object_detection/configs/yolof/README.md b/object_detection/configs/yolof/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..6a53ed5623f86c8e7646e12288449e234986f73d --- /dev/null +++ b/object_detection/configs/yolof/README.md @@ -0,0 +1,39 @@ +# You Only Look One-level Feature + +## Abstract + + + +This paper revisits feature pyramids networks (FPN) for one-stage detectors and points out that the success of FPN is due to its divide-and-conquer solution to the optimization problem in object detection rather than multi-scale feature fusion. From the perspective of optimization, we introduce an alternative way to address the problem instead of adopting the complex feature pyramids - {\em utilizing only one-level feature for detection}. Based on the simple and efficient solution, we present You Only Look One-level Feature (YOLOF). In our method, two key components, Dilated Encoder and Uniform Matching, are proposed and bring considerable improvements. Extensive experiments on the COCO benchmark prove the effectiveness of the proposed model. Our YOLOF achieves comparable results with its feature pyramids counterpart RetinaNet while being 2.5× faster. Without transformer layers, YOLOF can match the performance of DETR in a single-level feature manner with 7× less training epochs. With an image size of 608×608, YOLOF achieves 44.3 mAP running at 60 fps on 2080Ti, which is 13% faster than YOLOv4. + + +
+ +
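The "Dilated Encoder" mentioned in the abstract replaces the FPN with a single C5 feature map whose receptive field is enlarged by stacked residual blocks using dilated convolutions. As a rough illustration only (an editor's sketch, not the repository's `DilatedEncoder`; the channel and block settings are borrowed from the `yolof_r50_c5_8x8_1x_coco.py` config further down in this diff):

```python
import torch
import torch.nn as nn


class DilatedBottleneck(nn.Module):
    """Residual block with a dilated 3x3 conv: grows the receptive field
    of the single C5 map without adding extra pyramid levels."""

    def __init__(self, channels=512, mid_channels=128, dilation=2):
        super().__init__()
        self.block = nn.Sequential(
            nn.Conv2d(channels, mid_channels, 1),
            nn.BatchNorm2d(mid_channels), nn.ReLU(inplace=True),
            nn.Conv2d(mid_channels, mid_channels, 3,
                      padding=dilation, dilation=dilation),
            nn.BatchNorm2d(mid_channels), nn.ReLU(inplace=True),
            nn.Conv2d(mid_channels, channels, 1),
            nn.BatchNorm2d(channels), nn.ReLU(inplace=True))

    def forward(self, x):
        return x + self.block(x)  # residual connection


class ToyDilatedEncoder(nn.Module):
    """Project C5 (2048 ch) down to 512 ch, then stack blocks with growing dilation."""

    def __init__(self, in_channels=2048, out_channels=512, dilations=(2, 4, 6, 8)):
        super().__init__()
        self.lateral = nn.Conv2d(in_channels, out_channels, 1)
        self.blocks = nn.Sequential(
            *[DilatedBottleneck(out_channels, dilation=d) for d in dilations])

    def forward(self, c5):
        return self.blocks(self.lateral(c5))


# A 640x640 image gives a 20x20 C5 map at stride 32.
feat = ToyDilatedEncoder()(torch.randn(1, 2048, 20, 20))
print(feat.shape)  # torch.Size([1, 512, 20, 20])
```

Detection then runs on this single 512-channel, stride-32 map, which is why the config below uses `strides=[32]` and a `UniformAssigner` instead of multi-level FPN assignment.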
+ + + + +## Citation + + + +``` +@inproceedings{chen2021you, + title={You Only Look One-level Feature}, + author={Chen, Qiang and Wang, Yingming and Yang, Tong and Zhang, Xiangyu and Cheng, Jian and Sun, Jian}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2021} +} +``` + +## Results and Models + +| Backbone | Style | Epoch | Lr schd | Mem (GB) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:-------:|:--------:|:------:|:------:|:--------:| +| R-50-C5 | caffe | Y | 1x | 8.3 | 37.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolof/yolof_r50_c5_8x8_1x_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/yolof/yolof_r50_c5_8x8_1x_coco/yolof_r50_c5_8x8_1x_coco_20210425_024427-8e864411.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/yolof/yolof_r50_c5_8x8_1x_coco/yolof_r50_c5_8x8_1x_coco_20210425_024427.log.json) | + +**Note**: + +1. We find that the performance is unstable and may fluctuate by about 0.3 mAP. mAP 37.4 ~ 37.7 is acceptable in YOLOF_R_50_C5_1x. Such fluctuation can also be found in the [original implementation](https://github.com/chensnathan/YOLOF). +2. In addition to instability issues, sometimes there are large loss fluctuations and NAN, so there may still be problems with this project, which will be improved subsequently. diff --git a/object_detection/configs/yolof/metafile.yml b/object_detection/configs/yolof/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..9436fee2d05328f5d514c09f0b2d9c42121c550b --- /dev/null +++ b/object_detection/configs/yolof/metafile.yml @@ -0,0 +1,32 @@ +Collections: + - Name: YOLOF + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Dilated Encoder + - ResNet + Paper: + URL: https://arxiv.org/abs/2103.09460 + Title: 'You Only Look One-level Feature' + README: configs/yolof/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.12.0/mmdet/models/detectors/yolof.py#L6 + Version: v2.12.0 + +Models: + - Name: yolof_r50_c5_8x8_1x_coco + In Collection: YOLOF + Config: configs/yolof/yolof_r50_c5_8x8_1x_coco.py + Metadata: + Training Memory (GB): 8.3 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolof/yolof_r50_c5_8x8_1x_coco/yolof_r50_c5_8x8_1x_coco_20210425_024427-8e864411.pth diff --git a/object_detection/configs/yolof/yolof_r50_c5_8x8_1x_coco.py b/object_detection/configs/yolof/yolof_r50_c5_8x8_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0e3b5016296b266c1cdd2d362fd2e8e72f13578a --- /dev/null +++ b/object_detection/configs/yolof/yolof_r50_c5_8x8_1x_coco.py @@ -0,0 +1,105 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='YOLOF', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(3, ), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet50_caffe')), + neck=dict( + type='DilatedEncoder', + in_channels=2048, + out_channels=512, + block_mid_channels=128, + num_residual_blocks=4), + bbox_head=dict( + type='YOLOFHead', + num_classes=80, + in_channels=512, + reg_decoded_bbox=True, + anchor_generator=dict( + 
type='AnchorGenerator', + ratios=[1.0], + scales=[1, 2, 4, 8, 16], + strides=[32]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1., 1., 1., 1.], + add_ctr_clamp=True, + ctr_clamp=32), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='UniformAssigner', pos_ignore_thr=0.15, neg_ignore_thr=0.7), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +# optimizer +optimizer = dict( + type='SGD', + lr=0.12, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict( + norm_decay_mult=0., custom_keys={'backbone': dict(lr_mult=1. / 3)})) +lr_config = dict(warmup_iters=1500, warmup_ratio=0.00066667) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='RandomShift', shift_ratio=0.5, max_shift_px=32), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=8, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/yolof/yolof_r50_c5_8x8_iter-1x_coco.py b/object_detection/configs/yolof/yolof_r50_c5_8x8_iter-1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c95c02da103bdd499063312c36ade30601bb7380 --- /dev/null +++ b/object_detection/configs/yolof/yolof_r50_c5_8x8_iter-1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './yolof_r50_c5_8x8_1x_coco.py' + +# We implemented the iter-based config according to the source code. +# COCO dataset has 117266 images after filtering. We use 8 gpu and +# 8 batch size training, so 22500 is equivalent to +# 22500/(117266/(8x8))=12.3 epoch, 15000 is equivalent to 8.2 epoch, +# 20000 is equivalent to 10.9 epoch. Due to lr(0.12) is large, +# the iter-based and epoch-based setting have about 0.2 difference on +# the mAP evaluation value. 
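+# Cross-checking the arithmetic: 117266 / (8x8) ≈ 1832 iterations per epoch,
+# so 22500 ≈ 12.3 epochs, 15000 ≈ 8.2 and 20000 ≈ 10.9; the step points below
+# therefore roughly mirror the [8, 11]-epoch decay of a standard 1x schedule.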
+lr_config = dict(step=[15000, 20000]) +runner = dict(_delete_=True, type='IterBasedRunner', max_iters=22500) +checkpoint_config = dict(interval=2500) +evaluation = dict(interval=4500) +log_config = dict(interval=20) diff --git a/object_detection/configs/yolox/README.md b/object_detection/configs/yolox/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b836b826b254100012f8020499ac1cad3e99166e --- /dev/null +++ b/object_detection/configs/yolox/README.md @@ -0,0 +1,44 @@ +# YOLOX: Exceeding YOLO Series in 2021 + +## Abstract + + + +In this report, we present some experienced improvements to YOLO series, forming a new high-performance detector -- YOLOX. We switch the YOLO detector to an anchor-free manner and conduct other advanced detection techniques, i.e., a decoupled head and the leading label assignment strategy SimOTA to achieve state-of-the-art results across a large scale range of models: For YOLO-Nano with only 0.91M parameters and 1.08G FLOPs, we get 25.3% AP on COCO, surpassing NanoDet by 1.8% AP; for YOLOv3, one of the most widely used detectors in industry, we boost it to 47.3% AP on COCO, outperforming the current best practice by 3.0% AP; for YOLOX-L with roughly the same amount of parameters as YOLOv4-CSP, YOLOv5-L, we achieve 50.0% AP on COCO at a speed of 68.9 FPS on Tesla V100, exceeding YOLOv5-L by 1.8% AP. Further, we won the 1st Place on Streaming Perception Challenge (Workshop on Autonomous Driving at CVPR 2021) using a single YOLOX-L model. We hope this report can provide useful experience for developers and researchers in practical scenes, and we also provide deploy versions with ONNX, TensorRT, NCNN, and Openvino supported. + + +
+ +
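The "decoupled head" referred to above simply means that classification and box regression are predicted by separate convolution branches rather than one shared branch, and the anchor-free design predicts a single box per feature-map location. A schematic PyTorch sketch of that idea (an editor's illustration with assumed channel sizes, not the repository's `YOLOXHead`):

```python
import torch
import torch.nn as nn


class ToyDecoupledHead(nn.Module):
    """One FPN level of an anchor-free head: a shared stem, then separate
    branches for class scores and for box/objectness predictions."""

    def __init__(self, in_channels=128, feat_channels=128, num_classes=80):
        super().__init__()

        def conv(c_in, c_out):
            return nn.Sequential(
                nn.Conv2d(c_in, c_out, 3, padding=1),
                nn.BatchNorm2d(c_out), nn.SiLU(inplace=True))

        self.stem = conv(in_channels, feat_channels)
        self.cls_branch = nn.Sequential(conv(feat_channels, feat_channels),
                                        conv(feat_channels, feat_channels))
        self.reg_branch = nn.Sequential(conv(feat_channels, feat_channels),
                                        conv(feat_channels, feat_channels))
        self.cls_out = nn.Conv2d(feat_channels, num_classes, 1)  # class scores
        self.reg_out = nn.Conv2d(feat_channels, 4, 1)            # box offsets (anchor-free)
        self.obj_out = nn.Conv2d(feat_channels, 1, 1)            # objectness

    def forward(self, x):
        x = self.stem(x)
        cls_feat, reg_feat = self.cls_branch(x), self.reg_branch(x)
        return self.cls_out(cls_feat), self.reg_out(reg_feat), self.obj_out(reg_feat)


# One 80x80 feature level of a 640x640 input (stride 8).
cls, box, obj = ToyDecoupledHead()(torch.randn(1, 128, 80, 80))
print(cls.shape, box.shape, obj.shape)  # [1, 80, 80, 80], [1, 4, 80, 80], [1, 1, 80, 80]
```

During training, SimOTA then assigns ground-truth boxes to a dynamic number of these per-location predictions; the `yolox_s_8x8_300e_coco.py` config later in this diff wires that up through `SimOTAAssigner`.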
+ + + + +## Citation + + + +```latex +@article{yolox2021, + title={{YOLOX}: Exceeding YOLO Series in 2021}, + author={Ge, Zheng and Liu, Songtao and Wang, Feng and Li, Zeming and Sun, Jian}, + journal={arXiv preprint arXiv:2107.08430}, + year={2021} +} +``` + +## Results and Models + +| Backbone | size | Mem (GB) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:-------:|:--------:|:------:| +| YOLOX-tiny | 416 | 3.5 | 32.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolox/yolox_tiny_8x8_300e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/yolox/yolox_tiny_8x8_300e_coco/yolox_tiny_8x8_300e_coco_20211124_171234-b4047906.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/yolox/yolox_tiny_8x8_300e_coco/yolox_tiny_8x8_300e_coco_20211124_171234.log.json) | +| YOLOX-s | 640 | 7.6 | 40.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolox/yolox_s_8x8_300e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/yolox/yolox_s_8x8_300e_coco/yolox_s_8x8_300e_coco_20211121_095711-4592a793.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/yolox/yolox_s_8x8_300e_coco/yolox_s_8x8_300e_coco_20211121_095711.log.json) | +| YOLOX-l | 640 | 19.9 | 49.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolox/yolox_l_8x8_300e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/yolox/yolox_l_8x8_300e_coco/yolox_l_8x8_300e_coco_20211126_140236-d3bd2b23.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/yolox/yolox_l_8x8_300e_coco/yolox_l_8x8_300e_coco_20211126_140236.log.json) | +| YOLOX-x | 640 | 28.1 | 50.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolox/yolox_x_8x8_300e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/yolox/yolox_x_8x8_300e_coco/yolox_x_8x8_300e_coco_20211126_140254-1ef88d67.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/yolox/yolox_x_8x8_300e_coco/yolox_x_8x8_300e_coco_20211126_140254.log.json) | + + +**Note**: + +1. The test score threshold is 0.001, and the box AP indicates the best AP. +2. Due to the need for pre-training weights, we cannot reproduce the performance of the `yolox-nano` model. Please refer to https://github.com/Megvii-BaseDetection/YOLOX/issues/674 for more information. +3. We also trained the model by the official release of YOLOX based on [Megvii-BaseDetection/YOLOX#735](https://github.com/Megvii-BaseDetection/YOLOX/issues/735) with commit ID [38c633](https://github.com/Megvii-BaseDetection/YOLOX/tree/38c633bf176462ee42b110c70e4ffe17b5753208). We found that the best AP of `YOLOX-tiny`, `YOLOX-s`, `YOLOX-l`, and `YOLOX-x` is 31.8, 40.3, 49.2, and 50.9, respectively. The performance is consistent with that of our re-implementation (see Table above) but still has a gap (0.3~0.8 AP) in comparison with the reported performance in their [README](https://github.com/Megvii-BaseDetection/YOLOX/blob/38c633bf176462ee42b110c70e4ffe17b5753208/README.md#benchmark). 
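As a usage note (a hedged sketch rather than part of the repository: it assumes an MMDetection v2.x environment, that the released checkpoint linked in the table above has been downloaded locally, and `demo.jpg` is a placeholder image), single-image inference with one of these configs would look roughly like:

```python
from mmdet.apis import init_detector, inference_detector

config_file = 'object_detection/configs/yolox/yolox_s_8x8_300e_coco.py'
checkpoint_file = 'yolox_s_8x8_300e_coco_20211121_095711-4592a793.pth'  # downloaded weights

model = init_detector(config_file, checkpoint_file, device='cuda:0')

# Returns one (N, 5) array per class: x1, y1, x2, y2, score.
result = inference_detector(model, 'demo.jpg')

# Visualise with a practical score threshold; note that the box AP figures
# above are computed with a much lower test score threshold (0.001).
model.show_result('demo.jpg', result, score_thr=0.3, out_file='demo_result.jpg')
```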
diff --git a/object_detection/configs/yolox/metafile.yml b/object_detection/configs/yolox/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..845cb0a4c45a993e18b288c7509735e984aaa5c6 --- /dev/null +++ b/object_detection/configs/yolox/metafile.yml @@ -0,0 +1,70 @@ +Collections: + - Name: YOLOX + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Nesterov + - Weight Decay + - Cosine Annealing Lr Updater + Training Resources: 8x TITANXp GPUs + Architecture: + - CSPDarkNet + - PAFPN + Paper: + URL: https://arxiv.org/abs/2107.08430 + Title: 'YOLOX: Exceeding YOLO Series in 2021' + README: configs/yolox/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.15.1/mmdet/models/detectors/yolox.py#L6 + Version: v2.15.1 + + +Models: + - Name: yolox_s_8x8_300e_coco + In Collection: YOLOX + Config: configs/yolox/yolox_s_8x8_300e_coco.py + Metadata: + Training Memory (GB): 7.6 + Epochs: 300 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolox/yolox_s_8x8_300e_coco/yolox_s_8x8_300e_coco_20211121_095711-4592a793.pth + - Name: yolox_l_8x8_300e_coco + In Collection: YOLOX + Config: configs/yolox/yolox_l_8x8_300e_coco.py + Metadata: + Training Memory (GB): 19.9 + Epochs: 300 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 49.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolox/yolox_l_8x8_300e_coco/yolox_l_8x8_300e_coco_20211126_140236-d3bd2b23.pth + - Name: yolox_x_8x8_300e_coco + In Collection: YOLOX + Config: configs/yolox/yolox_x_8x8_300e_coco.py + Metadata: + Training Memory (GB): 28.1 + Epochs: 300 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 50.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolox/yolox_x_8x8_300e_coco/yolox_x_8x8_300e_coco_20211126_140254-1ef88d67.pth + - Name: yolox_tiny_8x8_300e_coco + In Collection: YOLOX + Config: configs/yolox/yolox_tiny_8x8_300e_coco.py + Metadata: + Training Memory (GB): 3.5 + Epochs: 300 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 32.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolox/yolox_tiny_8x8_300e_coco/yolox_tiny_8x8_300e_coco_20211124_171234-b4047906.pth diff --git a/object_detection/configs/yolox/yolox_l_8x8_300e_coco.py b/object_detection/configs/yolox/yolox_l_8x8_300e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..dcbfa183a6739623553e8a0345875a707d68f2b4 --- /dev/null +++ b/object_detection/configs/yolox/yolox_l_8x8_300e_coco.py @@ -0,0 +1,8 @@ +_base_ = './yolox_s_8x8_300e_coco.py' + +# model settings +model = dict( + backbone=dict(deepen_factor=1.0, widen_factor=1.0), + neck=dict( + in_channels=[256, 512, 1024], out_channels=256, num_csp_blocks=3), + bbox_head=dict(in_channels=256, feat_channels=256)) diff --git a/object_detection/configs/yolox/yolox_m_8x8_300e_coco.py b/object_detection/configs/yolox/yolox_m_8x8_300e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3048c95c6860c0af055df9dc05d4f90f427fd371 --- /dev/null +++ b/object_detection/configs/yolox/yolox_m_8x8_300e_coco.py @@ -0,0 +1,8 @@ +_base_ = './yolox_s_8x8_300e_coco.py' + +# model settings +model = dict( + backbone=dict(deepen_factor=0.67, widen_factor=0.75), + neck=dict(in_channels=[192, 384, 768], out_channels=192, num_csp_blocks=2), + bbox_head=dict(in_channels=192, feat_channels=192), +) diff --git 
a/object_detection/configs/yolox/yolox_nano_8x8_300e_coco.py b/object_detection/configs/yolox/yolox_nano_8x8_300e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d33ed04bc08f3dafd327206c4bb888b9acadfd70 --- /dev/null +++ b/object_detection/configs/yolox/yolox_nano_8x8_300e_coco.py @@ -0,0 +1,11 @@ +_base_ = './yolox_tiny_8x8_300e_coco.py' + +# model settings +model = dict( + backbone=dict(deepen_factor=0.33, widen_factor=0.25, use_depthwise=True), + neck=dict( + in_channels=[64, 128, 256], + out_channels=64, + num_csp_blocks=1, + use_depthwise=True), + bbox_head=dict(in_channels=64, feat_channels=64, use_depthwise=True)) diff --git a/object_detection/configs/yolox/yolox_s_8x8_300e_coco.py b/object_detection/configs/yolox/yolox_s_8x8_300e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cc7305130721a20a2015110a9d1dddcdb5cdb598 --- /dev/null +++ b/object_detection/configs/yolox/yolox_s_8x8_300e_coco.py @@ -0,0 +1,160 @@ +_base_ = ['../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py'] + +img_scale = (640, 640) + +# model settings +model = dict( + type='YOLOX', + input_size=img_scale, + random_size_range=(15, 25), + random_size_interval=10, + backbone=dict(type='CSPDarknet', deepen_factor=0.33, widen_factor=0.5), + neck=dict( + type='YOLOXPAFPN', + in_channels=[128, 256, 512], + out_channels=128, + num_csp_blocks=1), + bbox_head=dict( + type='YOLOXHead', num_classes=80, in_channels=128, feat_channels=128), + train_cfg=dict(assigner=dict(type='SimOTAAssigner', center_radius=2.5)), + # In order to align the source code, the threshold of the val phase is + # 0.01, and the threshold of the test phase is 0.001. + test_cfg=dict(score_thr=0.01, nms=dict(type='nms', iou_threshold=0.65))) + +# dataset settings +data_root = 'data/coco/' +dataset_type = 'CocoDataset' + +train_pipeline = [ + dict(type='Mosaic', img_scale=img_scale, pad_val=114.0), + dict( + type='RandomAffine', + scaling_ratio_range=(0.1, 2), + border=(-img_scale[0] // 2, -img_scale[1] // 2)), + dict( + type='MixUp', + img_scale=img_scale, + ratio_range=(0.8, 1.6), + pad_val=114.0), + dict(type='YOLOXHSVRandomAug'), + dict(type='RandomFlip', flip_ratio=0.5), + # According to the official implementation, multi-scale + # training is not considered here but in the + # 'mmdet/models/detectors/yolox.py'. + dict(type='Resize', img_scale=img_scale, keep_ratio=True), + dict( + type='Pad', + pad_to_square=True, + # If the image is three-channel, the pad value needs + # to be set separately for each channel. 
+ pad_val=dict(img=(114.0, 114.0, 114.0))), + dict(type='FilterAnnotations', min_gt_bbox_wh=(1, 1), keep_empty=False), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] + +train_dataset = dict( + type='MultiImageMixDataset', + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True) + ], + filter_empty_gt=False, + ), + pipeline=train_pipeline) + +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=img_scale, + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict( + type='Pad', + pad_to_square=True, + pad_val=dict(img=(114.0, 114.0, 114.0))), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img']) + ]) +] + +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + persistent_workers=True, + train=train_dataset, + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) + +# optimizer +# default 8 gpu +optimizer = dict( + type='SGD', + lr=0.01, + momentum=0.9, + weight_decay=5e-4, + nesterov=True, + paramwise_cfg=dict(norm_decay_mult=0., bias_decay_mult=0.)) +optimizer_config = dict(grad_clip=None) + +max_epochs = 300 +num_last_epochs = 15 +resume_from = None +interval = 10 + +# learning policy +lr_config = dict( + _delete_=True, + policy='YOLOX', + warmup='exp', + by_epoch=False, + warmup_by_epoch=True, + warmup_ratio=1, + warmup_iters=5, # 5 epoch + num_last_epochs=num_last_epochs, + min_lr_ratio=0.05) + +runner = dict(type='EpochBasedRunner', max_epochs=max_epochs) + +custom_hooks = [ + dict( + type='YOLOXModeSwitchHook', + num_last_epochs=num_last_epochs, + priority=48), + dict( + type='SyncNormHook', + num_last_epochs=num_last_epochs, + interval=interval, + priority=48), + dict( + type='ExpMomentumEMAHook', + resume_from=resume_from, + momentum=0.0001, + priority=49) +] +checkpoint_config = dict(interval=interval) +evaluation = dict( + save_best='auto', + # The evaluation interval is 'interval' when running epoch is + # less than ‘max_epochs - num_last_epochs’. + # The evaluation interval is 1 when running epoch is greater than + # or equal to ‘max_epochs - num_last_epochs’. 
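+    # With the values above (interval=10, max_epochs=300, num_last_epochs=15),
+    # dynamic_intervals switches the schedule at epoch 285: evaluation runs
+    # every 10 epochs before that point and every epoch afterwards.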
+ interval=interval, + dynamic_intervals=[(max_epochs - num_last_epochs, 1)], + metric='bbox') +log_config = dict(interval=50) diff --git a/object_detection/configs/yolox/yolox_tiny_8x8_300e_coco.py b/object_detection/configs/yolox/yolox_tiny_8x8_300e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3aee99ab0a6b091b04a8fb14c44c5519354f9555 --- /dev/null +++ b/object_detection/configs/yolox/yolox_tiny_8x8_300e_coco.py @@ -0,0 +1,53 @@ +_base_ = './yolox_s_8x8_300e_coco.py' + +# model settings +model = dict( + random_size_range=(10, 20), + backbone=dict(deepen_factor=0.33, widen_factor=0.375), + neck=dict(in_channels=[96, 192, 384], out_channels=96), + bbox_head=dict(in_channels=96, feat_channels=96)) + +img_scale = (640, 640) + +train_pipeline = [ + dict(type='Mosaic', img_scale=img_scale, pad_val=114.0), + dict( + type='RandomAffine', + scaling_ratio_range=(0.5, 1.5), + border=(-img_scale[0] // 2, -img_scale[1] // 2)), + dict(type='YOLOXHSVRandomAug'), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Resize', img_scale=img_scale, keep_ratio=True), + dict( + type='Pad', + pad_to_square=True, + pad_val=dict(img=(114.0, 114.0, 114.0))), + dict(type='FilterAnnotations', min_gt_bbox_wh=(1, 1), keep_empty=False), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] + +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(416, 416), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict( + type='Pad', + pad_to_square=True, + pad_val=dict(img=(114.0, 114.0, 114.0))), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img']) + ]) +] + +train_dataset = dict(pipeline=train_pipeline) + +data = dict( + train=train_dataset, + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/object_detection/configs/yolox/yolox_x_8x8_300e_coco.py b/object_detection/configs/yolox/yolox_x_8x8_300e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..65c0b75c186c56b3dcb55db76d74e879b413f862 --- /dev/null +++ b/object_detection/configs/yolox/yolox_x_8x8_300e_coco.py @@ -0,0 +1,8 @@ +_base_ = './yolox_s_8x8_300e_coco.py' + +# model settings +model = dict( + backbone=dict(deepen_factor=1.33, widen_factor=1.25), + neck=dict( + in_channels=[320, 640, 1280], out_channels=320, num_csp_blocks=4), + bbox_head=dict(in_channels=320, feat_channels=320)) diff --git a/object_detection/convert_to_coco.py b/object_detection/convert_to_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..eaad5b9fb6125d90a11bf0d1446270d687dfe66a --- /dev/null +++ b/object_detection/convert_to_coco.py @@ -0,0 +1,294 @@ +# based on https://www.kaggle.com/sreevishnudamodaran/vinbigdata-fusing-bboxes-coco-dataset#Building-COCO-DATASET + +import os +from pathlib import Path +from datetime import datetime +import shutil +from collections import Counter +import warnings +import json + +import matplotlib.pyplot as plt +import pandas as pd +import numpy as np +import cv2 as cv +from tqdm import tqdm +from ensemble_boxes import weighted_boxes_fusion + +warnings.filterwarnings("ignore", category=UserWarning) + +def plot_img(img, size=(18, 18), is_rgb=True, title="", cmap='gray'): + plt.figure(figsize=size) + plt.imshow(img, cmap=cmap) + plt.suptitle(title) + plt.show() + +def plot_imgs(imgs, cols=2, size=10, is_rgb=True, title="", cmap='gray', img_size=None): + rows = len(imgs)//cols + 1 + fig = 
plt.figure(figsize=(cols*size, rows*size)) + for i, img in enumerate(imgs): + if img_size is not None: + img = cv.resize(img, img_size) + fig.add_subplot(rows, cols, i+1) + plt.imshow(img, cmap=cmap) + plt.suptitle(title) + +def draw_bbox(image, box, label, color, thickness=3): + alpha = 0.1 + alpha_box = 0.4 + overlay_bbox = image.copy() + overlay_text = image.copy() + output = image.copy() + + text_width, text_height = cv.getTextSize(label.upper(), cv.FONT_HERSHEY_SIMPLEX, 0.6, 1)[0] + cv.rectangle(overlay_bbox, (box[0], box[1]), (box[2], box[3]), + color, -1) + cv.addWeighted(overlay_bbox, alpha, output, 1 - alpha, 0, output) + cv.rectangle(overlay_text, (box[0], box[1]-7-text_height), (box[0]+text_width+2, box[1]), + (0, 0, 0), -1) + cv.addWeighted(overlay_text, alpha_box, output, 1 - alpha_box, 0, output) + cv.rectangle(output, (box[0], box[1]), (box[2], box[3]), + color, thickness) + cv.putText(output, label.upper(), (box[0], box[1]-5), + cv.FONT_HERSHEY_SIMPLEX, 0.6, (255, 255, 255), 1, cv.LINE_AA) + return output + +def normalize_bboxes(df): + df['x_min'] = df.apply(lambda row: (row.x_min)/row.width * 512, axis =1) + df['y_min'] = df.apply(lambda row: (row.y_min)/row.height * 512, axis =1) + + df['x_max'] = df.apply(lambda row: (row.x_max)/row.width * 512, axis =1) + df['y_max'] = df.apply(lambda row: (row.y_max)/row.height * 512, axis =1) + + df['x_mid'] = df.apply(lambda row: (row.x_max+row.x_min)/2 * 512, axis =1) + df['y_mid'] = df.apply(lambda row: (row.y_max+row.y_min)/2 * 512, axis =1) + + df['w'] = df.apply(lambda row: (row.x_max-row.x_min), axis =1) + df['h'] = df.apply(lambda row: (row.y_max-row.y_min), axis =1) + + df['area'] = df['w']*df['h'] + return df + + +labels = [ + "__ignore__", + "Aortic_enlargement", + "Atelectasis", + "Calcification", + "Cardiomegaly", + "Consolidation", + "ILD", + "Infiltration", + "Lung_Opacity", + "Nodule/Mass", + "Other_lesion", + "Pleural_effusion", + "Pleural_thickening", + "Pneumothorax", + "Pulmonary_fibrosis" + ] + +label2color = [[59, 238, 119], [222, 21, 229], [94, 49, 164], [206, 221, 133], [117, 75, 3], + [210, 224, 119], [211, 176, 166], [63, 7, 197], [102, 65, 77], [194, 134, 175], + [209, 219, 50], [255, 44, 47], [89, 125, 149], [110, 27, 100]] + +viz_labels = labels[1:] + +now = datetime.now() + +data = dict( + info=dict( + description=None, + url=None, + version=None, + year=now.year, + contributor=None, + date_created=now.strftime('%Y-%m-%d %H:%M:%S.%f'), + ), + licenses=[dict( + url=None, + id=0, + name=None, + )], + images=[], + type='instances', + annotations=[], + categories=[], +) + +class_name_to_id = {} +for i, each_label in enumerate(labels): + class_id = i - 1 # starts with -1 + class_name = each_label + if class_id == -1: + assert class_name == '__ignore__' + continue + class_name_to_id[class_name] = class_id + data['categories'].append(dict( + supercategory=None, + id=class_id, + name=class_name, + )) + +train_out_dir = 'data/train' +valid_out_dir = 'data/valid' +test_out_dir = 'data/test' + +for dir in [train_out_dir, valid_out_dir, test_out_dir]: + if Path(dir).exists(): + shutil.rmtree(dir) + os.makedirs(dir) + +train_out_file = 'data/train_annotations.json' +valid_out_file = 'data/valid_annotations.json' +test_out_file = 'data/test_annotations.json' + +all_images_folder = 'vinbigdata/train' +all_files = os.listdir(all_images_folder) +all_files = np.sort(np.array(all_files)) + +data_train = data.copy() +data_valid = data.copy() +data_test = data.copy() + +for data in [data_train, data_valid, data_test]: + 
data['images'] = [] + data['annotations'] = [] + +all_annotations = pd.read_csv('vinbigdata/train.csv') +all_annotations = all_annotations[all_annotations.class_id != 14] +all_annotations['image_path'] = all_annotations['image_id'].map(lambda id: + os.path.join(all_images_folder, str(id) + '.png')) +normalize_bboxes(all_annotations) +all_image_paths = all_annotations['image_path'].unique() + +np.random.seed(1) + +indices = np.arange(len(all_image_paths)) +np.random.shuffle(indices) + +# train, valid, test +splits = [0.7, 0.1, 0.2] + +train_split_index = int(splits[0] * len(indices)) +valid_split_index = int((splits[0] + splits[1]) * len(indices)) + +train_paths = all_image_paths[:train_split_index] +valid_paths = all_image_paths[train_split_index:valid_split_index] +test_paths = all_image_paths[valid_split_index:] + +print(f'train: {len(train_paths)}, test: {len(test_paths)}, valid: {len(valid_paths)}') + +folders = [train_out_dir, valid_out_dir, test_out_dir] +paths = [train_paths, valid_paths, test_paths] +data_dicts = [data_train, data_valid, data_test] +out_files = [train_out_file, valid_out_file, test_out_file] + +# parameters for weighted box fusion +iou_thr = 0.2 +skip_box_thr = 0.0001 + +for (folder, paths, data, out_file) in zip(folders, paths, data_dicts, out_files): + print(f'Saving to {folder}...') + + viz_images = [] + + for i, path in tqdm(enumerate(paths)): + img_array = cv.imread(path) + image_basename = Path(path).stem + shutil.copy2(path, folder) + + ## Add Images to annotation + data['images'].append(dict( + license=0, + url=None, + file_name=os.path.join(folder.split('/')[-1], image_basename+ '.png'), + height=img_array.shape[0], + width=img_array.shape[1], + date_captured=None, + id=i + )) + + img_annotations = all_annotations[all_annotations.image_id==image_basename] + boxes_viz = img_annotations[['x_min', 'y_min', 'x_max', 'y_max']].to_numpy().tolist() + labels_viz = img_annotations['class_id'].to_numpy().tolist() + + ## Visualize Original Bboxes every 500th + if (i%500==0): + img_before = img_array.copy() + for box, label in zip(boxes_viz, labels_viz): + x_min, y_min, x_max, y_max = (box[0], box[1], box[2], box[3]) + color = label2color[int(label)] + img_before = draw_bbox(img_before, list(np.int_(box)), viz_labels[label], color) + viz_images.append(img_before) + + boxes_list = [] + scores_list = [] + labels_list = [] + weights = [] + + boxes_single = [] + labels_single = [] + + cls_ids = img_annotations['class_id'].unique().tolist() + + count_dict = Counter(img_annotations['class_id'].tolist()) + + for cid in cls_ids: + ## Performing Fusing operation only for multiple bboxes with the same label + if count_dict[cid]==1: + labels_single.append(cid) + boxes_single.append(img_annotations[img_annotations.class_id==cid][['x_min', 'y_min', 'x_max', 'y_max']].to_numpy().squeeze().tolist()) + + else: + cls_list =img_annotations[img_annotations.class_id==cid]['class_id'].tolist() + labels_list.append(cls_list) + bbox = img_annotations[img_annotations.class_id==cid][['x_min', 'y_min', 'x_max', 'y_max']].to_numpy() + + ## Normalizing Bbox by Image Width and Height + bbox = bbox/(img_array.shape[1], img_array.shape[0], img_array.shape[1], img_array.shape[0]) + bbox = np.clip(bbox, 0, 1) + boxes_list.append(bbox.tolist()) + scores_list.append(np.ones(len(cls_list)).tolist()) + weights.append(1) + + ## Perform WBF + boxes, scores, box_labels = weighted_boxes_fusion(boxes_list=boxes_list, scores_list=scores_list, + labels_list=labels_list, weights=weights, + iou_thr=iou_thr, 
skip_box_thr=skip_box_thr) + + boxes = boxes*(img_array.shape[1], img_array.shape[0], img_array.shape[1], img_array.shape[0]) + boxes = boxes.round(1).tolist() + box_labels = box_labels.astype(int).tolist() + boxes.extend(boxes_single) + box_labels.extend(labels_single) + + for box, label in zip(boxes, box_labels): + x_min, y_min, x_max, y_max = (box[0], box[1], box[2], box[3]) + area = round((x_max-x_min)*(y_max-y_min),1) + bbox =[ + round(x_min, 1), + round(y_min, 1), + round((x_max-x_min), 1), + round((y_max-y_min), 1) + ] + + data['annotations'].append(dict( id=len(data['annotations']), image_id=i, + category_id=int(label), area=area, bbox=bbox, + iscrowd=0)) + + ## Visualize Bboxes after operation every 500th + if (i%500==0): + img_after = img_array.copy() + for box, label in zip(boxes, box_labels): + color = label2color[int(label)] + img_after = draw_bbox(img_after, list(np.int_(box)), viz_labels[label], color) + viz_images.append(img_after) + + plot_imgs(viz_images, cmap=None, size=40) + plt.figtext(0.3, 0.9,"Original Bboxes", va="top", ha="center", size=15) + plt.figtext(0.73, 0.9,"WBF", va="top", ha="center", size=15) + plt.show() + + with open(out_file, 'w') as f: + json.dump(data, f, indent=4) diff --git a/object_detection/data/__init__.py b/object_detection/data/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/object_detection/finetune_with_path_modify_test_eval.py b/object_detection/finetune_with_path_modify_test_eval.py new file mode 100644 index 0000000000000000000000000000000000000000..5b776efdee292a67fd11d6587c3001e226905101 --- /dev/null +++ b/object_detection/finetune_with_path_modify_test_eval.py @@ -0,0 +1,196 @@ +import argparse +import sys +from datetime import datetime +import os +# os.chdir('/home/caduser/KOTORI/vin-ssl/source') +# sys.path.append('/home/caduser/KOTORI/vin-ssl/source') +import copy +import shutil + +from natsort import natsorted +from collections import OrderedDict +import torch + +from mmdet.datasets import build_dataset, CocoDataset +from mmdet.datasets.api_wrappers import COCO +from mmdet.datasets.builder import DATASETS +from mmdet.models import build_detector +from mmdet.apis import train_detector +from base_config_track import get_config + +@DATASETS.register_module() +class CocoDatasetSubset(CocoDataset): + """ + A subclass of MMDetection's default COCO dataset which has the ability + to take the first or last n% of the original dataset. Set either + take_first_percent or take_last_percent to a value greater than 0. + """ + def __init__(self, *args, take_first_percent=-1, take_last_percent=-1, **kwargs): + self.take_first_percent = take_first_percent + self.take_last_percent = take_last_percent + super().__init__(*args, **kwargs) + + def load_annotations(self, ann_file): + """Load annotation from COCO style annotation file. + + Args: + ann_file (str): Path of annotation file. + + Returns: + list[dict]: Annotation info from COCO api. 
+ """ + assert self.take_first_percent > 0 or self.take_last_percent > 0, f'take_first_percent: {self.take_first_percent}, take_last_percent: {self.take_first_percent}' + assert(self.take_first_percent > 0 if self.take_last_percent <= 0 else self.take_first_percent <= 0) + + self.coco = COCO(ann_file) + # The order of returned `cat_ids` will not + # change with the order of the CLASSES + self.cat_ids = self.coco.get_cat_ids(cat_names=self.CLASSES) + + self.cat2label = {cat_id: i for i, cat_id in enumerate(self.cat_ids)} + self.img_ids = self.coco.get_img_ids() + + original_count = len(self.img_ids) + + # make a subset + if self.take_first_percent > 0: + first_n = True + count = int(len(self.img_ids) * self.take_first_percent) + self.img_ids = self.img_ids[:count] + elif self.take_last_percent > 0: + first_n = False + count = int(len(self.img_ids) * self.take_last_percent) + self.img_ids = self.img_ids[-count:] + + new_count = len(self.img_ids) + + print(f'Taking {"first" if first_n else "last"} {new_count} of original dataset ({original_count}), ({(new_count / original_count) * 100})%') + + data_infos = [] + total_ann_ids = [] + for i in self.img_ids: + info = self.coco.load_imgs([i])[0] + info['filename'] = info['file_name'] + data_infos.append(info) + ann_ids = self.coco.get_ann_ids(img_ids=[i]) + total_ann_ids.extend(ann_ids) + assert len(set(total_ann_ids)) == len( + total_ann_ids), f"Annotation ids in '{ann_file}' are not unique!" + return data_infos + + +def get_training_datasets(labeled_dataset_percent, base_directory = '.'): + cfg = get_config(base_directory) + cfg.data.train['dataset']['take_last_percent'] = labeled_dataset_percent + dataset_finetune = build_dataset(cfg.data.train) + + if labeled_dataset_percent < 1: + cfg.data.train['dataset']['take_last_percent'] = -1 + cfg.data.train['dataset']['take_first_percent'] = 1 - labeled_dataset_percent + dataset_pretrain = build_dataset(cfg.data.train) + else: + dataset_pretrain = None + + return dataset_pretrain, dataset_finetune + +def train(experiment_name, weight_path, labeled_dataset_percent, epochs, batch_size, optim, clip, lr, resume): + cfg = get_config() + cfg.total_epochs = epochs + cfg.runner.max_epochs = epochs + cfg.data.samples_per_gpu = batch_size + + if optim=='adam': + cfg.optimizer = dict(type='Adam', lr=lr, weight_decay=0.0001) + else: + cfg.optimizer = dict(type='SGD', lr=lr, momentum=0.9, weight_decay=0.0001) + + if clip: + cfg.optimizer_config = dict(grad_clip=dict(max_norm=clip, norm_type=2)) + else: + cfg.optimizer_config = dict(grad_clip=None) + + cfg.work_dir += '/' + experiment_name + + logs_folder = os.path.join(cfg.work_dir, 'tf_logs') + + if resume: + checkpoints = os.listdir(cfg.work_dir) + checkpoints = natsorted(checkpoints) + checkpoints = [p for p in checkpoints if 'epoch_' in p] + checkpoint = os.path.join(cfg.work_dir, checkpoints[-1]) + cfg.resume_from = checkpoint + print ('initialize learning rate again') + cfg.optimizer.lr = lr + print (cfg.optimizer) + else: + if (os.path.exists(logs_folder)): + shutil.rmtree(logs_folder) + + print(cfg.model.backbone.init_cfg) + + if (os.path.exists(weight_path)): + state_dict = torch.load(weight_path) + new_state_dict = OrderedDict() + for k, v in state_dict.items(): + name = 'backbone.' 
+ k + new_state_dict[name] = v + torch.save(new_state_dict, 'tmp.pth') + cfg.load_from = 'tmp.pth' + print('Loading pretrained backbone from ' + weight_path) + + _, train_dataset = get_training_datasets(labeled_dataset_percent) + + model = build_detector(cfg.model, train_cfg=cfg.get('train_cfg')) + datasets = [train_dataset] + cfg.workflow = [('train', 1)] + cfg.device = 'cuda' + # train model + train_detector(model, datasets, cfg, distributed=False, validate=True) + +def parse_args(): + parser = argparse.ArgumentParser(description='Train using MMDet and Lightly SSL') + parser.add_argument('--experiment-name', default='no-exp') + parser.add_argument('--weight-path', type=str, required=True) + parser.add_argument('--labeled-dataset-percent', type=float, default=1) + parser.add_argument( + '--epochs', + type=int, + default=100, + help='number of epochs to train', + ) + parser.add_argument( + '--batch-size', + type=int, + default=6, + ) + parser.add_argument( + '--optim', + type=str, + default='sgd', + ) + parser.add_argument( + '--clip', + type=float, + default=0, + ) + parser.add_argument( + '--lr', + type=float, + default=0.02 / 8, + ) + parser.add_argument( + '--resume', + default=False, + action='store_true', + help='resume training from last checkpoint in work dir' + ) + args = parser.parse_args() + return args + +def main(): + args = parse_args() + train(**vars(args)) + +if __name__ == '__main__': + main() + os.remove('tmp.pth') diff --git a/object_detection/log/__init__.py b/object_detection/log/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/object_detection/mmdet_tools/mmdet_test.py b/object_detection/mmdet_tools/mmdet_test.py new file mode 100644 index 0000000000000000000000000000000000000000..2c78cd577c6dc23eabda2c2baf055e42c9dfc7b1 --- /dev/null +++ b/object_detection/mmdet_tools/mmdet_test.py @@ -0,0 +1,319 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import os +import os.path as osp +import time +import warnings + +import mmcv +import torch +from mmcv import Config, DictAction +from mmcv.cnn import fuse_conv_bn +from mmcv.parallel import MMDataParallel, MMDistributedDataParallel +from mmcv.runner import (get_dist_info, init_dist, load_checkpoint, + wrap_fp16_model) + +from mmdet.apis import multi_gpu_test, single_gpu_test +from mmdet.datasets import (build_dataloader, build_dataset, + replace_ImageToTensor) +from mmdet.models import build_detector + + +def parse_args(): + parser = argparse.ArgumentParser( + description='MMDet test (and eval) a model') + parser.add_argument('--checkpoint', help='checkpoint file') + parser.add_argument( + '--work-dir', + help='the directory to save the file containing evaluation metrics') + parser.add_argument('--out', help='output result file in pickle format') + parser.add_argument( + '--fuse-conv-bn', + action='store_true', + help='Whether to fuse conv and bn, this will slightly increase' + 'the inference speed') + parser.add_argument( + '--gpu-ids', + type=int, + nargs='+', + help='ids of gpus to use ' + '(only applicable to non-distributed testing)') + parser.add_argument( + '--format-only', + action='store_true', + help='Format the output results without perform evaluation. 
It is' + 'useful when you want to format the result to a specific format and ' + 'submit it to the test server') + parser.add_argument( + '--eval', + type=str, + nargs='+', + help='evaluation metrics, which depends on the dataset, e.g., "bbox",' + ' "segm", "proposal" for COCO, and "mAP", "recall" for PASCAL VOC') + parser.add_argument('--show', action='store_true', help='show results') + parser.add_argument( + '--show-dir', help='directory where painted images will be saved') + parser.add_argument( + '--show-score-thr', + type=float, + default=0.3, + help='score threshold (default: 0.3)') + parser.add_argument( + '--gpu-collect', + action='store_true', + help='whether to use gpu to collect results.') + parser.add_argument( + '--tmpdir', + help='tmp directory used for collecting results from multiple ' + 'workers, available when gpu-collect is not specified') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + parser.add_argument( + '--options', + nargs='+', + action=DictAction, + help='custom options for evaluation, the key-value pair in xxx=yyy ' + 'format will be kwargs for dataset.evaluate() function (deprecate), ' + 'change to --eval-options instead.') + parser.add_argument( + '--eval-options', + nargs='+', + action=DictAction, + help='custom options for evaluation, the key-value pair in xxx=yyy ' + 'format will be kwargs for dataset.evaluate() function') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + parser.add_argument('--local_rank', type=int, default=0) + args, unknown = parser.parse_known_args() + if 'LOCAL_RANK' not in os.environ: + os.environ['LOCAL_RANK'] = str(args.local_rank) + + if args.options and args.eval_options: + raise ValueError( + '--options and --eval-options cannot be both ' + 'specified, --options is deprecated in favor of --eval-options') + if args.options: + warnings.warn('--options is deprecated in favor of --eval-options') + args.eval_options = args.options + return args + +def test(cfg, checkpoint, args=None): + dataset = main(cfg, checkpoint, args) + return dataset + +def get_outputs(cfg, checkpoint, args): + cfg.model.pretrained = None + if cfg.model.get('neck'): + if isinstance(cfg.model.neck, list): + for neck_cfg in cfg.model.neck: + if neck_cfg.get('rfp_backbone'): + if neck_cfg.rfp_backbone.get('pretrained'): + neck_cfg.rfp_backbone.pretrained = None + elif cfg.model.neck.get('rfp_backbone'): + if cfg.model.neck.rfp_backbone.get('pretrained'): + cfg.model.neck.rfp_backbone.pretrained = None + + # in case the test dataset is concatenated + samples_per_gpu = 1 + if isinstance(cfg.data.test, dict): + cfg.data.test.test_mode = True + samples_per_gpu = cfg.data.test.pop('samples_per_gpu', 1) + if samples_per_gpu > 1: + # Replace 'ImageToTensor' to 'DefaultFormatBundle' + cfg.data.test.pipeline = replace_ImageToTensor( + cfg.data.test.pipeline) + elif isinstance(cfg.data.test, list): + for ds_cfg in cfg.data.test: + ds_cfg.test_mode = True + samples_per_gpu = max( + [ds_cfg.pop('samples_per_gpu', 1) for ds_cfg in cfg.data.test]) + if samples_per_gpu > 1: + for ds_cfg in 
cfg.data.test: + ds_cfg.pipeline = replace_ImageToTensor(ds_cfg.pipeline) + + cfg.gpu_ids = range(1) + distributed = False + + # build the dataloader + dataset = build_dataset(cfg.data.test) + data_loader = build_dataloader( + dataset, + samples_per_gpu=samples_per_gpu, + workers_per_gpu=cfg.data.workers_per_gpu, + dist=distributed, + shuffle=False) + + # build the model and load checkpoint + cfg.model.train_cfg = None + model = build_detector(cfg.model, test_cfg=cfg.get('test_cfg')) + fp16_cfg = cfg.get('fp16', None) + if fp16_cfg is not None: + wrap_fp16_model(model) + checkpoint = load_checkpoint(model, checkpoint, map_location='cpu') + + # old versions did not save class info in checkpoints, this walkaround is + # for backward compatibility + if 'CLASSES' in checkpoint.get('meta', {}): + model.CLASSES = checkpoint['meta']['CLASSES'] + else: + model.CLASSES = dataset.CLASSES + + model = MMDataParallel(model, device_ids=cfg.gpu_ids) + outputs = single_gpu_test(model, data_loader, args.show, args.show_dir, + args.show_score_thr) + + return dataset, outputs + + + +def main(cfg=None, checkpoint=None, args=None): + if args is None: + args = parse_args() + + assert args.out or args.eval or args.format_only or args.show \ + or args.show_dir, \ + ('Please specify at least one operation (save/eval/format/show the ' + 'results / save the results) with the argument "--out", "--eval"' + ', "--format-only", "--show" or "--show-dir"') + + if args.eval and args.format_only: + raise ValueError('--eval and --format_only cannot be both specified') + + if args.out is not None and not args.out.endswith(('.pkl', '.pickle')): + raise ValueError('The output file must be a pkl file.') + + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + # set cudnn_benchmark + if cfg.get('cudnn_benchmark', False): + torch.backends.cudnn.benchmark = True + + cfg.model.pretrained = None + if cfg.model.get('neck'): + if isinstance(cfg.model.neck, list): + for neck_cfg in cfg.model.neck: + if neck_cfg.get('rfp_backbone'): + if neck_cfg.rfp_backbone.get('pretrained'): + neck_cfg.rfp_backbone.pretrained = None + elif cfg.model.neck.get('rfp_backbone'): + if cfg.model.neck.rfp_backbone.get('pretrained'): + cfg.model.neck.rfp_backbone.pretrained = None + + # in case the test dataset is concatenated + samples_per_gpu = 1 + if isinstance(cfg.data.test, dict): + cfg.data.test.test_mode = True + samples_per_gpu = cfg.data.test.pop('samples_per_gpu', 1) + if samples_per_gpu > 1: + # Replace 'ImageToTensor' to 'DefaultFormatBundle' + cfg.data.test.pipeline = replace_ImageToTensor( + cfg.data.test.pipeline) + elif isinstance(cfg.data.test, list): + for ds_cfg in cfg.data.test: + ds_cfg.test_mode = True + samples_per_gpu = max( + [ds_cfg.pop('samples_per_gpu', 1) for ds_cfg in cfg.data.test]) + if samples_per_gpu > 1: + for ds_cfg in cfg.data.test: + ds_cfg.pipeline = replace_ImageToTensor(ds_cfg.pipeline) + + if args.gpu_ids is not None: + cfg.gpu_ids = args.gpu_ids + else: + cfg.gpu_ids = range(1) + + # init distributed env first, since logger depends on the dist info. 
+ if args.launcher == 'none': + distributed = False + if len(cfg.gpu_ids) > 1: + warnings.warn( + f'We treat {cfg.gpu_ids} as gpu-ids, and reset to ' + f'{cfg.gpu_ids[0:1]} as gpu-ids to avoid potential error in ' + 'non-distribute testing time.') + cfg.gpu_ids = cfg.gpu_ids[0:1] + else: + distributed = True + init_dist(args.launcher, **cfg.dist_params) + + rank, _ = get_dist_info() + # allows not to create + if args.work_dir is not None and rank == 0: + mmcv.mkdir_or_exist(osp.abspath(args.work_dir)) + timestamp = time.strftime('%Y%m%d_%H%M%S', time.localtime()) + json_file = osp.join(args.work_dir, f'eval_{timestamp}.json') + + # build the dataloader + dataset = build_dataset(cfg.data.test) + data_loader = build_dataloader( + dataset, + samples_per_gpu=samples_per_gpu, + workers_per_gpu=cfg.data.workers_per_gpu, + dist=distributed, + shuffle=False) + + # build the model and load checkpoint + cfg.model.train_cfg = None + model = build_detector(cfg.model, test_cfg=cfg.get('test_cfg')) + fp16_cfg = cfg.get('fp16', None) + if fp16_cfg is not None: + wrap_fp16_model(model) + checkpoint = load_checkpoint(model, checkpoint, map_location='cpu') + if args.fuse_conv_bn: + model = fuse_conv_bn(model) + # old versions did not save class info in checkpoints, this walkaround is + # for backward compatibility + if 'CLASSES' in checkpoint.get('meta', {}): + model.CLASSES = checkpoint['meta']['CLASSES'] + else: + model.CLASSES = dataset.CLASSES + + if not distributed: + model = MMDataParallel(model, device_ids=cfg.gpu_ids) + outputs = single_gpu_test(model, data_loader, args.show, args.show_dir, + args.show_score_thr) + else: + model = MMDistributedDataParallel( + model.cuda(), + device_ids=[torch.cuda.current_device()], + broadcast_buffers=False) + outputs = multi_gpu_test(model, data_loader, args.tmpdir, + args.gpu_collect) + + rank, _ = get_dist_info() + if rank == 0: + if args.out: + print(f'\nwriting results to {args.out}') + mmcv.dump(outputs, args.out) + kwargs = {} if args.eval_options is None else args.eval_options + if args.format_only: + dataset.format_results(outputs, **kwargs) + if args.eval: + eval_kwargs = cfg.get('evaluation', {}).copy() + # hard-code way to remove EvalHook args + for key in [ + 'interval', 'tmpdir', 'start', 'gpu_collect', 'save_best', + 'rule', 'dynamic_intervals' + ]: + eval_kwargs.pop(key, None) + eval_kwargs.update(dict(metric=args.eval, **kwargs)) + metric = dataset.evaluate(outputs, **eval_kwargs) + metric_dict = dict(metric=metric) + if args.work_dir is not None and rank == 0: + mmcv.dump(metric_dict, json_file) + + return dataset + + +if __name__ == '__main__': + main() diff --git a/object_detection/test_one_sequences.py b/object_detection/test_one_sequences.py new file mode 100644 index 0000000000000000000000000000000000000000..b2d824722078cd2d178ff1a0867ec1b663f192dd --- /dev/null +++ b/object_detection/test_one_sequences.py @@ -0,0 +1,288 @@ +import sys +import os +from natsort import natsorted +import argparse +from collections import OrderedDict +import io +import contextlib +import itertools +import numpy as np +import mmcv +from mmdet.datasets.api_wrappers import COCO, COCOeval +sys.path.append('/home/caduser/KOTORI/vin-ssl/source') +os.chdir('/home/caduser/KOTORI/vin-ssl/source') + +from base_config_track import get_config +from mmdet_tools import mmdet_test + +def print_log(msg, logger): + pass + #print(msg) + +def evaluate(dataset, results, metric='bbox', logger=None, jsonfile_prefix=None, classwise=False, proposal_nums=(100, 300, 1000), iou_thrs=None, 
metric_items=None): + """Evaluation in COCO protocol. + Args: + results (list[list | tuple]): Testing results of the dataset. + metric (str | list[str]): Metrics to be evaluated. Options are + 'bbox', 'segm', 'proposal', 'proposal_fast'. + logger (logging.Logger | str | None): Logger used for printing + related information during evaluation. Default: None. + jsonfile_prefix (str | None): The prefix of json files. It includes + the file path and the prefix of filename, e.g., "a/b/prefix". + If not specified, a temp file will be created. Default: None. + classwise (bool): Whether to evaluating the AP for each class. + proposal_nums (Sequence[int]): Proposal number used for evaluating + recalls, such as recall@100, recall@1000. + Default: (100, 300, 1000). + iou_thrs (Sequence[float], optional): IoU threshold used for + evaluating recalls/mAPs. If set to a list, the average of all + IoUs will also be computed. If not specified, [0.50, 0.55, + 0.60, 0.65, 0.70, 0.75, 0.80, 0.85, 0.90, 0.95] will be used. + Default: None. + metric_items (list[str] | str, optional): Metric items that will + be returned. If not specified, ``['AR@100', 'AR@300', + 'AR@1000', 'AR_s@1000', 'AR_m@1000', 'AR_l@1000' ]`` will be + used when ``metric=='proposal'``, ``['mAP', 'mAP_50', 'mAP_75', + 'mAP_s', 'mAP_m', 'mAP_l']`` will be used when + ``metric=='bbox' or metric=='segm'``. + Returns: + dict[str, float]: COCO style evaluation metric. + """ + metrics = metric if isinstance(metric, list) else [metric] + allowed_metrics = ['bbox', 'segm', 'proposal', 'proposal_fast'] + for metric in metrics: + if metric not in allowed_metrics: + raise KeyError(f'metric {metric} is not supported') + if iou_thrs is None: + iou_thrs = np.linspace( + .5, 0.95, int(np.round((0.95 - .5) / .05)) + 1, endpoint=True) + if metric_items is not None: + if not isinstance(metric_items, list): + metric_items = [metric_items] + + result_files, tmp_dir = dataset.format_results(results, jsonfile_prefix) + + eval_results = OrderedDict() + cocoGt = dataset.coco + + results_per_category = [] + + for metric in metrics: + msg = f'Evaluating {metric}...' + if logger is None: + msg = '\n' + msg + print_log(msg, logger=logger) + + if metric == 'proposal_fast': + ar = dataset.fast_eval_recall( + results, proposal_nums, iou_thrs, logger='silent') + log_msg = [] + for i, num in enumerate(proposal_nums): + eval_results[f'AR@{num}'] = ar[i] + log_msg.append(f'\nAR@{num}\t{ar[i]:.4f}') + log_msg = ''.join(log_msg) + print_log(log_msg, logger=logger) + continue + + iou_type = 'bbox' if metric == 'proposal' else metric + if metric not in result_files: + raise KeyError(f'{metric} is not in results') + try: + predictions = mmcv.load(result_files[metric]) + if iou_type == 'segm': + # Refer to https://github.com/cocodataset/cocoapi/blob/master/PythonAPI/pycocotools/coco.py#L331 # noqa + # When evaluating mask AP, if the results contain bbox, + # cocoapi will use the box area instead of the mask area + # for calculating the instance area. Though the overall AP + # is not affected, this leads to different + # small/medium/large mask AP results. + for x in predictions: + x.pop('bbox') + warnings.simplefilter('once') + warnings.warn( + 'The key "bbox" is deleted for more accurate mask AP ' + 'of small/medium/large instances since v2.12.0. 
This ' + 'does not change the overall mAP calculation.', + UserWarning) + cocoDt = cocoGt.loadRes(predictions) + except IndexError: + print_log( + 'The testing results of the whole dataset is empty.', + logger=logger, + level=logging.ERROR) + break + + cocoEval = COCOeval(cocoGt, cocoDt, iou_type) + cocoEval.params.catIds = dataset.cat_ids + cocoEval.params.imgIds = dataset.img_ids + cocoEval.params.maxDets = list(proposal_nums) + cocoEval.params.iouThrs = iou_thrs + # mapping of cocoEval.stats + coco_metric_names = { + 'mAP': 0, + 'mAP_50': 1, + 'mAP_75': 2, + 'mAP_s': 3, + 'mAP_m': 4, + 'mAP_l': 5, + 'AR@100': 6, + 'AR@300': 7, + 'AR@1000': 8, + 'AR_s@1000': 9, + 'AR_m@1000': 10, + 'AR_l@1000': 11 + } + if metric_items is not None: + for metric_item in metric_items: + if metric_item not in coco_metric_names: + raise KeyError( + f'metric item {metric_item} is not supported') + + if metric == 'proposal': + cocoEval.params.useCats = 0 + cocoEval.evaluate() + cocoEval.accumulate() + + # Save coco summarize print information to logger + redirect_string = io.StringIO() + with contextlib.redirect_stdout(redirect_string): + cocoEval.summarize() + print_log('\n' + redirect_string.getvalue(), logger=logger) + + if metric_items is None: + metric_items = [ + 'AR@100', 'AR@300', 'AR@1000', 'AR_s@1000', + 'AR_m@1000', 'AR_l@1000' + ] + + for item in metric_items: + val = float( + f'{cocoEval.stats[coco_metric_names[item]]:.3f}') + eval_results[item] = val + else: + cocoEval.evaluate() + cocoEval.accumulate() + + # Save coco summarize print information to logger + redirect_string = io.StringIO() + with contextlib.redirect_stdout(redirect_string): + cocoEval.summarize() + print_log('\n' + redirect_string.getvalue(), logger=logger) + + if classwise: # Compute per-category AP + # Compute per-category AP + # from https://github.com/facebookresearch/detectron2/ + precisions = cocoEval.eval['precision'] + # precision: (iou, recall, cls, area range, max dets) + assert len(dataset.cat_ids) == precisions.shape[2] + + for idx, catId in enumerate(dataset.cat_ids): + # area range index 0: all area ranges + # max dets index -1: typically 100 per image + nm = dataset.coco.loadCats(catId)[0] + precision = precisions[:, :, idx, 0, -1] + precision = precision[precision > -1] + if precision.size: + ap = np.mean(precision) + else: + ap = float('nan') + results_per_category.append( + (f'{nm["name"]}', float(ap))) + + num_columns = min(6, len(results_per_category) * 2) + results_flatten = list( + itertools.chain(*results_per_category)) + headers = ['category', 'AP'] * (num_columns // 2) + results_2d = itertools.zip_longest(*[ + results_flatten[i::num_columns] + for i in range(num_columns) + ]) + + if metric_items is None: + metric_items = [ + 'mAP', 'mAP_50', 'mAP_75', 'mAP_s', 'mAP_m', 'mAP_l' + ] + + for metric_item in metric_items: + key = f'{metric}_{metric_item}' + val = float( + f'{cocoEval.stats[coco_metric_names[metric_item]]:.3f}' + ) + eval_results[key] = val + ap = cocoEval.stats[:6] + eval_results[f'{metric}_mAP_copypaste'] = ( + f'{ap[0]:.3f} {ap[1]:.3f} {ap[2]:.3f} {ap[3]:.3f} ' + f'{ap[4]:.3f} {ap[5]:.3f}') + if tmp_dir is not None: + tmp_dir.cleanup() + return eval_results, results_per_category + +# define parse +def get_args(): + parser = argparse.ArgumentParser(description='Test trained object detection model') + parser.add_argument( + '--experiment_name', '-exp-name', type=str, default='no-exp',help='providing folder store checkpoint models') + return parser.parse_args() + +if __name__ == "__main__": + + 
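+    # Evaluate the selected checkpoints stored under ../trained_weights/<experiment_name>:
+    # run inference through mmdet_test.get_outputs, then report COCO-style bbox
+    # (optionally class-wise) and proposal (AR) metrics for each checkpoint.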
args = get_args() + experiment_name = args.experiment_name + print ("**********" * 3) + print ('Staring evaluation process') + checkpoints = os.listdir(os.path.join('../trained_weights', experiment_name)) + checkpoints = natsorted(checkpoints) + checkpoints = [p for p in checkpoints if 'epoch_' in p] + # checkpoint = os.path.join('../trained_weights', experiment_name, checkpoints[-1]) + + selected_checkpoints = checkpoints[-1:] # change the number of models want to infer here. + dict_results = {} + valid_dict_results = {} + eval_on_valid = False + + for checkpoint_name in selected_checkpoints: + print ('-----'*5) + print ('Processing for checkpoint', checkpoint_name) + checkpoint = os.path.join('../trained_weights', experiment_name, checkpoint_name) + + results = {} + results_dir = 'results' + os.makedirs(results_dir, exist_ok=True) + + results_avg = [] + results_avg_ar = [] + results_classwise = [] + + cfg = get_config() + + if eval_on_valid: + cfg.data.test['img_prefix'] = './data/' # uncomment lines 267-268 for inference on validation set + cfg.data.test['ann_file'] = './data/valid_annotations.json' + + args_result = argparse.Namespace(eval='bbox', out='results/' + experiment_name + '.pkl', checkpoint=None, work_dir=results_dir, fuse_conv_bn=None, + gpu_ids=None, format_only=None, show=None, show_dir=None, show_score_thr=0.3, gpu_collect=None, + tmpdir=None, cfg_options=None, options=None, launcher='none', eval_options=None, local_rank=0) + + dataset, outputs = mmdet_test.get_outputs(cfg, checkpoint, args_result) + + metrics, results_per_category = evaluate(dataset, outputs, metric='bbox', classwise=True) #, iou_thrs=[0.5]) + metrics_ar, _ = evaluate(dataset, outputs, metric='proposal') + results_avg.append([experiment_name, metrics]) + results_avg_ar.append([experiment_name, metrics_ar]) + results_classwise.append([experiment_name, OrderedDict(results_per_category)]) + + print('--------------------------------') + valid_dict_results[checkpoint_name] = [] + print('Average Precision') + print(list(results_avg[0][1].keys())[:-1]) + + valid_dict_results[checkpoint_name].append(list(results_avg[0][1].keys())[:-1]) # append output to valid_dict_results + + for res in results_avg: + print([res[0], list(res[1].values())[:-1]]) + valid_dict_results[checkpoint_name].append([res[0], list(res[1].values())[:-1]]) + + dict_results[checkpoint_name] = list(results_avg[0][1].values())[1] + print ("Results on testing set") + print (valid_dict_results) + print("**********" * 3) diff --git a/object_detection/tmp.pth b/object_detection/tmp.pth new file mode 100644 index 0000000000000000000000000000000000000000..7551ed32a0e9faa1faf9c87aafa2b1cca88c103f --- /dev/null +++ b/object_detection/tmp.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bf7182f35427b08eee1e5faffddd213c5833357deea1212cb048428c4270698e +size 94345605 diff --git a/onnx_model/README.md b/onnx_model/README.md new file mode 100644 index 0000000000000000000000000000000000000000..17d556712029c5a290229894301dc98dabec0cc4 --- /dev/null +++ b/onnx_model/README.md @@ -0,0 +1,28 @@ +
+# Onnx support for LVM-Med
+
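+As a quick sanity check before wiring an exported graph into a runtime, the sketch below (a minimal example; the relative paths assume the repository root as the working directory) validates a released model and prints the input/output tensors it declares:
+
+```python
+import onnx
+
+# Assumption: run from the repository root so the relative path resolves.
+model = onnx.load("onnx_model/lvmmed_vit.onnx")  # or "onnx_model/lvmmed_rn50.onnx"
+onnx.checker.check_model(model)
+
+def describe(tensors, kind):
+    # Each entry is a ValueInfoProto; dims may be symbolic (e.g. 'batch_size').
+    for t in tensors:
+        dims = [d.dim_param or d.dim_value for d in t.type.tensor_type.shape.dim]
+        print(kind, t.name, dims)
+
+describe(model.graph.input, "input ")
+describe(model.graph.output, "output")
+```
+Because the export in [`torch2onnx.py`](./torch2onnx.py) uses `export_params=False`, the model weights are listed as graph inputs rather than embedded as constants.
+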
+ +- Open Neural Network Exchange ([ONNX](https://github.com/onnx/onnx)) is an open ecosystem that empowers AI developers to choose the right tools as their project evolves. ONNX provides an open source format for AI models, both deep learning and traditional ML. It defines an extensible computation graph model, as well as definitions of built-in operators and standard data types. Currently ONNX concentrates on the capabilities needed for inferencing (scoring). + +- ONNX is widely supported and can be found in many frameworks, tools, and hardware. Enabling interoperability between different frameworks and streamlining the path from research to production helps increase the speed of innovation in the AI community. + +- Here, we release 2 versions of onnx models for LVM-Med which are based on Resnet-50 backbone ([`lvmmed_rn50.onnx`](./lvmmed_rn50.onnx)) and ViT backbone ([`lvmmed_vit.onnx`](./lvmmed_vit.onnx)). Also, we also release our code to transform any other LVM-Med based models into ONNX models [`torch2onnx.py`](./torch2onnx.py). + +### Onnx in LVM-Med +- To load onnx LVM-Med models: +```python +""" +Load ONNX model +""" +onnx_model_rn50 = onnx.load("onnx_model/lvmmed_rn50.onnx") # If ResNet-50 backbone +onnx_model_vit = onnx.load("onnx_model/lvmmed_vit.onnx") # If ViT backbone + +""" +Check ONNX +""" +try: + onnx.checker.check_model(onnx_model_rn50) + print("RN50 onnx passed!") +except: + print("RN50 onnx failed!") +``` \ No newline at end of file diff --git a/onnx_model/load_onnx.py b/onnx_model/load_onnx.py new file mode 100644 index 0000000000000000000000000000000000000000..9cb15727cbaecb6c382d2c0ddd96a9ffc1676d41 --- /dev/null +++ b/onnx_model/load_onnx.py @@ -0,0 +1,17 @@ +import onnx +""" +Unit test for onnx model +""" +onnx_model_rn50 = onnx.load("onnx_model/lvmmed_rn50.onnx") +try: + onnx.checker.check_model(onnx_model_rn50) + print("[Unit test] RN50 onnx passed!") +except: + print("[Unit test] RN50 onnx failed!") + +onnx_model_vit = onnx.load("onnx_model/lvmmed_vit.onnx") +try: + onnx.checker.check_model(onnx_model_vit) + print("[Unit test] ViT onnx passed!") +except: + print("[Unit test] ViT onnx failed!") \ No newline at end of file diff --git a/onnx_model/lvmmed_rn50.onnx b/onnx_model/lvmmed_rn50.onnx new file mode 100644 index 0000000000000000000000000000000000000000..c48c64172bdc303e1a0bceadb63ff392b848c2d8 --- /dev/null +++ b/onnx_model/lvmmed_rn50.onnx @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dd2d0759a95a87fd9999eccdc10c1abc5e627c77ab8a21e4ec1121f6062313d0 +size 34768 diff --git a/onnx_model/lvmmed_vit.onnx b/onnx_model/lvmmed_vit.onnx new file mode 100644 index 0000000000000000000000000000000000000000..c9a315c5b75522862ae37f5c1bc9365611f2711f --- /dev/null +++ b/onnx_model/lvmmed_vit.onnx @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:79e73c1c419c828c1b0b492eeb526fb01f45504ab34801137b609d5188f7fc76 +size 253661 diff --git a/onnx_model/torch2onnx.py b/onnx_model/torch2onnx.py new file mode 100644 index 0000000000000000000000000000000000000000..7c56c2d5272725478233f04d0d8a5e1c57f91e4a --- /dev/null +++ b/onnx_model/torch2onnx.py @@ -0,0 +1,51 @@ +import torch.onnx +import torch +from torch import nn +from torch.nn import functional as F + +import onnx +from transformers import AutoModel + + +def export_onnx(example_input: torch.Tensor, + model, + onnx_model_name) -> None: + torch.onnx.export( + model, + example_input, + onnx_model_name, + export_params=False, + opset_version=10, + do_constant_folding=True, + 
input_names=['input'], + output_names=['output'], + dynamic_axes={ + 'input' : { + 0 : 'batch_size' + }, + 'output' : { + 0 : 'batch_size' + } + } + ) + +if __name__ == "__main__": + """ + Export LVM-Med (RN50 version) + """ + example_input_rn50 = torch.ones(1, 3, 1024, 1024) + lvmmed_rn50 = AutoModel.from_pretrained('ngctnnnn/lvmmed_rn50') + example_output_rn50 = lvmmed_rn50(example_input_rn50)['pooler_output'] + print(f"Example output for LVM-Med (RN50)'s shape: {example_output_rn50.shape}") + + export_onnx(example_input_rn50, lvmmed_rn50, onnx_model_name="onnx_model/lvmmed_rn50.onnx") + + """ + Export LVM-Med (ViT) + """ + example_input_vit = torch.ones(1, 3, 224, 224) + lvmmed_vit = AutoModel.from_pretrained('ngctnnnn/lvmmed_vit') + example_output_vit = lvmmed_vit(example_input_vit)['pooler_output'] + print(f"Example output for LVM-Med (RN50)'s shape: {example_output_vit.shape}") + + export_onnx(example_input_vit, lvmmed_vit, onnx_model_name="onnx_model/lvmmed_vit.onnx") \ No newline at end of file diff --git a/prepare_dataset.py b/prepare_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..14f7a9c847548e206e24cfe86860fb6009b87550 --- /dev/null +++ b/prepare_dataset.py @@ -0,0 +1,24 @@ +import argparse +from datasets_split import Kvasir_split, BUID_split, FGADR_split, MMWHS_MR_Heart_split, MMWHS_CT_Heart_split + +def get_args(): + parser = argparse.ArgumentParser(description='Train the UNet on images and target masks') + parser.add_argument('--dataset_name', '-ds', metavar='DS', type=str, default="", help='Name of dataset') + + return parser.parse_args() + +if __name__ == '__main__': + args = get_args() + + if args.dataset_name == "Kvasir": + Kvasir_split.Kvasir_split() + elif args.dataset_name == "BUID": + BUID_split.BUID_split() + elif args.dataset_name == "FGADR": + FGADR_split.FGADR_split() + elif args.dataset_name == "MMWHS_MR_Heart": + MMWHS_MR_Heart_split.MMWHS_MR_Heart_split() + elif args.dataset_name == "MMWHS_CT_Heart": + MMWHS_CT_Heart_split.MMWHS_CT_Heart_split() + else: + print("Let's input dataset name") \ No newline at end of file diff --git a/segment_anything/__init__.py b/segment_anything/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..65fe71691ee281310fb821441bc6d14285044322 --- /dev/null +++ b/segment_anything/__init__.py @@ -0,0 +1,15 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +from .build_sam import ( + build_sam, + build_sam_vit_h, + build_sam_vit_l, + build_sam_vit_b, + sam_model_registry, +) +from .predictor import SamPredictor +from .automatic_mask_generator import SamAutomaticMaskGenerator \ No newline at end of file diff --git a/segment_anything/automatic_mask_generator.py b/segment_anything/automatic_mask_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..d5a8c969207f119feff7087f94e044403acdff00 --- /dev/null +++ b/segment_anything/automatic_mask_generator.py @@ -0,0 +1,372 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
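+# Usage sketch (the checkpoint path is a placeholder supplied by the caller):
+#
+#   from segment_anything import sam_model_registry, SamAutomaticMaskGenerator
+#   sam = sam_model_registry["vit_b"](checkpoint="path/to/checkpoint.pth")
+#   mask_generator = SamAutomaticMaskGenerator(sam)
+#   masks = mask_generator.generate(image)  # image: HWC uint8 numpy array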
+ +import numpy as np +import torch +from torchvision.ops.boxes import batched_nms, box_area # type: ignore + +from typing import Any, Dict, List, Optional, Tuple + +from .modeling import Sam +from .predictor import SamPredictor +from .utils.amg import ( + MaskData, + area_from_rle, + batch_iterator, + batched_mask_to_box, + box_xyxy_to_xywh, + build_all_layer_point_grids, + calculate_stability_score, + coco_encode_rle, + generate_crop_boxes, + is_box_near_crop_edge, + mask_to_rle_pytorch, + remove_small_regions, + rle_to_mask, + uncrop_boxes_xyxy, + uncrop_masks, + uncrop_points, +) + + +class SamAutomaticMaskGenerator: + def __init__( + self, + model: Sam, + points_per_side: Optional[int] = 32, + points_per_batch: int = 64, + pred_iou_thresh: float = 0.88, + stability_score_thresh: float = 0.95, + stability_score_offset: float = 1.0, + box_nms_thresh: float = 0.7, + crop_n_layers: int = 0, + crop_nms_thresh: float = 0.7, + crop_overlap_ratio: float = 512 / 1500, + crop_n_points_downscale_factor: int = 1, + point_grids: Optional[List[np.ndarray]] = None, + min_mask_region_area: int = 0, + output_mode: str = "binary_mask", + ) -> None: + """ + Using a SAM model, generates masks for the entire image. + Generates a grid of point prompts over the image, then filters + low quality and duplicate masks. The default settings are chosen + for SAM with a ViT-H backbone. + + Arguments: + model (Sam): The SAM model to use for mask prediction. + points_per_side (int or None): The number of points to be sampled + along one side of the image. The total number of points is + points_per_side**2. If None, 'point_grids' must provide explicit + point sampling. + points_per_batch (int): Sets the number of points run simultaneously + by the model. Higher numbers may be faster but use more GPU memory. + pred_iou_thresh (float): A filtering threshold in [0,1], using the + model's predicted mask quality. + stability_score_thresh (float): A filtering threshold in [0,1], using + the stability of the mask under changes to the cutoff used to binarize + the model's mask predictions. + stability_score_offset (float): The amount to shift the cutoff when + calculated the stability score. + box_nms_thresh (float): The box IoU cutoff used by non-maximal + suppression to filter duplicate masks. + crop_n_layers (int): If >0, mask prediction will be run again on + crops of the image. Sets the number of layers to run, where each + layer has 2**i_layer number of image crops. + crop_nms_thresh (float): The box IoU cutoff used by non-maximal + suppression to filter duplicate masks between different crops. + crop_overlap_ratio (float): Sets the degree to which crops overlap. + In the first crop layer, crops will overlap by this fraction of + the image length. Later layers with more crops scale down this overlap. + crop_n_points_downscale_factor (int): The number of points-per-side + sampled in layer n is scaled down by crop_n_points_downscale_factor**n. + point_grids (list(np.ndarray) or None): A list over explicit grids + of points used for sampling, normalized to [0,1]. The nth grid in the + list is used in the nth crop layer. Exclusive with points_per_side. + min_mask_region_area (int): If >0, postprocessing will be applied + to remove disconnected regions and holes in masks with area smaller + than min_mask_region_area. Requires opencv. + output_mode (str): The form masks are returned in. Can be 'binary_mask', + 'uncompressed_rle', or 'coco_rle'. 'coco_rle' requires pycocotools. 
+ For large resolutions, 'binary_mask' may consume large amounts of + memory. + """ + + assert (points_per_side is None) != ( + point_grids is None + ), "Exactly one of points_per_side or point_grid must be provided." + if points_per_side is not None: + self.point_grids = build_all_layer_point_grids( + points_per_side, + crop_n_layers, + crop_n_points_downscale_factor, + ) + elif point_grids is not None: + self.point_grids = point_grids + else: + raise ValueError("Can't have both points_per_side and point_grid be None.") + + assert output_mode in [ + "binary_mask", + "uncompressed_rle", + "coco_rle", + ], f"Unknown output_mode {output_mode}." + if output_mode == "coco_rle": + from pycocotools import mask as mask_utils # type: ignore # noqa: F401 + + if min_mask_region_area > 0: + import cv2 # type: ignore # noqa: F401 + + self.predictor = SamPredictor(model) + self.points_per_batch = points_per_batch + self.pred_iou_thresh = pred_iou_thresh + self.stability_score_thresh = stability_score_thresh + self.stability_score_offset = stability_score_offset + self.box_nms_thresh = box_nms_thresh + self.crop_n_layers = crop_n_layers + self.crop_nms_thresh = crop_nms_thresh + self.crop_overlap_ratio = crop_overlap_ratio + self.crop_n_points_downscale_factor = crop_n_points_downscale_factor + self.min_mask_region_area = min_mask_region_area + self.output_mode = output_mode + + @torch.no_grad() + def generate(self, image: np.ndarray) -> List[Dict[str, Any]]: + """ + Generates masks for the given image. + + Arguments: + image (np.ndarray): The image to generate masks for, in HWC uint8 format. + + Returns: + list(dict(str, any)): A list over records for masks. Each record is + a dict containing the following keys: + segmentation (dict(str, any) or np.ndarray): The mask. If + output_mode='binary_mask', is an array of shape HW. Otherwise, + is a dictionary containing the RLE. + bbox (list(float)): The box around the mask, in XYWH format. + area (int): The area in pixels of the mask. + predicted_iou (float): The model's own prediction of the mask's + quality. This is filtered by the pred_iou_thresh parameter. + point_coords (list(list(float))): The point coordinates input + to the model to generate this mask. + stability_score (float): A measure of the mask's quality. This + is filtered on using the stability_score_thresh parameter. + crop_box (list(float)): The crop of the image used to generate + the mask, given in XYWH format. 
+ """ + + # Generate masks + mask_data = self._generate_masks(image) + + # Filter small disconnected regions and holes in masks + if self.min_mask_region_area > 0: + mask_data = self.postprocess_small_regions( + mask_data, + self.min_mask_region_area, + max(self.box_nms_thresh, self.crop_nms_thresh), + ) + + # Encode masks + if self.output_mode == "coco_rle": + mask_data["segmentations"] = [coco_encode_rle(rle) for rle in mask_data["rles"]] + elif self.output_mode == "binary_mask": + mask_data["segmentations"] = [rle_to_mask(rle) for rle in mask_data["rles"]] + else: + mask_data["segmentations"] = mask_data["rles"] + + # Write mask records + curr_anns = [] + for idx in range(len(mask_data["segmentations"])): + ann = { + "segmentation": mask_data["segmentations"][idx], + "area": area_from_rle(mask_data["rles"][idx]), + "bbox": box_xyxy_to_xywh(mask_data["boxes"][idx]).tolist(), + "predicted_iou": mask_data["iou_preds"][idx].item(), + "point_coords": [mask_data["points"][idx].tolist()], + "stability_score": mask_data["stability_score"][idx].item(), + "crop_box": box_xyxy_to_xywh(mask_data["crop_boxes"][idx]).tolist(), + } + curr_anns.append(ann) + + return curr_anns + + def _generate_masks(self, image: np.ndarray) -> MaskData: + orig_size = image.shape[:2] + crop_boxes, layer_idxs = generate_crop_boxes( + orig_size, self.crop_n_layers, self.crop_overlap_ratio + ) + + # Iterate over image crops + data = MaskData() + for crop_box, layer_idx in zip(crop_boxes, layer_idxs): + crop_data = self._process_crop(image, crop_box, layer_idx, orig_size) + data.cat(crop_data) + + # Remove duplicate masks between crops + if len(crop_boxes) > 1: + # Prefer masks from smaller crops + scores = 1 / box_area(data["crop_boxes"]) + scores = scores.to(data["boxes"].device) + keep_by_nms = batched_nms( + data["boxes"].float(), + scores, + torch.zeros_like(data["boxes"][:, 0]), # categories + iou_threshold=self.crop_nms_thresh, + ) + data.filter(keep_by_nms) + + data.to_numpy() + return data + + def _process_crop( + self, + image: np.ndarray, + crop_box: List[int], + crop_layer_idx: int, + orig_size: Tuple[int, ...], + ) -> MaskData: + # Crop the image and calculate embeddings + x0, y0, x1, y1 = crop_box + cropped_im = image[y0:y1, x0:x1, :] + cropped_im_size = cropped_im.shape[:2] + self.predictor.set_image(cropped_im) + + # Get points for this crop + points_scale = np.array(cropped_im_size)[None, ::-1] + points_for_image = self.point_grids[crop_layer_idx] * points_scale + + # Generate masks for this crop in batches + data = MaskData() + for (points,) in batch_iterator(self.points_per_batch, points_for_image): + batch_data = self._process_batch(points, cropped_im_size, crop_box, orig_size) + data.cat(batch_data) + del batch_data + self.predictor.reset_image() + + # Remove duplicates within this crop. 
+ keep_by_nms = batched_nms( + data["boxes"].float(), + data["iou_preds"], + torch.zeros_like(data["boxes"][:, 0]), # categories + iou_threshold=self.box_nms_thresh, + ) + data.filter(keep_by_nms) + + # Return to the original image frame + data["boxes"] = uncrop_boxes_xyxy(data["boxes"], crop_box) + data["points"] = uncrop_points(data["points"], crop_box) + data["crop_boxes"] = torch.tensor([crop_box for _ in range(len(data["rles"]))]) + + return data + + def _process_batch( + self, + points: np.ndarray, + im_size: Tuple[int, ...], + crop_box: List[int], + orig_size: Tuple[int, ...], + ) -> MaskData: + orig_h, orig_w = orig_size + + # Run model on this batch + transformed_points = self.predictor.transform.apply_coords(points, im_size) + in_points = torch.as_tensor(transformed_points, device=self.predictor.device) + in_labels = torch.ones(in_points.shape[0], dtype=torch.int, device=in_points.device) + masks, iou_preds, _ = self.predictor.predict_torch( + in_points[:, None, :], + in_labels[:, None], + multimask_output=True, + return_logits=True, + ) + + # Serialize predictions and store in MaskData + data = MaskData( + masks=masks.flatten(0, 1), + iou_preds=iou_preds.flatten(0, 1), + points=torch.as_tensor(points.repeat(masks.shape[1], axis=0)), + ) + del masks + + # Filter by predicted IoU + if self.pred_iou_thresh > 0.0: + keep_mask = data["iou_preds"] > self.pred_iou_thresh + data.filter(keep_mask) + + # Calculate stability score + data["stability_score"] = calculate_stability_score( + data["masks"], self.predictor.model.mask_threshold, self.stability_score_offset + ) + if self.stability_score_thresh > 0.0: + keep_mask = data["stability_score"] >= self.stability_score_thresh + data.filter(keep_mask) + + # Threshold masks and calculate boxes + data["masks"] = data["masks"] > self.predictor.model.mask_threshold + data["boxes"] = batched_mask_to_box(data["masks"]) + + # Filter boxes that touch crop boundaries + keep_mask = ~is_box_near_crop_edge(data["boxes"], crop_box, [0, 0, orig_w, orig_h]) + if not torch.all(keep_mask): + data.filter(keep_mask) + + # Compress to RLE + data["masks"] = uncrop_masks(data["masks"], crop_box, orig_h, orig_w) + data["rles"] = mask_to_rle_pytorch(data["masks"]) + del data["masks"] + + return data + + @staticmethod + def postprocess_small_regions( + mask_data: MaskData, min_area: int, nms_thresh: float + ) -> MaskData: + """ + Removes small disconnected regions and holes in masks, then reruns + box NMS to remove any new duplicates. + + Edits mask_data in place. + + Requires open-cv as a dependency. 
+ """ + if len(mask_data["rles"]) == 0: + return mask_data + + # Filter small disconnected regions and holes + new_masks = [] + scores = [] + for rle in mask_data["rles"]: + mask = rle_to_mask(rle) + + mask, changed = remove_small_regions(mask, min_area, mode="holes") + unchanged = not changed + mask, changed = remove_small_regions(mask, min_area, mode="islands") + unchanged = unchanged and not changed + + new_masks.append(torch.as_tensor(mask).unsqueeze(0)) + # Give score=0 to changed masks and score=1 to unchanged masks + # so NMS will prefer ones that didn't need postprocessing + scores.append(float(unchanged)) + + # Recalculate boxes and remove any new duplicates + masks = torch.cat(new_masks, dim=0) + boxes = batched_mask_to_box(masks) + keep_by_nms = batched_nms( + boxes.float(), + torch.as_tensor(scores), + torch.zeros_like(boxes[:, 0]), # categories + iou_threshold=nms_thresh, + ) + + # Only recalculate RLEs for masks that have changed + for i_mask in keep_by_nms: + if scores[i_mask] == 0.0: + mask_torch = masks[i_mask].unsqueeze(0) + mask_data["rles"][i_mask] = mask_to_rle_pytorch(mask_torch)[0] + mask_data["boxes"][i_mask] = boxes[i_mask] # update res directly + mask_data.filter(keep_by_nms) + + return mask_data diff --git a/segment_anything/build_sam.py b/segment_anything/build_sam.py new file mode 100644 index 0000000000000000000000000000000000000000..58fcfb1f269a5c08ce15b5d427af5eb14283839f --- /dev/null +++ b/segment_anything/build_sam.py @@ -0,0 +1,103 @@ +import torch + +from functools import partial + +from .modeling import ImageEncoderViT, MaskDecoder, PromptEncoder, Sam, TwoWayTransformer + + +def build_sam_vit_h(checkpoint=None): + return _build_sam( + encoder_embed_dim=1280, + encoder_depth=32, + encoder_num_heads=16, + encoder_global_attn_indexes=[7, 15, 23, 31], + checkpoint=checkpoint, + ) + + +build_sam = build_sam_vit_h + + +def build_sam_vit_l(checkpoint=None): + return _build_sam( + encoder_embed_dim=1024, + encoder_depth=24, + encoder_num_heads=16, + encoder_global_attn_indexes=[5, 11, 17, 23], + checkpoint=checkpoint, + ) + + +def build_sam_vit_b(checkpoint=None): + return _build_sam( + encoder_embed_dim=768, + encoder_depth=12, + encoder_num_heads=12, + encoder_global_attn_indexes=[2, 5, 8, 11], + checkpoint=checkpoint, + ) + + +sam_model_registry = { + "default": build_sam_vit_h, + "vit_h": build_sam_vit_h, + "vit_l": build_sam_vit_l, + "vit_b": build_sam_vit_b, +} + + +def _build_sam( + encoder_embed_dim, + encoder_depth, + encoder_num_heads, + encoder_global_attn_indexes, + checkpoint=None, +): + prompt_embed_dim = 256 + image_size = 1024 + vit_patch_size = 16 + image_embedding_size = image_size // vit_patch_size + sam = Sam( + image_encoder=ImageEncoderViT( + depth=encoder_depth, + embed_dim=encoder_embed_dim, + img_size=image_size, + mlp_ratio=4, + norm_layer=partial(torch.nn.LayerNorm, eps=1e-6), + num_heads=encoder_num_heads, + patch_size=vit_patch_size, + qkv_bias=True, + use_rel_pos=True, + global_attn_indexes=encoder_global_attn_indexes, + window_size=14, + out_chans=prompt_embed_dim, + ), + prompt_encoder=PromptEncoder( + embed_dim=prompt_embed_dim, + image_embedding_size=(image_embedding_size, image_embedding_size), + input_image_size=(image_size, image_size), + mask_in_chans=16, + ), + mask_decoder=MaskDecoder( + num_multimask_outputs=3, + transformer=TwoWayTransformer( + depth=2, + embedding_dim=prompt_embed_dim, + mlp_dim=2048, + num_heads=8, + ), + transformer_dim=prompt_embed_dim, + iou_head_depth=3, + iou_head_hidden_dim=256, + ), + 
pixel_mean=[123.675, 116.28, 103.53], + pixel_std=[58.395, 57.12, 57.375], + ) + sam.eval() + device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') + + if checkpoint is not None: + with open(checkpoint, "rb") as f: + state_dict = torch.load(f, map_location=device) + sam.load_state_dict(state_dict) + return sam diff --git a/segment_anything/modeling/__init__.py b/segment_anything/modeling/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..38e906243d898d7fc071c0fe218338c5cace3ea1 --- /dev/null +++ b/segment_anything/modeling/__init__.py @@ -0,0 +1,11 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +from .sam import Sam +from .image_encoder import ImageEncoderViT +from .mask_decoder import MaskDecoder +from .prompt_encoder import PromptEncoder +from .transformer import TwoWayTransformer diff --git a/segment_anything/modeling/common.py b/segment_anything/modeling/common.py new file mode 100644 index 0000000000000000000000000000000000000000..2bf15236a3eb24d8526073bc4fa2b274cccb3f96 --- /dev/null +++ b/segment_anything/modeling/common.py @@ -0,0 +1,43 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import torch +import torch.nn as nn + +from typing import Type + + +class MLPBlock(nn.Module): + def __init__( + self, + embedding_dim: int, + mlp_dim: int, + act: Type[nn.Module] = nn.GELU, + ) -> None: + super().__init__() + self.lin1 = nn.Linear(embedding_dim, mlp_dim) + self.lin2 = nn.Linear(mlp_dim, embedding_dim) + self.act = act() + + def forward(self, x: torch.Tensor) -> torch.Tensor: + return self.lin2(self.act(self.lin1(x))) + + +# From https://github.com/facebookresearch/detectron2/blob/main/detectron2/layers/batch_norm.py # noqa +# Itself from https://github.com/facebookresearch/ConvNeXt/blob/d1fa8f6fef0a165b27399986cc2bdacc92777e40/models/convnext.py#L119 # noqa +class LayerNorm2d(nn.Module): + def __init__(self, num_channels: int, eps: float = 1e-6) -> None: + super().__init__() + self.weight = nn.Parameter(torch.ones(num_channels)) + self.bias = nn.Parameter(torch.zeros(num_channels)) + self.eps = eps + + def forward(self, x: torch.Tensor) -> torch.Tensor: + u = x.mean(1, keepdim=True) + s = (x - u).pow(2).mean(1, keepdim=True) + x = (x - u) / torch.sqrt(s + self.eps) + x = self.weight[:, None, None] * x + self.bias[:, None, None] + return x diff --git a/segment_anything/modeling/image_encoder.py b/segment_anything/modeling/image_encoder.py new file mode 100644 index 0000000000000000000000000000000000000000..66351d9d7c589be693f4b3485901d3bdfed54d4a --- /dev/null +++ b/segment_anything/modeling/image_encoder.py @@ -0,0 +1,395 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
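+# Shape sketch for the configuration built in build_sam.py (img_size=1024,
+# patch_size=16, out_chans=256): a (B, 3, 1024, 1024) image is patch-embedded
+# into a (B, 64, 64, embed_dim) token grid, refined by the transformer blocks,
+# and projected by the neck to a (B, 256, 64, 64) image embedding.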
+ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from typing import Optional, Tuple, Type + +from .common import LayerNorm2d, MLPBlock + + +# This class and its supporting functions below lightly adapted from the ViTDet backbone available at: https://github.com/facebookresearch/detectron2/blob/main/detectron2/modeling/backbone/vit.py # noqa +class ImageEncoderViT(nn.Module): + def __init__( + self, + img_size: int = 1024, + patch_size: int = 16, + in_chans: int = 3, + embed_dim: int = 768, + depth: int = 12, + num_heads: int = 12, + mlp_ratio: float = 4.0, + out_chans: int = 256, + qkv_bias: bool = True, + norm_layer: Type[nn.Module] = nn.LayerNorm, + act_layer: Type[nn.Module] = nn.GELU, + use_abs_pos: bool = True, + use_rel_pos: bool = False, + rel_pos_zero_init: bool = True, + window_size: int = 0, + global_attn_indexes: Tuple[int, ...] = (), + ) -> None: + """ + Args: + img_size (int): Input image size. + patch_size (int): Patch size. + in_chans (int): Number of input image channels. + embed_dim (int): Patch embedding dimension. + depth (int): Depth of ViT. + num_heads (int): Number of attention heads in each ViT block. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool): If True, add a learnable bias to query, key, value. + norm_layer (nn.Module): Normalization layer. + act_layer (nn.Module): Activation layer. + use_abs_pos (bool): If True, use absolute positional embeddings. + use_rel_pos (bool): If True, add relative positional embeddings to the attention map. + rel_pos_zero_init (bool): If True, zero initialize relative positional parameters. + window_size (int): Window size for window attention blocks. + global_attn_indexes (list): Indexes for blocks using global attention. + """ + super().__init__() + self.img_size = img_size + + self.patch_embed = PatchEmbed( + kernel_size=(patch_size, patch_size), + stride=(patch_size, patch_size), + in_chans=in_chans, + embed_dim=embed_dim, + ) + + self.pos_embed: Optional[nn.Parameter] = None + if use_abs_pos: + # Initialize absolute positional embedding with pretrain image size. 
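+            # For img_size=1024 and patch_size=16 this is a learnable
+            # (1, 64, 64, embed_dim) tensor matching the patch grid.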
+ self.pos_embed = nn.Parameter( + torch.zeros(1, img_size // patch_size, img_size // patch_size, embed_dim) + ) + + self.blocks = nn.ModuleList() + for i in range(depth): + block = Block( + dim=embed_dim, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + norm_layer=norm_layer, + act_layer=act_layer, + use_rel_pos=use_rel_pos, + rel_pos_zero_init=rel_pos_zero_init, + window_size=window_size if i not in global_attn_indexes else 0, + input_size=(img_size // patch_size, img_size // patch_size), + ) + self.blocks.append(block) + + self.neck = nn.Sequential( + nn.Conv2d( + embed_dim, + out_chans, + kernel_size=1, + bias=False, + ), + LayerNorm2d(out_chans), + nn.Conv2d( + out_chans, + out_chans, + kernel_size=3, + padding=1, + bias=False, + ), + LayerNorm2d(out_chans), + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + x = self.patch_embed(x) + if self.pos_embed is not None: + x = x + self.pos_embed + + for blk in self.blocks: + x = blk(x) + + x = self.neck(x.permute(0, 3, 1, 2)) + + return x + + +class Block(nn.Module): + """Transformer blocks with support of window attention and residual propagation blocks""" + + def __init__( + self, + dim: int, + num_heads: int, + mlp_ratio: float = 4.0, + qkv_bias: bool = True, + norm_layer: Type[nn.Module] = nn.LayerNorm, + act_layer: Type[nn.Module] = nn.GELU, + use_rel_pos: bool = False, + rel_pos_zero_init: bool = True, + window_size: int = 0, + input_size: Optional[Tuple[int, int]] = None, + ) -> None: + """ + Args: + dim (int): Number of input channels. + num_heads (int): Number of attention heads in each ViT block. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool): If True, add a learnable bias to query, key, value. + norm_layer (nn.Module): Normalization layer. + act_layer (nn.Module): Activation layer. + use_rel_pos (bool): If True, add relative positional embeddings to the attention map. + rel_pos_zero_init (bool): If True, zero initialize relative positional parameters. + window_size (int): Window size for window attention blocks. If it equals 0, then + use global attention. + input_size (tuple(int, int) or None): Input resolution for calculating the relative + positional parameter size. + """ + super().__init__() + self.norm1 = norm_layer(dim) + self.attn = Attention( + dim, + num_heads=num_heads, + qkv_bias=qkv_bias, + use_rel_pos=use_rel_pos, + rel_pos_zero_init=rel_pos_zero_init, + input_size=input_size if window_size == 0 else (window_size, window_size), + ) + + self.norm2 = norm_layer(dim) + self.mlp = MLPBlock(embedding_dim=dim, mlp_dim=int(dim * mlp_ratio), act=act_layer) + + self.window_size = window_size + + def forward(self, x: torch.Tensor) -> torch.Tensor: + shortcut = x + x = self.norm1(x) + # Window partition + if self.window_size > 0: + H, W = x.shape[1], x.shape[2] + x, pad_hw = window_partition(x, self.window_size) + + x = self.attn(x) + # Reverse window partition + if self.window_size > 0: + x = window_unpartition(x, self.window_size, pad_hw, (H, W)) + + x = shortcut + x + x = x + self.mlp(self.norm2(x)) + + return x + + +class Attention(nn.Module): + """Multi-head Attention block with relative position embeddings.""" + + def __init__( + self, + dim: int, + num_heads: int = 8, + qkv_bias: bool = True, + use_rel_pos: bool = False, + rel_pos_zero_init: bool = True, + input_size: Optional[Tuple[int, int]] = None, + ) -> None: + """ + Args: + dim (int): Number of input channels. + num_heads (int): Number of attention heads. 
+ qkv_bias (bool): If True, add a learnable bias to query, key, value. + rel_pos (bool): If True, add relative positional embeddings to the attention map. + rel_pos_zero_init (bool): If True, zero initialize relative positional parameters. + input_size (tuple(int, int) or None): Input resolution for calculating the relative + positional parameter size. + """ + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = head_dim**-0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.proj = nn.Linear(dim, dim) + + self.use_rel_pos = use_rel_pos + if self.use_rel_pos: + assert ( + input_size is not None + ), "Input size must be provided if using relative positional encoding." + # initialize relative positional embeddings + self.rel_pos_h = nn.Parameter(torch.zeros(2 * input_size[0] - 1, head_dim)) + self.rel_pos_w = nn.Parameter(torch.zeros(2 * input_size[1] - 1, head_dim)) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + B, H, W, _ = x.shape + # qkv with shape (3, B, nHead, H * W, C) + qkv = self.qkv(x).reshape(B, H * W, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4) + # q, k, v with shape (B * nHead, H * W, C) + q, k, v = qkv.reshape(3, B * self.num_heads, H * W, -1).unbind(0) + + attn = (q * self.scale) @ k.transpose(-2, -1) + + if self.use_rel_pos: + attn = add_decomposed_rel_pos(attn, q, self.rel_pos_h, self.rel_pos_w, (H, W), (H, W)) + + attn = attn.softmax(dim=-1) + x = (attn @ v).view(B, self.num_heads, H, W, -1).permute(0, 2, 3, 1, 4).reshape(B, H, W, -1) + x = self.proj(x) + + return x + + +def window_partition(x: torch.Tensor, window_size: int) -> Tuple[torch.Tensor, Tuple[int, int]]: + """ + Partition into non-overlapping windows with padding if needed. + Args: + x (tensor): input tokens with [B, H, W, C]. + window_size (int): window size. + + Returns: + windows: windows after partition with [B * num_windows, window_size, window_size, C]. + (Hp, Wp): padded height and width before partition + """ + B, H, W, C = x.shape + + pad_h = (window_size - H % window_size) % window_size + pad_w = (window_size - W % window_size) % window_size + if pad_h > 0 or pad_w > 0: + x = F.pad(x, (0, 0, 0, pad_w, 0, pad_h)) + Hp, Wp = H + pad_h, W + pad_w + + x = x.view(B, Hp // window_size, window_size, Wp // window_size, window_size, C) + windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C) + return windows, (Hp, Wp) + + +def window_unpartition( + windows: torch.Tensor, window_size: int, pad_hw: Tuple[int, int], hw: Tuple[int, int] +) -> torch.Tensor: + """ + Window unpartition into original sequences and removing padding. + Args: + windows (tensor): input tokens with [B * num_windows, window_size, window_size, C]. + window_size (int): window size. + pad_hw (Tuple): padded height and width (Hp, Wp). + hw (Tuple): original height and width (H, W) before padding. + + Returns: + x: unpartitioned sequences with [B, H, W, C]. + """ + Hp, Wp = pad_hw + H, W = hw + B = windows.shape[0] // (Hp * Wp // window_size // window_size) + x = windows.view(B, Hp // window_size, Wp // window_size, window_size, window_size, -1) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, Hp, Wp, -1) + + if Hp > H or Wp > W: + x = x[:, :H, :W, :].contiguous() + return x + + +def get_rel_pos(q_size: int, k_size: int, rel_pos: torch.Tensor) -> torch.Tensor: + """ + Get relative positional embeddings according to the relative positions of + query and key sizes. + Args: + q_size (int): size of query q. + k_size (int): size of key k. 
+ rel_pos (Tensor): relative position embeddings (L, C). + + Returns: + Extracted positional embeddings according to relative positions. + """ + max_rel_dist = int(2 * max(q_size, k_size) - 1) + # Interpolate rel pos if needed. + if rel_pos.shape[0] != max_rel_dist: + # Interpolate rel pos. + rel_pos_resized = F.interpolate( + rel_pos.reshape(1, rel_pos.shape[0], -1).permute(0, 2, 1), + size=max_rel_dist, + mode="linear", + ) + rel_pos_resized = rel_pos_resized.reshape(-1, max_rel_dist).permute(1, 0) + else: + rel_pos_resized = rel_pos + + # Scale the coords with short length if shapes for q and k are different. + q_coords = torch.arange(q_size)[:, None] * max(k_size / q_size, 1.0) + k_coords = torch.arange(k_size)[None, :] * max(q_size / k_size, 1.0) + relative_coords = (q_coords - k_coords) + (k_size - 1) * max(q_size / k_size, 1.0) + + return rel_pos_resized[relative_coords.long()] + + +def add_decomposed_rel_pos( + attn: torch.Tensor, + q: torch.Tensor, + rel_pos_h: torch.Tensor, + rel_pos_w: torch.Tensor, + q_size: Tuple[int, int], + k_size: Tuple[int, int], +) -> torch.Tensor: + """ + Calculate decomposed Relative Positional Embeddings from :paper:`mvitv2`. + https://github.com/facebookresearch/mvit/blob/19786631e330df9f3622e5402b4a419a263a2c80/mvit/models/attention.py # noqa B950 + Args: + attn (Tensor): attention map. + q (Tensor): query q in the attention layer with shape (B, q_h * q_w, C). + rel_pos_h (Tensor): relative position embeddings (Lh, C) for height axis. + rel_pos_w (Tensor): relative position embeddings (Lw, C) for width axis. + q_size (Tuple): spatial sequence size of query q with (q_h, q_w). + k_size (Tuple): spatial sequence size of key k with (k_h, k_w). + + Returns: + attn (Tensor): attention map with added relative positional embeddings. + """ + q_h, q_w = q_size + k_h, k_w = k_size + Rh = get_rel_pos(q_h, k_h, rel_pos_h) + Rw = get_rel_pos(q_w, k_w, rel_pos_w) + + B, _, dim = q.shape + r_q = q.reshape(B, q_h, q_w, dim) + rel_h = torch.einsum("bhwc,hkc->bhwk", r_q, Rh) + rel_w = torch.einsum("bhwc,wkc->bhwk", r_q, Rw) + + attn = ( + attn.view(B, q_h, q_w, k_h, k_w) + rel_h[:, :, :, :, None] + rel_w[:, :, :, None, :] + ).view(B, q_h * q_w, k_h * k_w) + + return attn + + +class PatchEmbed(nn.Module): + """ + Image to Patch Embedding. + """ + + def __init__( + self, + kernel_size: Tuple[int, int] = (16, 16), + stride: Tuple[int, int] = (16, 16), + padding: Tuple[int, int] = (0, 0), + in_chans: int = 3, + embed_dim: int = 768, + ) -> None: + """ + Args: + kernel_size (Tuple): kernel size of the projection layer. + stride (Tuple): stride of the projection layer. + padding (Tuple): padding size of the projection layer. + in_chans (int): Number of input image channels. + embed_dim (int): Patch embedding dimension. + """ + super().__init__() + + self.proj = nn.Conv2d( + in_chans, embed_dim, kernel_size=kernel_size, stride=stride, padding=padding + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + x = self.proj(x) + # B C H W -> B H W C + x = x.permute(0, 2, 3, 1) + return x diff --git a/segment_anything/modeling/mask_decoder.py b/segment_anything/modeling/mask_decoder.py new file mode 100644 index 0000000000000000000000000000000000000000..e12faab0b3b0662eef62acb133c3e785ebac5bd9 --- /dev/null +++ b/segment_anything/modeling/mask_decoder.py @@ -0,0 +1,179 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
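+# Shape sketch for the configuration built in build_sam.py (transformer_dim=256,
+# num_multimask_outputs=3): given a (B, 256, 64, 64) image embedding and prompt
+# embeddings, forward() returns masks of shape (B, 3, 256, 256) when
+# multimask_output=True (or (B, 1, 256, 256) otherwise) together with IoU
+# predictions of shape (B, num_masks).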
+ +import torch +from torch import nn +from torch.nn import functional as F + +from typing import List, Tuple, Type + +from .common import LayerNorm2d + + +class MaskDecoder(nn.Module): + def __init__( + self, + *, + transformer_dim: int, + transformer: nn.Module, + num_multimask_outputs: int = 3, + activation: Type[nn.Module] = nn.GELU, + iou_head_depth: int = 3, + iou_head_hidden_dim: int = 256, + ) -> None: + """ + Predicts masks given an image and prompt embeddings, using a + transformer architecture. + + Arguments: + transformer_dim (int): the channel dimension of the transformer + transformer (nn.Module): the transformer used to predict masks + num_multimask_outputs (int): the number of masks to predict + when disambiguating masks + activation (nn.Module): the type of activation to use when + upscaling masks + iou_head_depth (int): the depth of the MLP used to predict + mask quality + iou_head_hidden_dim (int): the hidden dimension of the MLP + used to predict mask quality + """ + super().__init__() + self.transformer_dim = transformer_dim + self.transformer = transformer + + self.num_multimask_outputs = num_multimask_outputs + + self.iou_token = nn.Embedding(1, transformer_dim) + self.num_mask_tokens = num_multimask_outputs + 1 + self.mask_tokens = nn.Embedding(self.num_mask_tokens, transformer_dim) + + self.output_upscaling = nn.Sequential( + nn.ConvTranspose2d(transformer_dim, transformer_dim // 4, kernel_size=2, stride=2), + LayerNorm2d(transformer_dim // 4), + activation(), + nn.ConvTranspose2d(transformer_dim // 4, transformer_dim // 8, kernel_size=2, stride=2), + activation(), + ) + self.output_hypernetworks_mlps = nn.ModuleList( + [ + MLP(transformer_dim, transformer_dim, transformer_dim // 8, 3) + for i in range(self.num_mask_tokens) + ] + ) + + self.iou_prediction_head = MLP( + transformer_dim, iou_head_hidden_dim, self.num_mask_tokens, iou_head_depth + ) + + def forward( + self, + image_embeddings: torch.Tensor, + image_pe: torch.Tensor, + sparse_prompt_embeddings: torch.Tensor, + dense_prompt_embeddings: torch.Tensor, + multimask_output: bool, + ) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Predict masks given image and prompt embeddings. + + Arguments: + image_embeddings (torch.Tensor): the embeddings from the image encoder + image_pe (torch.Tensor): positional encoding with the shape of image_embeddings + sparse_prompt_embeddings (torch.Tensor): the embeddings of the points and boxes + dense_prompt_embeddings (torch.Tensor): the embeddings of the mask inputs + multimask_output (bool): Whether to return multiple masks or a single + mask. + + Returns: + torch.Tensor: batched predicted masks + torch.Tensor: batched predictions of mask quality + """ + masks, iou_pred = self.predict_masks( + image_embeddings=image_embeddings, + image_pe=image_pe, + sparse_prompt_embeddings=sparse_prompt_embeddings, + dense_prompt_embeddings=dense_prompt_embeddings, + ) + + # Select the correct mask or masks for output + if multimask_output: + mask_slice = slice(1, None) + else: + mask_slice = slice(0, 1) + masks = masks[:, mask_slice, :, :] + iou_pred = iou_pred[:, mask_slice] + + # Prepare output + return masks, iou_pred + + def predict_masks( + self, + image_embeddings: torch.Tensor, + image_pe: torch.Tensor, + sparse_prompt_embeddings: torch.Tensor, + dense_prompt_embeddings: torch.Tensor, + ) -> Tuple[torch.Tensor, torch.Tensor]: + """Predicts masks. 
See 'forward' for more details.""" + # Concatenate output tokens + output_tokens = torch.cat([self.iou_token.weight, self.mask_tokens.weight], dim=0) + output_tokens = output_tokens.unsqueeze(0).expand(sparse_prompt_embeddings.size(0), -1, -1) + tokens = torch.cat((output_tokens, sparse_prompt_embeddings), dim=1) + + # Expand per-image data in batch direction to be per-mask + if image_embeddings.shape[0] != tokens.shape[0]: + src = torch.repeat_interleave(image_embeddings, tokens.shape[0], dim=0) + else: + src = image_embeddings + src = src + dense_prompt_embeddings + pos_src = torch.repeat_interleave(image_pe, tokens.shape[0], dim=0) + b, c, h, w = src.shape + + # Run the transformer + hs, src = self.transformer(src, pos_src, tokens) + iou_token_out = hs[:, 0, :] + mask_tokens_out = hs[:, 1 : (1 + self.num_mask_tokens), :] + + # Upscale mask embeddings and predict masks using the mask tokens + src = src.transpose(1, 2).view(b, c, h, w) + upscaled_embedding = self.output_upscaling(src) + hyper_in_list: List[torch.Tensor] = [] + for i in range(self.num_mask_tokens): + hyper_in_list.append(self.output_hypernetworks_mlps[i](mask_tokens_out[:, i, :])) + hyper_in = torch.stack(hyper_in_list, dim=1) + b, c, h, w = upscaled_embedding.shape + masks = (hyper_in @ upscaled_embedding.view(b, c, h * w)).view(b, -1, h, w) + + # Generate mask quality predictions + iou_pred = self.iou_prediction_head(iou_token_out) + + return masks, iou_pred + + +# Lightly adapted from +# https://github.com/facebookresearch/MaskFormer/blob/main/mask_former/modeling/transformer/transformer_predictor.py # noqa +class MLP(nn.Module): + def __init__( + self, + input_dim: int, + hidden_dim: int, + output_dim: int, + num_layers: int, + sigmoid_output: bool = False, + ) -> None: + super().__init__() + self.num_layers = num_layers + h = [hidden_dim] * (num_layers - 1) + self.layers = nn.ModuleList( + nn.Linear(n, k) for n, k in zip([input_dim] + h, h + [output_dim]) + ) + self.sigmoid_output = sigmoid_output + + def forward(self, x): + for i, layer in enumerate(self.layers): + x = F.relu(layer(x)) if i < self.num_layers - 1 else layer(x) + if self.sigmoid_output: + x = F.sigmoid(x) + return x diff --git a/segment_anything/modeling/prompt_encoder.py b/segment_anything/modeling/prompt_encoder.py new file mode 100644 index 0000000000000000000000000000000000000000..c3143f4f8e02ddd7ca8587b40ff5d47c3a6b7ef3 --- /dev/null +++ b/segment_anything/modeling/prompt_encoder.py @@ -0,0 +1,214 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import numpy as np +import torch +from torch import nn + +from typing import Any, Optional, Tuple, Type + +from .common import LayerNorm2d + + +class PromptEncoder(nn.Module): + def __init__( + self, + embed_dim: int, + image_embedding_size: Tuple[int, int], + input_image_size: Tuple[int, int], + mask_in_chans: int, + activation: Type[nn.Module] = nn.GELU, + ) -> None: + """ + Encodes prompts for input to SAM's mask decoder. + + Arguments: + embed_dim (int): The prompts' embedding dimension + image_embedding_size (tuple(int, int)): The spatial size of the + image embedding, as (H, W). + input_image_size (int): The padded size of the image as input + to the image encoder, as (H, W). + mask_in_chans (int): The number of hidden channels used for + encoding input masks. + activation (nn.Module): The activation to use when encoding + input masks. 
+ """ + super().__init__() + self.embed_dim = embed_dim + self.input_image_size = input_image_size + self.image_embedding_size = image_embedding_size + self.pe_layer = PositionEmbeddingRandom(embed_dim // 2) + + self.num_point_embeddings: int = 4 # pos/neg point + 2 box corners + point_embeddings = [nn.Embedding(1, embed_dim) for i in range(self.num_point_embeddings)] + self.point_embeddings = nn.ModuleList(point_embeddings) + self.not_a_point_embed = nn.Embedding(1, embed_dim) + + self.mask_input_size = (4 * image_embedding_size[0], 4 * image_embedding_size[1]) + self.mask_downscaling = nn.Sequential( + nn.Conv2d(1, mask_in_chans // 4, kernel_size=2, stride=2), + LayerNorm2d(mask_in_chans // 4), + activation(), + nn.Conv2d(mask_in_chans // 4, mask_in_chans, kernel_size=2, stride=2), + LayerNorm2d(mask_in_chans), + activation(), + nn.Conv2d(mask_in_chans, embed_dim, kernel_size=1), + ) + self.no_mask_embed = nn.Embedding(1, embed_dim) + + def get_dense_pe(self) -> torch.Tensor: + """ + Returns the positional encoding used to encode point prompts, + applied to a dense set of points the shape of the image encoding. + + Returns: + torch.Tensor: Positional encoding with shape + 1x(embed_dim)x(embedding_h)x(embedding_w) + """ + return self.pe_layer(self.image_embedding_size).unsqueeze(0) + + def _embed_points( + self, + points: torch.Tensor, + labels: torch.Tensor, + pad: bool, + ) -> torch.Tensor: + """Embeds point prompts.""" + points = points + 0.5 # Shift to center of pixel + if pad: + padding_point = torch.zeros((points.shape[0], 1, 2), device=points.device) + padding_label = -torch.ones((labels.shape[0], 1), device=labels.device) + points = torch.cat([points, padding_point], dim=1) + labels = torch.cat([labels, padding_label], dim=1) + point_embedding = self.pe_layer.forward_with_coords(points, self.input_image_size) + point_embedding[labels == -1] = 0.0 + point_embedding[labels == -1] += self.not_a_point_embed.weight + point_embedding[labels == 0] += self.point_embeddings[0].weight + point_embedding[labels == 1] += self.point_embeddings[1].weight + return point_embedding + + def _embed_boxes(self, boxes: torch.Tensor) -> torch.Tensor: + """Embeds box prompts.""" + boxes = boxes + 0.5 # Shift to center of pixel + coords = boxes.reshape(-1, 2, 2) + corner_embedding = self.pe_layer.forward_with_coords(coords, self.input_image_size) + corner_embedding[:, 0, :] += self.point_embeddings[2].weight + corner_embedding[:, 1, :] += self.point_embeddings[3].weight + return corner_embedding + + def _embed_masks(self, masks: torch.Tensor) -> torch.Tensor: + """Embeds mask inputs.""" + mask_embedding = self.mask_downscaling(masks) + return mask_embedding + + def _get_batch_size( + self, + points: Optional[Tuple[torch.Tensor, torch.Tensor]], + boxes: Optional[torch.Tensor], + masks: Optional[torch.Tensor], + ) -> int: + """ + Gets the batch size of the output given the batch size of the input prompts. + """ + if points is not None: + return points[0].shape[0] + elif boxes is not None: + return boxes.shape[0] + elif masks is not None: + return masks.shape[0] + else: + return 1 + + def _get_device(self) -> torch.device: + return self.point_embeddings[0].weight.device + + def forward( + self, + points: Optional[Tuple[torch.Tensor, torch.Tensor]], + boxes: Optional[torch.Tensor], + masks: Optional[torch.Tensor], + ) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Embeds different types of prompts, returning both sparse and dense + embeddings. 
+ + Arguments: + points (tuple(torch.Tensor, torch.Tensor) or none): point coordinates + and labels to embed. + boxes (torch.Tensor or none): boxes to embed + masks (torch.Tensor or none): masks to embed + + Returns: + torch.Tensor: sparse embeddings for the points and boxes, with shape + BxNx(embed_dim), where N is determined by the number of input points + and boxes. + torch.Tensor: dense embeddings for the masks, in the shape + Bx(embed_dim)x(embed_H)x(embed_W) + """ + bs = self._get_batch_size(points, boxes, masks) + sparse_embeddings = torch.empty((bs, 0, self.embed_dim), device=self._get_device()) + if points is not None: + coords, labels = points + point_embeddings = self._embed_points(coords, labels, pad=(boxes is None)) + sparse_embeddings = torch.cat([sparse_embeddings, point_embeddings], dim=1) + if boxes is not None: + box_embeddings = self._embed_boxes(boxes) + sparse_embeddings = torch.cat([sparse_embeddings, box_embeddings], dim=1) + + if masks is not None: + dense_embeddings = self._embed_masks(masks) + else: + dense_embeddings = self.no_mask_embed.weight.reshape(1, -1, 1, 1).expand( + bs, -1, self.image_embedding_size[0], self.image_embedding_size[1] + ) + + return sparse_embeddings, dense_embeddings + + +class PositionEmbeddingRandom(nn.Module): + """ + Positional encoding using random spatial frequencies. + """ + + def __init__(self, num_pos_feats: int = 64, scale: Optional[float] = None) -> None: + super().__init__() + if scale is None or scale <= 0.0: + scale = 1.0 + self.register_buffer( + "positional_encoding_gaussian_matrix", + scale * torch.randn((2, num_pos_feats)), + ) + + def _pe_encoding(self, coords: torch.Tensor) -> torch.Tensor: + """Positionally encode points that are normalized to [0,1].""" + # assuming coords are in [0, 1]^2 square and have d_1 x ... x d_n x 2 shape + coords = 2 * coords - 1 + coords = coords @ self.positional_encoding_gaussian_matrix + coords = 2 * np.pi * coords + # outputs d_1 x ... x d_n x C shape + return torch.cat([torch.sin(coords), torch.cos(coords)], dim=-1) + + def forward(self, size: Tuple[int, int]) -> torch.Tensor: + """Generate positional encoding for a grid of the specified size.""" + h, w = size + device: Any = self.positional_encoding_gaussian_matrix.device + grid = torch.ones((h, w), device=device, dtype=torch.float32) + y_embed = grid.cumsum(dim=0) - 0.5 + x_embed = grid.cumsum(dim=1) - 0.5 + y_embed = y_embed / h + x_embed = x_embed / w + + pe = self._pe_encoding(torch.stack([x_embed, y_embed], dim=-1)) + return pe.permute(2, 0, 1) # C x H x W + + def forward_with_coords( + self, coords_input: torch.Tensor, image_size: Tuple[int, int] + ) -> torch.Tensor: + """Positionally encode points that are not normalized to [0,1].""" + coords = coords_input.clone() + coords[:, :, 0] = coords[:, :, 0] / image_size[1] + coords[:, :, 1] = coords[:, :, 1] / image_size[0] + return self._pe_encoding(coords.to(torch.float)) # B x N x C diff --git a/segment_anything/modeling/sam.py b/segment_anything/modeling/sam.py new file mode 100644 index 0000000000000000000000000000000000000000..8074cff6b40addc6b66f7ab4962218eef20da13c --- /dev/null +++ b/segment_anything/modeling/sam.py @@ -0,0 +1,174 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
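# --- Illustrative sketch (annotation, not part of the diff) -----------------
# How the random-Fourier positional encoding defined at the end of
# prompt_encoder.py maps a point in [0, 1]^2 to a C-dimensional embedding.
# The matrix and coordinates below are made-up stand-ins; only torch is assumed.
import math
import torch

num_pos_feats = 4
gaussian_matrix = torch.randn(2, num_pos_feats)      # random spatial frequencies
coords = torch.tensor([[0.25, 0.75]])                # one point, normalized to [0, 1]
proj = 2 * math.pi * ((2 * coords - 1) @ gaussian_matrix)
pe = torch.cat([proj.sin(), proj.cos()], dim=-1)     # shape (1, 2 * num_pos_feats)
print(pe.shape)                                      # torch.Size([1, 8])
# -----------------------------------------------------------------------------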
+ +import torch +from torch import nn +from torch.nn import functional as F + +from typing import Any, Dict, List, Tuple + +from .image_encoder import ImageEncoderViT +from .mask_decoder import MaskDecoder +from .prompt_encoder import PromptEncoder + + +class Sam(nn.Module): + mask_threshold: float = 0.0 + image_format: str = "RGB" + + def __init__( + self, + image_encoder: ImageEncoderViT, + prompt_encoder: PromptEncoder, + mask_decoder: MaskDecoder, + pixel_mean: List[float] = [123.675, 116.28, 103.53], + pixel_std: List[float] = [58.395, 57.12, 57.375], + ) -> None: + """ + SAM predicts object masks from an image and input prompts. + + Arguments: + image_encoder (ImageEncoderViT): The backbone used to encode the + image into image embeddings that allow for efficient mask prediction. + prompt_encoder (PromptEncoder): Encodes various types of input prompts. + mask_decoder (MaskDecoder): Predicts masks from the image embeddings + and encoded prompts. + pixel_mean (list(float)): Mean values for normalizing pixels in the input image. + pixel_std (list(float)): Std values for normalizing pixels in the input image. + """ + super().__init__() + self.image_encoder = image_encoder + self.prompt_encoder = prompt_encoder + self.mask_decoder = mask_decoder + self.register_buffer("pixel_mean", torch.Tensor(pixel_mean).view(-1, 1, 1), False) + self.register_buffer("pixel_std", torch.Tensor(pixel_std).view(-1, 1, 1), False) + + @property + def device(self) -> Any: + return self.pixel_mean.device + + @torch.no_grad() + def forward( + self, + batched_input: List[Dict[str, Any]], + multimask_output: bool, + ) -> List[Dict[str, torch.Tensor]]: + """ + Predicts masks end-to-end from provided images and prompts. + If prompts are not known in advance, using SamPredictor is + recommended over calling the model directly. + + Arguments: + batched_input (list(dict)): A list over input images, each a + dictionary with the following keys. A prompt key can be + excluded if it is not present. + 'image': The image as a torch tensor in 3xHxW format, + already transformed for input to the model. + 'original_size': (tuple(int, int)) The original size of + the image before transformation, as (H, W). + 'point_coords': (torch.Tensor) Batched point prompts for + this image, with shape BxNx2. Already transformed to the + input frame of the model. + 'point_labels': (torch.Tensor) Batched labels for point prompts, + with shape BxN. + 'boxes': (torch.Tensor) Batched box inputs, with shape Bx4. + Already transformed to the input frame of the model. + 'mask_inputs': (torch.Tensor) Batched mask inputs to the model, + in the form Bx1xHxW. + multimask_output (bool): Whether the model should predict multiple + disambiguating masks, or return a single mask. + + Returns: + (list(dict)): A list over input images, where each element is + as dictionary with the following keys. + 'masks': (torch.Tensor) Batched binary mask predictions, + with shape BxCxHxW, where B is the number of input prompts, + C is determined by multimask_output, and (H, W) is the + original size of the image. + 'iou_predictions': (torch.Tensor) The model's predictions + of mask quality, in shape BxC. + 'low_res_logits': (torch.Tensor) Low resolution logits with + shape BxCxHxW, where H=W=256. Can be passed as mask input + to subsequent iterations of prediction. 
+ """ + input_images = torch.stack([self.preprocess(x["image"]) for x in batched_input], dim=0) + image_embeddings = self.image_encoder(input_images) + + outputs = [] + for image_record, curr_embedding in zip(batched_input, image_embeddings): + if "point_coords" in image_record: + points = (image_record["point_coords"], image_record["point_labels"]) + else: + points = None + sparse_embeddings, dense_embeddings = self.prompt_encoder( + points=points, + boxes=image_record.get("boxes", None), + masks=image_record.get("mask_inputs", None), + ) + low_res_masks, iou_predictions = self.mask_decoder( + image_embeddings=curr_embedding.unsqueeze(0), + image_pe=self.prompt_encoder.get_dense_pe(), + sparse_prompt_embeddings=sparse_embeddings, + dense_prompt_embeddings=dense_embeddings, + multimask_output=multimask_output, + ) + masks = self.postprocess_masks( + low_res_masks, + input_size=image_record["image"].shape[-2:], + original_size=image_record["original_size"], + ) + masks = masks > self.mask_threshold + outputs.append( + { + "masks": masks, + "iou_predictions": iou_predictions, + "low_res_logits": low_res_masks, + } + ) + return outputs + + def postprocess_masks( + self, + masks: torch.Tensor, + input_size: Tuple[int, ...], + original_size: Tuple[int, ...], + ) -> torch.Tensor: + """ + Remove padding and upscale masks to the original image size. + + Arguments: + masks (torch.Tensor): Batched masks from the mask_decoder, + in BxCxHxW format. + input_size (tuple(int, int)): The size of the image input to the + model, in (H, W) format. Used to remove padding. + original_size (tuple(int, int)): The original size of the image + before resizing for input to the model, in (H, W) format. + + Returns: + (torch.Tensor): Batched masks in BxCxHxW format, where (H, W) + is given by original_size. + """ + masks = F.interpolate( + masks, + (self.image_encoder.img_size, self.image_encoder.img_size), + mode="bilinear", + align_corners=False, + ) + masks = masks[..., : input_size[0], : input_size[1]] + masks = F.interpolate(masks, original_size, mode="bilinear", align_corners=False) + return masks + + def preprocess(self, x: torch.Tensor) -> torch.Tensor: + """Normalize pixel values and pad to a square input.""" + # Normalize colors + x = (x - self.pixel_mean) / self.pixel_std + + # Pad + h, w = x.shape[-2:] + padh = self.image_encoder.img_size - h + padw = self.image_encoder.img_size - w + x = F.pad(x, (0, padw, 0, padh)) + return x diff --git a/segment_anything/modeling/transformer.py b/segment_anything/modeling/transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..28fafea52288603fea275f3a100790471825c34a --- /dev/null +++ b/segment_anything/modeling/transformer.py @@ -0,0 +1,240 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import torch +from torch import Tensor, nn + +import math +from typing import Tuple, Type + +from .common import MLPBlock + + +class TwoWayTransformer(nn.Module): + def __init__( + self, + depth: int, + embedding_dim: int, + num_heads: int, + mlp_dim: int, + activation: Type[nn.Module] = nn.ReLU, + attention_downsample_rate: int = 2, + ) -> None: + """ + A transformer decoder that attends to an input image using + queries whose positional embedding is supplied. 
+ + Args: + depth (int): number of layers in the transformer + embedding_dim (int): the channel dimension for the input embeddings + num_heads (int): the number of heads for multihead attention. Must + divide embedding_dim + mlp_dim (int): the channel dimension internal to the MLP block + activation (nn.Module): the activation to use in the MLP block + """ + super().__init__() + self.depth = depth + self.embedding_dim = embedding_dim + self.num_heads = num_heads + self.mlp_dim = mlp_dim + self.layers = nn.ModuleList() + + for i in range(depth): + self.layers.append( + TwoWayAttentionBlock( + embedding_dim=embedding_dim, + num_heads=num_heads, + mlp_dim=mlp_dim, + activation=activation, + attention_downsample_rate=attention_downsample_rate, + skip_first_layer_pe=(i == 0), + ) + ) + + self.final_attn_token_to_image = Attention( + embedding_dim, num_heads, downsample_rate=attention_downsample_rate + ) + self.norm_final_attn = nn.LayerNorm(embedding_dim) + + def forward( + self, + image_embedding: Tensor, + image_pe: Tensor, + point_embedding: Tensor, + ) -> Tuple[Tensor, Tensor]: + """ + Args: + image_embedding (torch.Tensor): image to attend to. Should be shape + B x embedding_dim x h x w for any h and w. + image_pe (torch.Tensor): the positional encoding to add to the image. Must + have the same shape as image_embedding. + point_embedding (torch.Tensor): the embedding to add to the query points. + Must have shape B x N_points x embedding_dim for any N_points. + + Returns: + torch.Tensor: the processed point_embedding + torch.Tensor: the processed image_embedding + """ + # BxCxHxW -> BxHWxC == B x N_image_tokens x C + bs, c, h, w = image_embedding.shape + image_embedding = image_embedding.flatten(2).permute(0, 2, 1) + image_pe = image_pe.flatten(2).permute(0, 2, 1) + + # Prepare queries + queries = point_embedding + keys = image_embedding + + # Apply transformer blocks and final layernorm + for layer in self.layers: + queries, keys = layer( + queries=queries, + keys=keys, + query_pe=point_embedding, + key_pe=image_pe, + ) + + # Apply the final attention layer from the points to the image + q = queries + point_embedding + k = keys + image_pe + attn_out = self.final_attn_token_to_image(q=q, k=k, v=keys) + queries = queries + attn_out + queries = self.norm_final_attn(queries) + + return queries, keys + + +class TwoWayAttentionBlock(nn.Module): + def __init__( + self, + embedding_dim: int, + num_heads: int, + mlp_dim: int = 2048, + activation: Type[nn.Module] = nn.ReLU, + attention_downsample_rate: int = 2, + skip_first_layer_pe: bool = False, + ) -> None: + """ + A transformer block with four layers: (1) self-attention of sparse + inputs, (2) cross attention of sparse inputs to dense inputs, (3) mlp + block on sparse inputs, and (4) cross attention of dense inputs to sparse + inputs. 
+ + Arguments: + embedding_dim (int): the channel dimension of the embeddings + num_heads (int): the number of heads in the attention layers + mlp_dim (int): the hidden dimension of the mlp block + activation (nn.Module): the activation of the mlp block + skip_first_layer_pe (bool): skip the PE on the first layer + """ + super().__init__() + self.self_attn = Attention(embedding_dim, num_heads) + self.norm1 = nn.LayerNorm(embedding_dim) + + self.cross_attn_token_to_image = Attention( + embedding_dim, num_heads, downsample_rate=attention_downsample_rate + ) + self.norm2 = nn.LayerNorm(embedding_dim) + + self.mlp = MLPBlock(embedding_dim, mlp_dim, activation) + self.norm3 = nn.LayerNorm(embedding_dim) + + self.norm4 = nn.LayerNorm(embedding_dim) + self.cross_attn_image_to_token = Attention( + embedding_dim, num_heads, downsample_rate=attention_downsample_rate + ) + + self.skip_first_layer_pe = skip_first_layer_pe + + def forward( + self, queries: Tensor, keys: Tensor, query_pe: Tensor, key_pe: Tensor + ) -> Tuple[Tensor, Tensor]: + # Self attention block + if self.skip_first_layer_pe: + queries = self.self_attn(q=queries, k=queries, v=queries) + else: + q = queries + query_pe + attn_out = self.self_attn(q=q, k=q, v=queries) + queries = queries + attn_out + queries = self.norm1(queries) + + # Cross attention block, tokens attending to image embedding + q = queries + query_pe + k = keys + key_pe + attn_out = self.cross_attn_token_to_image(q=q, k=k, v=keys) + queries = queries + attn_out + queries = self.norm2(queries) + + # MLP block + mlp_out = self.mlp(queries) + queries = queries + mlp_out + queries = self.norm3(queries) + + # Cross attention block, image embedding attending to tokens + q = queries + query_pe + k = keys + key_pe + attn_out = self.cross_attn_image_to_token(q=k, k=q, v=queries) + keys = keys + attn_out + keys = self.norm4(keys) + + return queries, keys + + +class Attention(nn.Module): + """ + An attention layer that allows for downscaling the size of the embedding + after projection to queries, keys, and values. + """ + + def __init__( + self, + embedding_dim: int, + num_heads: int, + downsample_rate: int = 1, + ) -> None: + super().__init__() + self.embedding_dim = embedding_dim + self.internal_dim = embedding_dim // downsample_rate + self.num_heads = num_heads + assert self.internal_dim % num_heads == 0, "num_heads must divide embedding_dim." 
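# --- Illustrative sketch (annotation, not part of the diff) -----------------
# Shape arithmetic for the Attention block above: with the default
# attention_downsample_rate=2, a 256-dim embedding (the prompt_embed_dim used
# elsewhere in this repo) and a hypothetical 8 heads, q/k/v are projected down
# to internal_dim channels and then split per head.
embedding_dim, downsample_rate, num_heads = 256, 2, 8
internal_dim = embedding_dim // downsample_rate
assert internal_dim % num_heads == 0
print(internal_dim, internal_dim // num_heads)       # 128 channels total, 16 per head
# -----------------------------------------------------------------------------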
+ + self.q_proj = nn.Linear(embedding_dim, self.internal_dim) + self.k_proj = nn.Linear(embedding_dim, self.internal_dim) + self.v_proj = nn.Linear(embedding_dim, self.internal_dim) + self.out_proj = nn.Linear(self.internal_dim, embedding_dim) + + def _separate_heads(self, x: Tensor, num_heads: int) -> Tensor: + b, n, c = x.shape + x = x.reshape(b, n, num_heads, c // num_heads) + return x.transpose(1, 2) # B x N_heads x N_tokens x C_per_head + + def _recombine_heads(self, x: Tensor) -> Tensor: + b, n_heads, n_tokens, c_per_head = x.shape + x = x.transpose(1, 2) + return x.reshape(b, n_tokens, n_heads * c_per_head) # B x N_tokens x C + + def forward(self, q: Tensor, k: Tensor, v: Tensor) -> Tensor: + # Input projections + q = self.q_proj(q) + k = self.k_proj(k) + v = self.v_proj(v) + + # Separate into heads + q = self._separate_heads(q, self.num_heads) + k = self._separate_heads(k, self.num_heads) + v = self._separate_heads(v, self.num_heads) + + # Attention + _, _, _, c_per_head = q.shape + attn = q @ k.permute(0, 1, 3, 2) # B x N_heads x N_tokens x N_tokens + attn = attn / math.sqrt(c_per_head) + attn = torch.softmax(attn, dim=-1) + + # Get output + out = attn @ v + out = self._recombine_heads(out) + out = self.out_proj(out) + + return out diff --git a/segment_anything/our_vit.py b/segment_anything/our_vit.py new file mode 100644 index 0000000000000000000000000000000000000000..486972a64a678931a0581a3baa6f7a94ada999f1 --- /dev/null +++ b/segment_anything/our_vit.py @@ -0,0 +1,104 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import torch +import torch.nn as nn +from functools import partial +import torch.nn.functional as F + +from typing import Optional, Tuple, Type + +from .modeling import ImageEncoderViT + +def vit_encoder_l(): + + prompt_embed_dim = 256 + image_size = 1024 + vit_patch_size = 16 + image_embedding_size = image_size // vit_patch_size + + + encoder_embed_dim=1024 + encoder_depth=24 + encoder_num_heads=16 + encoder_global_attn_indexes=[5, 11, 17, 23] + + + image_encoder=ImageEncoderViT( + depth=encoder_depth, + embed_dim=encoder_embed_dim, + img_size=image_size, + mlp_ratio=4, + norm_layer=partial(torch.nn.LayerNorm, eps=1e-6), + num_heads=encoder_num_heads, + patch_size=vit_patch_size, + qkv_bias=True, + use_rel_pos=True, + use_abs_pos = False, + + global_attn_indexes=encoder_global_attn_indexes, + window_size=14, + out_chans=prompt_embed_dim, + ) + return image_encoder + +def vit_encoder_b(): + prompt_embed_dim = 256 + image_size = 1024 + vit_patch_size = 16 + image_embedding_size = image_size // vit_patch_size + + + encoder_embed_dim=768 + encoder_depth=12 + encoder_num_heads=12 + encoder_global_attn_indexes=[2, 5, 8, 11] + + image_encoder=ImageEncoderViT( + depth=encoder_depth, + embed_dim=encoder_embed_dim, + img_size=image_size, + mlp_ratio=4, + norm_layer=partial(torch.nn.LayerNorm, eps=1e-6), + num_heads=encoder_num_heads, + patch_size=vit_patch_size, + qkv_bias=True, + use_rel_pos=True, + use_abs_pos = False, + global_attn_indexes=encoder_global_attn_indexes, + window_size=14, + out_chans=prompt_embed_dim, + ) + return image_encoder + +def vit_encoder_h(): + prompt_embed_dim = 256 + image_size = 1024 + vit_patch_size = 16 + image_embedding_size = image_size // vit_patch_size + + + encoder_embed_dim=1280 + encoder_depth=32 + encoder_num_heads=16 + encoder_global_attn_indexes=[7, 15, 23, 31] + + 
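# --- Reference note (annotation, not part of the module) --------------------
# The three builders in this file differ only in the hyper-parameters below
# (values copied from the surrounding code); the 1024 image size, patch size 16,
# window size 14 and 256-dim output channels are shared.
VIT_ENCODER_CONFIGS = {
    "vit_encoder_b": dict(embed_dim=768,  depth=12, num_heads=12, global_attn_indexes=[2, 5, 8, 11]),
    "vit_encoder_l": dict(embed_dim=1024, depth=24, num_heads=16, global_attn_indexes=[5, 11, 17, 23]),
    "vit_encoder_h": dict(embed_dim=1280, depth=32, num_heads=16, global_attn_indexes=[7, 15, 23, 31]),
}
# -----------------------------------------------------------------------------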
image_encoder=ImageEncoderViT( + depth=encoder_depth, + embed_dim=encoder_embed_dim, + img_size=image_size, + mlp_ratio=4, + norm_layer=partial(torch.nn.LayerNorm, eps=1e-6), + num_heads=encoder_num_heads, + patch_size=vit_patch_size, + qkv_bias=True, + use_rel_pos=True, + use_abs_pos = False, + global_attn_indexes=encoder_global_attn_indexes, + window_size=14, + out_chans=prompt_embed_dim, + ) + return image_encoder diff --git a/segment_anything/predictor.py b/segment_anything/predictor.py new file mode 100644 index 0000000000000000000000000000000000000000..8a6e6d816955b4c6097e1de6ce6e4ed3bafe327c --- /dev/null +++ b/segment_anything/predictor.py @@ -0,0 +1,269 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import numpy as np +import torch + +from segment_anything.modeling import Sam + +from typing import Optional, Tuple + +from .utils.transforms import ResizeLongestSide + + +class SamPredictor: + def __init__( + self, + sam_model: Sam, + ) -> None: + """ + Uses SAM to calculate the image embedding for an image, and then + allow repeated, efficient mask prediction given prompts. + + Arguments: + sam_model (Sam): The model to use for mask prediction. + """ + super().__init__() + self.model = sam_model + self.transform = ResizeLongestSide(sam_model.image_encoder.img_size) + self.reset_image() + + def set_image( + self, + image: np.ndarray, + image_format: str = "RGB", + ) -> None: + """ + Calculates the image embeddings for the provided image, allowing + masks to be predicted with the 'predict' method. + + Arguments: + image (np.ndarray): The image for calculating masks. Expects an + image in HWC uint8 format, with pixel values in [0, 255]. + image_format (str): The color format of the image, in ['RGB', 'BGR']. + """ + assert image_format in [ + "RGB", + "BGR", + ], f"image_format must be in ['RGB', 'BGR'], is {image_format}." + if image_format != self.model.image_format: + image = image[..., ::-1] + + # Transform the image to the form expected by the model + input_image = self.transform.apply_image(image) + input_image_torch = torch.as_tensor(input_image, device=self.device) + input_image_torch = input_image_torch.permute(2, 0, 1).contiguous()[None, :, :, :] + + self.set_torch_image(input_image_torch, image.shape[:2]) + + @torch.no_grad() + def set_torch_image( + self, + transformed_image: torch.Tensor, + original_image_size: Tuple[int, ...], + ) -> None: + """ + Calculates the image embeddings for the provided image, allowing + masks to be predicted with the 'predict' method. Expects the input + image to be already transformed to the format expected by the model. + + Arguments: + transformed_image (torch.Tensor): The input image, with shape + 1x3xHxW, which has been transformed with ResizeLongestSide. + original_image_size (tuple(int, int)): The size of the image + before transformation, in (H, W) format. + """ + assert ( + len(transformed_image.shape) == 4 + and transformed_image.shape[1] == 3 + and max(*transformed_image.shape[2:]) == self.model.image_encoder.img_size + ), f"set_torch_image input must be BCHW with long side {self.model.image_encoder.img_size}." 
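# --- Illustrative usage sketch (annotation, not part of the diff) -----------
# Typical SamPredictor flow. `sam` (a constructed Sam model) and `image`
# (an HWC uint8 numpy array) are assumed to exist; the box values are made up.
import numpy as np

predictor = SamPredictor(sam)              # assumption: `sam` is built elsewhere
predictor.set_image(image)                 # the image encoder runs once per image
masks, scores, low_res_logits = predictor.predict(
    box=np.array([30, 40, 200, 260]),      # hypothetical XYXY box prompt
    multimask_output=False,
)
# -----------------------------------------------------------------------------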
+ self.reset_image() + + self.original_size = original_image_size + self.input_size = tuple(transformed_image.shape[-2:]) + input_image = self.model.preprocess(transformed_image) + self.features = self.model.image_encoder(input_image) + self.is_image_set = True + + def predict( + self, + point_coords: Optional[np.ndarray] = None, + point_labels: Optional[np.ndarray] = None, + box: Optional[np.ndarray] = None, + mask_input: Optional[np.ndarray] = None, + multimask_output: bool = True, + return_logits: bool = False, + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + """ + Predict masks for the given input prompts, using the currently set image. + + Arguments: + point_coords (np.ndarray or None): A Nx2 array of point prompts to the + model. Each point is in (X,Y) in pixels. + point_labels (np.ndarray or None): A length N array of labels for the + point prompts. 1 indicates a foreground point and 0 indicates a + background point. + box (np.ndarray or None): A length 4 array given a box prompt to the + model, in XYXY format. + mask_input (np.ndarray): A low resolution mask input to the model, typically + coming from a previous prediction iteration. Has form 1xHxW, where + for SAM, H=W=256. + multimask_output (bool): If true, the model will return three masks. + For ambiguous input prompts (such as a single click), this will often + produce better masks than a single prediction. If only a single + mask is needed, the model's predicted quality score can be used + to select the best mask. For non-ambiguous prompts, such as multiple + input prompts, multimask_output=False can give better results. + return_logits (bool): If true, returns un-thresholded masks logits + instead of a binary mask. + + Returns: + (np.ndarray): The output masks in CxHxW format, where C is the + number of masks, and (H, W) is the original image size. + (np.ndarray): An array of length C containing the model's + predictions for the quality of each mask. + (np.ndarray): An array of shape CxHxW, where C is the number + of masks and H=W=256. These low resolution logits can be passed to + a subsequent iteration as mask input. + """ + if not self.is_image_set: + raise RuntimeError("An image must be set with .set_image(...) before mask prediction.") + + # Transform input prompts + coords_torch, labels_torch, box_torch, mask_input_torch = None, None, None, None + if point_coords is not None: + assert ( + point_labels is not None + ), "point_labels must be supplied if point_coords is supplied." 
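# --- Illustrative sketch (annotation, not part of the diff) -----------------
# What the ResizeLongestSide transform used just below does to a point prompt,
# with made-up sizes: a 512x768 image whose long side is resized to 1024.
old_h, old_w, target_length = 512, 768, 1024
scale = target_length / max(old_h, old_w)
new_h, new_w = int(old_h * scale + 0.5), int(old_w * scale + 0.5)     # 683, 1024
x, y = 100.0, 200.0                                   # pixel coords in the original frame
x_t, y_t = x * (new_w / old_w), y * (new_h / old_h)   # coords in the model's input frame
print((new_h, new_w), (x_t, y_t))
# -----------------------------------------------------------------------------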
+ point_coords = self.transform.apply_coords(point_coords, self.original_size) + coords_torch = torch.as_tensor(point_coords, dtype=torch.float, device=self.device) + labels_torch = torch.as_tensor(point_labels, dtype=torch.int, device=self.device) + coords_torch, labels_torch = coords_torch[None, :, :], labels_torch[None, :] + if box is not None: + box = self.transform.apply_boxes(box, self.original_size) + box_torch = torch.as_tensor(box, dtype=torch.float, device=self.device) + box_torch = box_torch[None, :] + if mask_input is not None: + mask_input_torch = torch.as_tensor(mask_input, dtype=torch.float, device=self.device) + mask_input_torch = mask_input_torch[None, :, :, :] + + masks, iou_predictions, low_res_masks = self.predict_torch( + coords_torch, + labels_torch, + box_torch, + mask_input_torch, + multimask_output, + return_logits=return_logits, + ) + + masks_np = masks[0].detach().cpu().numpy() + iou_predictions_np = iou_predictions[0].detach().cpu().numpy() + low_res_masks_np = low_res_masks[0].detach().cpu().numpy() + return masks_np, iou_predictions_np, low_res_masks_np + + @torch.no_grad() + def predict_torch( + self, + point_coords: Optional[torch.Tensor], + point_labels: Optional[torch.Tensor], + boxes: Optional[torch.Tensor] = None, + mask_input: Optional[torch.Tensor] = None, + multimask_output: bool = True, + return_logits: bool = False, + ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + """ + Predict masks for the given input prompts, using the currently set image. + Input prompts are batched torch tensors and are expected to already be + transformed to the input frame using ResizeLongestSide. + + Arguments: + point_coords (torch.Tensor or None): A BxNx2 array of point prompts to the + model. Each point is in (X,Y) in pixels. + point_labels (torch.Tensor or None): A BxN array of labels for the + point prompts. 1 indicates a foreground point and 0 indicates a + background point. + boxes (np.ndarray or None): A Bx4 array given a box prompt to the + model, in XYXY format. + mask_input (np.ndarray): A low resolution mask input to the model, typically + coming from a previous prediction iteration. Has form Bx1xHxW, where + for SAM, H=W=256. Masks returned by a previous iteration of the + predict method do not need further transformation. + multimask_output (bool): If true, the model will return three masks. + For ambiguous input prompts (such as a single click), this will often + produce better masks than a single prediction. If only a single + mask is needed, the model's predicted quality score can be used + to select the best mask. For non-ambiguous prompts, such as multiple + input prompts, multimask_output=False can give better results. + return_logits (bool): If true, returns un-thresholded masks logits + instead of a binary mask. + + Returns: + (torch.Tensor): The output masks in BxCxHxW format, where C is the + number of masks, and (H, W) is the original image size. + (torch.Tensor): An array of shape BxC containing the model's + predictions for the quality of each mask. + (torch.Tensor): An array of shape BxCxHxW, where C is the number + of masks and H=W=256. These low res logits can be passed to + a subsequent iteration as mask input. + """ + if not self.is_image_set: + raise RuntimeError("An image must be set with .set_image(...) 
before mask prediction.") + + if point_coords is not None: + points = (point_coords, point_labels) + else: + points = None + + # Embed prompts + sparse_embeddings, dense_embeddings = self.model.prompt_encoder( + points=points, + boxes=boxes, + masks=mask_input, + ) + + # Predict masks + low_res_masks, iou_predictions = self.model.mask_decoder( + image_embeddings=self.features, + image_pe=self.model.prompt_encoder.get_dense_pe(), + sparse_prompt_embeddings=sparse_embeddings, + dense_prompt_embeddings=dense_embeddings, + multimask_output=multimask_output, + ) + + # Upscale the masks to the original image resolution + masks = self.model.postprocess_masks(low_res_masks, self.input_size, self.original_size) + + if not return_logits: + masks = masks > self.model.mask_threshold + + return masks, iou_predictions, low_res_masks + + def get_image_embedding(self) -> torch.Tensor: + """ + Returns the image embeddings for the currently set image, with + shape 1xCxHxW, where C is the embedding dimension and (H,W) are + the embedding spatial dimension of SAM (typically C=256, H=W=64). + """ + if not self.is_image_set: + raise RuntimeError( + "An image must be set with .set_image(...) to generate an embedding." + ) + assert self.features is not None, "Features must exist if an image has been set." + return self.features + + @property + def device(self) -> torch.device: + return self.model.device + + def reset_image(self) -> None: + """Resets the currently set image.""" + self.is_image_set = False + self.features = None + self.orig_h = None + self.orig_w = None + self.input_h = None + self.input_w = None diff --git a/segment_anything/utils/__init__.py b/segment_anything/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5277f46157403e47fd830fc519144b97ef69d4ae --- /dev/null +++ b/segment_anything/utils/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. diff --git a/segment_anything/utils/amg.py b/segment_anything/utils/amg.py new file mode 100644 index 0000000000000000000000000000000000000000..be064071ef399fea96c673ad173689656c23534a --- /dev/null +++ b/segment_anything/utils/amg.py @@ -0,0 +1,346 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import numpy as np +import torch + +import math +from copy import deepcopy +from itertools import product +from typing import Any, Dict, Generator, ItemsView, List, Tuple + + +class MaskData: + """ + A structure for storing masks and their related data in batched format. + Implements basic filtering and concatenation. + """ + + def __init__(self, **kwargs) -> None: + for v in kwargs.values(): + assert isinstance( + v, (list, np.ndarray, torch.Tensor) + ), "MaskData only supports list, numpy arrays, and torch tensors." + self._stats = dict(**kwargs) + + def __setitem__(self, key: str, item: Any) -> None: + assert isinstance( + item, (list, np.ndarray, torch.Tensor) + ), "MaskData only supports list, numpy arrays, and torch tensors." 
+ self._stats[key] = item + + def __delitem__(self, key: str) -> None: + del self._stats[key] + + def __getitem__(self, key: str) -> Any: + return self._stats[key] + + def items(self) -> ItemsView[str, Any]: + return self._stats.items() + + def filter(self, keep: torch.Tensor) -> None: + for k, v in self._stats.items(): + if v is None: + self._stats[k] = None + elif isinstance(v, torch.Tensor): + self._stats[k] = v[torch.as_tensor(keep, device=v.device)] + elif isinstance(v, np.ndarray): + self._stats[k] = v[keep.detach().cpu().numpy()] + elif isinstance(v, list) and keep.dtype == torch.bool: + self._stats[k] = [a for i, a in enumerate(v) if keep[i]] + elif isinstance(v, list): + self._stats[k] = [v[i] for i in keep] + else: + raise TypeError(f"MaskData key {k} has an unsupported type {type(v)}.") + + def cat(self, new_stats: "MaskData") -> None: + for k, v in new_stats.items(): + if k not in self._stats or self._stats[k] is None: + self._stats[k] = deepcopy(v) + elif isinstance(v, torch.Tensor): + self._stats[k] = torch.cat([self._stats[k], v], dim=0) + elif isinstance(v, np.ndarray): + self._stats[k] = np.concatenate([self._stats[k], v], axis=0) + elif isinstance(v, list): + self._stats[k] = self._stats[k] + deepcopy(v) + else: + raise TypeError(f"MaskData key {k} has an unsupported type {type(v)}.") + + def to_numpy(self) -> None: + for k, v in self._stats.items(): + if isinstance(v, torch.Tensor): + self._stats[k] = v.detach().cpu().numpy() + + +def is_box_near_crop_edge( + boxes: torch.Tensor, crop_box: List[int], orig_box: List[int], atol: float = 20.0 +) -> torch.Tensor: + """Filter masks at the edge of a crop, but not at the edge of the original image.""" + crop_box_torch = torch.as_tensor(crop_box, dtype=torch.float, device=boxes.device) + orig_box_torch = torch.as_tensor(orig_box, dtype=torch.float, device=boxes.device) + boxes = uncrop_boxes_xyxy(boxes, crop_box).float() + near_crop_edge = torch.isclose(boxes, crop_box_torch[None, :], atol=atol, rtol=0) + near_image_edge = torch.isclose(boxes, orig_box_torch[None, :], atol=atol, rtol=0) + near_crop_edge = torch.logical_and(near_crop_edge, ~near_image_edge) + return torch.any(near_crop_edge, dim=1) + + +def box_xyxy_to_xywh(box_xyxy: torch.Tensor) -> torch.Tensor: + box_xywh = deepcopy(box_xyxy) + box_xywh[2] = box_xywh[2] - box_xywh[0] + box_xywh[3] = box_xywh[3] - box_xywh[1] + return box_xywh + + +def batch_iterator(batch_size: int, *args) -> Generator[List[Any], None, None]: + assert len(args) > 0 and all( + len(a) == len(args[0]) for a in args + ), "Batched iteration must have inputs of all the same size." + n_batches = len(args[0]) // batch_size + int(len(args[0]) % batch_size != 0) + for b in range(n_batches): + yield [arg[b * batch_size : (b + 1) * batch_size] for arg in args] + + +def mask_to_rle_pytorch(tensor: torch.Tensor) -> List[Dict[str, Any]]: + """ + Encodes masks to an uncompressed RLE, in the format expected by + pycoco tools. 
+ """ + # Put in fortran order and flatten h,w + b, h, w = tensor.shape + tensor = tensor.permute(0, 2, 1).flatten(1) + + # Compute change indices + diff = tensor[:, 1:] ^ tensor[:, :-1] + change_indices = diff.nonzero() + + # Encode run length + out = [] + for i in range(b): + cur_idxs = change_indices[change_indices[:, 0] == i, 1] + cur_idxs = torch.cat( + [ + torch.tensor([0], dtype=cur_idxs.dtype, device=cur_idxs.device), + cur_idxs + 1, + torch.tensor([h * w], dtype=cur_idxs.dtype, device=cur_idxs.device), + ] + ) + btw_idxs = cur_idxs[1:] - cur_idxs[:-1] + counts = [] if tensor[i, 0] == 0 else [0] + counts.extend(btw_idxs.detach().cpu().tolist()) + out.append({"size": [h, w], "counts": counts}) + return out + + +def rle_to_mask(rle: Dict[str, Any]) -> np.ndarray: + """Compute a binary mask from an uncompressed RLE.""" + h, w = rle["size"] + mask = np.empty(h * w, dtype=bool) + idx = 0 + parity = False + for count in rle["counts"]: + mask[idx : idx + count] = parity + idx += count + parity ^= True + mask = mask.reshape(w, h) + return mask.transpose() # Put in C order + + +def area_from_rle(rle: Dict[str, Any]) -> int: + return sum(rle["counts"][1::2]) + + +def calculate_stability_score( + masks: torch.Tensor, mask_threshold: float, threshold_offset: float +) -> torch.Tensor: + """ + Computes the stability score for a batch of masks. The stability + score is the IoU between the binary masks obtained by thresholding + the predicted mask logits at high and low values. + """ + # One mask is always contained inside the other. + # Save memory by preventing unnecessary cast to torch.int64 + intersections = ( + (masks > (mask_threshold + threshold_offset)) + .sum(-1, dtype=torch.int16) + .sum(-1, dtype=torch.int32) + ) + unions = ( + (masks > (mask_threshold - threshold_offset)) + .sum(-1, dtype=torch.int16) + .sum(-1, dtype=torch.int32) + ) + return intersections / unions + + +def build_point_grid(n_per_side: int) -> np.ndarray: + """Generates a 2D grid of points evenly spaced in [0,1]x[0,1].""" + offset = 1 / (2 * n_per_side) + points_one_side = np.linspace(offset, 1 - offset, n_per_side) + points_x = np.tile(points_one_side[None, :], (n_per_side, 1)) + points_y = np.tile(points_one_side[:, None], (1, n_per_side)) + points = np.stack([points_x, points_y], axis=-1).reshape(-1, 2) + return points + + +def build_all_layer_point_grids( + n_per_side: int, n_layers: int, scale_per_layer: int +) -> List[np.ndarray]: + """Generates point grids for all crop layers.""" + points_by_layer = [] + for i in range(n_layers + 1): + n_points = int(n_per_side / (scale_per_layer**i)) + points_by_layer.append(build_point_grid(n_points)) + return points_by_layer + + +def generate_crop_boxes( + im_size: Tuple[int, ...], n_layers: int, overlap_ratio: float +) -> Tuple[List[List[int]], List[int]]: + """ + Generates a list of crop boxes of different sizes. Each layer + has (2**i)**2 boxes for the ith layer. 
+ """ + crop_boxes, layer_idxs = [], [] + im_h, im_w = im_size + short_side = min(im_h, im_w) + + # Original image + crop_boxes.append([0, 0, im_w, im_h]) + layer_idxs.append(0) + + def crop_len(orig_len, n_crops, overlap): + return int(math.ceil((overlap * (n_crops - 1) + orig_len) / n_crops)) + + for i_layer in range(n_layers): + n_crops_per_side = 2 ** (i_layer + 1) + overlap = int(overlap_ratio * short_side * (2 / n_crops_per_side)) + + crop_w = crop_len(im_w, n_crops_per_side, overlap) + crop_h = crop_len(im_h, n_crops_per_side, overlap) + + crop_box_x0 = [int((crop_w - overlap) * i) for i in range(n_crops_per_side)] + crop_box_y0 = [int((crop_h - overlap) * i) for i in range(n_crops_per_side)] + + # Crops in XYWH format + for x0, y0 in product(crop_box_x0, crop_box_y0): + box = [x0, y0, min(x0 + crop_w, im_w), min(y0 + crop_h, im_h)] + crop_boxes.append(box) + layer_idxs.append(i_layer + 1) + + return crop_boxes, layer_idxs + + +def uncrop_boxes_xyxy(boxes: torch.Tensor, crop_box: List[int]) -> torch.Tensor: + x0, y0, _, _ = crop_box + offset = torch.tensor([[x0, y0, x0, y0]], device=boxes.device) + # Check if boxes has a channel dimension + if len(boxes.shape) == 3: + offset = offset.unsqueeze(1) + return boxes + offset + + +def uncrop_points(points: torch.Tensor, crop_box: List[int]) -> torch.Tensor: + x0, y0, _, _ = crop_box + offset = torch.tensor([[x0, y0]], device=points.device) + # Check if points has a channel dimension + if len(points.shape) == 3: + offset = offset.unsqueeze(1) + return points + offset + + +def uncrop_masks( + masks: torch.Tensor, crop_box: List[int], orig_h: int, orig_w: int +) -> torch.Tensor: + x0, y0, x1, y1 = crop_box + if x0 == 0 and y0 == 0 and x1 == orig_w and y1 == orig_h: + return masks + # Coordinate transform masks + pad_x, pad_y = orig_w - (x1 - x0), orig_h - (y1 - y0) + pad = (x0, pad_x - x0, y0, pad_y - y0) + return torch.nn.functional.pad(masks, pad, value=0) + + +def remove_small_regions( + mask: np.ndarray, area_thresh: float, mode: str +) -> Tuple[np.ndarray, bool]: + """ + Removes small disconnected regions and holes in a mask. Returns the + mask and an indicator of if the mask has been modified. + """ + import cv2 # type: ignore + + assert mode in ["holes", "islands"] + correct_holes = mode == "holes" + working_mask = (correct_holes ^ mask).astype(np.uint8) + n_labels, regions, stats, _ = cv2.connectedComponentsWithStats(working_mask, 8) + sizes = stats[:, -1][1:] # Row 0 is background label + small_regions = [i + 1 for i, s in enumerate(sizes) if s < area_thresh] + if len(small_regions) == 0: + return mask, False + fill_labels = [0] + small_regions + if not correct_holes: + fill_labels = [i for i in range(n_labels) if i not in fill_labels] + # If every region is below threshold, keep largest + if len(fill_labels) == 0: + fill_labels = [int(np.argmax(sizes)) + 1] + mask = np.isin(regions, fill_labels) + return mask, True + + +def coco_encode_rle(uncompressed_rle: Dict[str, Any]) -> Dict[str, Any]: + from pycocotools import mask as mask_utils # type: ignore + + h, w = uncompressed_rle["size"] + rle = mask_utils.frPyObjects(uncompressed_rle, h, w) + rle["counts"] = rle["counts"].decode("utf-8") # Necessary to serialize with json + return rle + + +def batched_mask_to_box(masks: torch.Tensor) -> torch.Tensor: + """ + Calculates boxes in XYXY format around masks. Return [0,0,0,0] for + an empty mask. For input shape C1xC2x...xHxW, the output shape is C1xC2x...x4. 
+ """ + # torch.max below raises an error on empty inputs, just skip in this case + if torch.numel(masks) == 0: + return torch.zeros(*masks.shape[:-2], 4, device=masks.device) + + # Normalize shape to CxHxW + shape = masks.shape + h, w = shape[-2:] + if len(shape) > 2: + masks = masks.flatten(0, -3) + else: + masks = masks.unsqueeze(0) + + # Get top and bottom edges + in_height, _ = torch.max(masks, dim=-1) + in_height_coords = in_height * torch.arange(h, device=in_height.device)[None, :] + bottom_edges, _ = torch.max(in_height_coords, dim=-1) + in_height_coords = in_height_coords + h * (~in_height) + top_edges, _ = torch.min(in_height_coords, dim=-1) + + # Get left and right edges + in_width, _ = torch.max(masks, dim=-2) + in_width_coords = in_width * torch.arange(w, device=in_width.device)[None, :] + right_edges, _ = torch.max(in_width_coords, dim=-1) + in_width_coords = in_width_coords + w * (~in_width) + left_edges, _ = torch.min(in_width_coords, dim=-1) + + # If the mask is empty the right edge will be to the left of the left edge. + # Replace these boxes with [0, 0, 0, 0] + empty_filter = (right_edges < left_edges) | (bottom_edges < top_edges) + out = torch.stack([left_edges, top_edges, right_edges, bottom_edges], dim=-1) + out = out * (~empty_filter).unsqueeze(-1) + + # Return to original shape + if len(shape) > 2: + out = out.reshape(*shape[:-2], 4) + else: + out = out[0] + + return out diff --git a/segment_anything/utils/onnx.py b/segment_anything/utils/onnx.py new file mode 100644 index 0000000000000000000000000000000000000000..3196bdf4b782e6eeb3da4ad66ef3c7b1741535fe --- /dev/null +++ b/segment_anything/utils/onnx.py @@ -0,0 +1,144 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import torch +import torch.nn as nn +from torch.nn import functional as F + +from typing import Tuple + +from ..modeling import Sam +from .amg import calculate_stability_score + + +class SamOnnxModel(nn.Module): + """ + This model should not be called directly, but is used in ONNX export. + It combines the prompt encoder, mask decoder, and mask postprocessing of Sam, + with some functions modified to enable model tracing. Also supports extra + options controlling what information. See the ONNX export script for details. 
+ """ + + def __init__( + self, + model: Sam, + return_single_mask: bool, + use_stability_score: bool = False, + return_extra_metrics: bool = False, + ) -> None: + super().__init__() + self.mask_decoder = model.mask_decoder + self.model = model + self.img_size = model.image_encoder.img_size + self.return_single_mask = return_single_mask + self.use_stability_score = use_stability_score + self.stability_score_offset = 1.0 + self.return_extra_metrics = return_extra_metrics + + @staticmethod + def resize_longest_image_size( + input_image_size: torch.Tensor, longest_side: int + ) -> torch.Tensor: + input_image_size = input_image_size.to(torch.float32) + scale = longest_side / torch.max(input_image_size) + transformed_size = scale * input_image_size + transformed_size = torch.floor(transformed_size + 0.5).to(torch.int64) + return transformed_size + + def _embed_points(self, point_coords: torch.Tensor, point_labels: torch.Tensor) -> torch.Tensor: + point_coords = point_coords + 0.5 + point_coords = point_coords / self.img_size + point_embedding = self.model.prompt_encoder.pe_layer._pe_encoding(point_coords) + point_labels = point_labels.unsqueeze(-1).expand_as(point_embedding) + + point_embedding = point_embedding * (point_labels != -1) + point_embedding = point_embedding + self.model.prompt_encoder.not_a_point_embed.weight * ( + point_labels == -1 + ) + + for i in range(self.model.prompt_encoder.num_point_embeddings): + point_embedding = point_embedding + self.model.prompt_encoder.point_embeddings[ + i + ].weight * (point_labels == i) + + return point_embedding + + def _embed_masks(self, input_mask: torch.Tensor, has_mask_input: torch.Tensor) -> torch.Tensor: + mask_embedding = has_mask_input * self.model.prompt_encoder.mask_downscaling(input_mask) + mask_embedding = mask_embedding + ( + 1 - has_mask_input + ) * self.model.prompt_encoder.no_mask_embed.weight.reshape(1, -1, 1, 1) + return mask_embedding + + def mask_postprocessing(self, masks: torch.Tensor, orig_im_size: torch.Tensor) -> torch.Tensor: + masks = F.interpolate( + masks, + size=(self.img_size, self.img_size), + mode="bilinear", + align_corners=False, + ) + + prepadded_size = self.resize_longest_image_size(orig_im_size, self.img_size).to(torch.int64) + masks = masks[..., : prepadded_size[0], : prepadded_size[1]] # type: ignore + + orig_im_size = orig_im_size.to(torch.int64) + h, w = orig_im_size[0], orig_im_size[1] + masks = F.interpolate(masks, size=(h, w), mode="bilinear", align_corners=False) + return masks + + def select_masks( + self, masks: torch.Tensor, iou_preds: torch.Tensor, num_points: int + ) -> Tuple[torch.Tensor, torch.Tensor]: + # Determine if we should return the multiclick mask or not from the number of points. + # The reweighting is used to avoid control flow. 
+ score_reweight = torch.tensor( + [[1000] + [0] * (self.model.mask_decoder.num_mask_tokens - 1)] + ).to(iou_preds.device) + score = iou_preds + (num_points - 2.5) * score_reweight + best_idx = torch.argmax(score, dim=1) + masks = masks[torch.arange(masks.shape[0]), best_idx, :, :].unsqueeze(1) + iou_preds = iou_preds[torch.arange(masks.shape[0]), best_idx].unsqueeze(1) + + return masks, iou_preds + + @torch.no_grad() + def forward( + self, + image_embeddings: torch.Tensor, + point_coords: torch.Tensor, + point_labels: torch.Tensor, + mask_input: torch.Tensor, + has_mask_input: torch.Tensor, + orig_im_size: torch.Tensor, + ): + sparse_embedding = self._embed_points(point_coords, point_labels) + dense_embedding = self._embed_masks(mask_input, has_mask_input) + + masks, scores = self.model.mask_decoder.predict_masks( + image_embeddings=image_embeddings, + image_pe=self.model.prompt_encoder.get_dense_pe(), + sparse_prompt_embeddings=sparse_embedding, + dense_prompt_embeddings=dense_embedding, + ) + + if self.use_stability_score: + scores = calculate_stability_score( + masks, self.model.mask_threshold, self.stability_score_offset + ) + + if self.return_single_mask: + masks, scores = self.select_masks(masks, scores, point_coords.shape[1]) + + upscaled_masks = self.mask_postprocessing(masks, orig_im_size) + + if self.return_extra_metrics: + stability_scores = calculate_stability_score( + upscaled_masks, self.model.mask_threshold, self.stability_score_offset + ) + areas = (upscaled_masks > self.model.mask_threshold).sum(-1).sum(-1) + return upscaled_masks, scores, stability_scores, areas, masks + + return upscaled_masks, scores, masks diff --git a/segment_anything/utils/transforms.py b/segment_anything/utils/transforms.py new file mode 100644 index 0000000000000000000000000000000000000000..c08ba1e3db751f3a5483a003be38c69c2cf2df85 --- /dev/null +++ b/segment_anything/utils/transforms.py @@ -0,0 +1,102 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import numpy as np +import torch +from torch.nn import functional as F +from torchvision.transforms.functional import resize, to_pil_image # type: ignore + +from copy import deepcopy +from typing import Tuple + + +class ResizeLongestSide: + """ + Resizes images to the longest side 'target_length', as well as provides + methods for resizing coordinates and boxes. Provides methods for + transforming both numpy array and batched torch tensors. + """ + + def __init__(self, target_length: int) -> None: + self.target_length = target_length + + def apply_image(self, image: np.ndarray) -> np.ndarray: + """ + Expects a numpy array with shape HxWxC in uint8 format. + """ + target_size = self.get_preprocess_shape(image.shape[0], image.shape[1], self.target_length) + return np.array(resize(to_pil_image(image), target_size)) + + def apply_coords(self, coords: np.ndarray, original_size: Tuple[int, ...]) -> np.ndarray: + """ + Expects a numpy array of length 2 in the final dimension. Requires the + original image size in (H, W) format. 
+ """ + old_h, old_w = original_size + new_h, new_w = self.get_preprocess_shape( + original_size[0], original_size[1], self.target_length + ) + coords = deepcopy(coords).astype(float) + coords[..., 0] = coords[..., 0] * (new_w / old_w) + coords[..., 1] = coords[..., 1] * (new_h / old_h) + return coords + + def apply_boxes(self, boxes: np.ndarray, original_size: Tuple[int, ...]) -> np.ndarray: + """ + Expects a numpy array shape Bx4. Requires the original image size + in (H, W) format. + """ + boxes = self.apply_coords(boxes.reshape(-1, 2, 2), original_size) + return boxes.reshape(-1, 4) + + def apply_image_torch(self, image: torch.Tensor) -> torch.Tensor: + """ + Expects batched images with shape BxCxHxW and float format. This + transformation may not exactly match apply_image. apply_image is + the transformation expected by the model. + """ + # Expects an image in BCHW format. May not exactly match apply_image. + target_size = self.get_preprocess_shape(image.shape[2], image.shape[3], self.target_length) + return F.interpolate( + image, target_size, mode="bilinear", align_corners=False, antialias=True + ) + + def apply_coords_torch( + self, coords: torch.Tensor, original_size: Tuple[int, ...] + ) -> torch.Tensor: + """ + Expects a torch tensor with length 2 in the last dimension. Requires the + original image size in (H, W) format. + """ + old_h, old_w = original_size + new_h, new_w = self.get_preprocess_shape( + original_size[0], original_size[1], self.target_length + ) + coords = deepcopy(coords).to(torch.float) + coords[..., 0] = coords[..., 0] * (new_w / old_w) + coords[..., 1] = coords[..., 1] * (new_h / old_h) + return coords + + def apply_boxes_torch( + self, boxes: torch.Tensor, original_size: Tuple[int, ...] + ) -> torch.Tensor: + """ + Expects a torch tensor with shape Bx4. Requires the original image + size in (H, W) format. + """ + boxes = self.apply_coords_torch(boxes.reshape(-1, 2, 2), original_size) + return boxes.reshape(-1, 4) + + @staticmethod + def get_preprocess_shape(oldh: int, oldw: int, long_side_length: int) -> Tuple[int, int]: + """ + Compute the output size given input size and target long side length. 
+ """ + scale = long_side_length * 1.0 / max(oldh, oldw) + newh, neww = oldh * scale, oldw * scale + neww = int(neww + 0.5) + newh = int(newh + 0.5) + return (newh, neww) diff --git a/segmentation_2d/LVMMed_SAM_2d.py b/segmentation_2d/LVMMed_SAM_2d.py new file mode 100644 index 0000000000000000000000000000000000000000..d50880c5f66d3074b074434b314ffc1e1955ea2c --- /dev/null +++ b/segmentation_2d/LVMMed_SAM_2d.py @@ -0,0 +1,312 @@ +import numpy as np +import os +join = os.path.join +import gc +from tqdm import tqdm +import torch +import monai, random +from dataloader.sam_transforms import ResizeLongestSide +from segment_anything import ( + sam_model_registry, + our_vit +) +from dataloader.dataloader import sam_dataloader +from utils.SurfaceDice import compute_dice_coefficient + +def fit(cfg, + sam_model, + lvm_med_backbone, + train_loader, + valid_loader, + optimizer, + criterion, + model_save_path): + """ + Function to fit model + """ + + best_valid_dice = 0 + + device = cfg.base.gpu_id + num_epochs = cfg.train.num_epochs + + for epoch in range(num_epochs): + sam_model.train() + + epoch_loss = 0 + valid_dice = 0 + + print(f"Epoch #{epoch+1}/{num_epochs}") + for step, batch in enumerate(tqdm(train_loader, desc='Model training', unit='batch', leave=True)): + """ + We load preprocessed images, mask labels and bounding boxes directly computed from ground truth masks + """ + image, true_mask, boxes = batch['image'], batch['mask'], batch['bboxes'] + sam_model = sam_model.to(f"cuda:{device}") + image = image.to(f"cuda:{device}") + true_mask = true_mask.to(f"cuda:{device}") + + """ + We freeze image encoder & prompt encoder, only finetune mask decoder + """ + with torch.no_grad(): + """ + Compute image embeddings from a batch of images with our LVM Med's frozen encoder + """ + encoder = torch.nn.DataParallel(lvm_med_backbone, device_ids=[3, 2, 1, 0], output_device=device) + encoder = encoder.to(f"cuda:{encoder.device_ids[0]}") + sam_model = sam_model.to(f"cuda:{encoder.device_ids[0]}") + image = image.to(f"cuda:{encoder.device_ids[0]}") + image = sam_model.preprocess(image[:, :, :]) + image_embedding = encoder(image) + + """ + Get bounding boxes to make segmentation prediction + We follow the work by Jun Ma & Bo Wang in Segment Anything in Medical Images (2023) + to get bounding boxes from the masks as the boxes prompt for SAM + """ + box_np = boxes.numpy() + sam_trans = ResizeLongestSide(sam_model.image_encoder.img_size) + box = sam_trans.apply_boxes(box_np, (true_mask.shape[-2], true_mask.shape[-1])) + box_torch = torch.as_tensor(box, dtype=torch.float, device=f"cuda:{device}") + if len(box_torch.shape) == 2: + box_torch = box_torch[:, None, :] # (B, 1, 4) + + """ + Encode box prompts information with SAM's frozen prompt encoder + """ + prompt_encoder = torch.nn.DataParallel(sam_model.prompt_encoder, device_ids=[0,1,2,3], output_device=device) + prompt_encoder = prompt_encoder.to(f"cuda:{prompt_encoder.device_ids[0]}") + box_torch = box_torch.to(f"cuda:{prompt_encoder.device_ids[0]}") + sparse_embeddings, dense_embeddings = prompt_encoder( + points=None, + boxes=box_torch, + masks=None, + ) + + """ + We now finetune mask decoder + """ + sam_model = sam_model.to(f"cuda:{device}") + predicted_mask, iou_predictions = sam_model.mask_decoder( + image_embeddings=image_embedding.to(f"cuda:{device}"), # (B, 256, 64, 64) + image_pe=sam_model.prompt_encoder.get_dense_pe(), # (1, 256, 64, 64) + sparse_prompt_embeddings=sparse_embeddings, # (B, 2, 256) + dense_prompt_embeddings=dense_embeddings, # (B, 256, 
64, 64) + multimask_output=False, + ) # -> (B, 1, 256, 256) + + predicted_mask = predicted_mask.to(f"cuda:{device}") + true_mask = true_mask.to(f"cuda:{device}") + loss = criterion(predicted_mask, true_mask) + + """ + Upgrade model's params + """ + optimizer.zero_grad(set_to_none=True) + loss.backward() + + clip_value = 1 # Clip gradient + torch.nn.utils.clip_grad_norm_(sam_model.mask_decoder.parameters(), clip_value) + + optimizer.step() + epoch_loss += loss.item() + + """ + Validation step with Dice as the metric + """ + with torch.no_grad(): + valid_dice = eval_dice(sam_model, + lvm_med_backbone, + valid_loader, + device=device) + + epoch_loss /= ((step + 1) * len(train_loader)) + print(f'Loss: {epoch_loss}\n---') + + """ + Save best model + """ + if best_valid_dice < valid_dice: + best_valid_dice = valid_dice + torch.save(sam_model.state_dict(), join(model_save_path, f'{cfg.base.best_valid_model_checkpoint}{cfg.base.random_seed}.pth')) + + print(f"Valid dice: {valid_dice*100}") + print('=======================================') + + print(f"Best valid dice: {best_valid_dice*100}") + +#%% test +def eval_dice(sam_model, + lvm_med_backbone, + loader, + device): + + """ + Function to evaluate model (for both validation and testing phase) + """ + + gc.collect() + torch.cuda.empty_cache() + torch.cuda.reset_peak_memory_stats() + + dice_score = 0. + for _, batch in enumerate(tqdm(loader, leave=False)): + """ + Load precomputed embeddings, mask labels and bounding boxes computed directly from ground truth masks + """ + image, true_mask, boxes = batch['image'], batch['mask'], batch['bboxes'] + image = image.to(f"cuda:{device}") + true_mask = true_mask.to(f"cuda:{device}", dtype=torch.float32) + + """ + Compute image embeddings + """ + encoder = torch.nn.DataParallel(lvm_med_backbone, device_ids=[3, 2, 1, 0], output_device=device) + encoder = encoder.to(f"cuda:{encoder.device_ids[0]}") + sam_model = sam_model.to(f"cuda:{encoder.device_ids[0]}") + image = image.to(f"cuda:{encoder.device_ids[0]}") + image = sam_model.preprocess(image[:, :, :]) + image_embedding = encoder(image) + + """ + Get bboxes + """ + box_np = boxes.numpy() + sam_trans = ResizeLongestSide(sam_model.image_encoder.img_size) + box = sam_trans.apply_boxes(box_np, (image_embedding.shape[0], image_embedding.shape[1])) + box_torch = torch.as_tensor(box, dtype=torch.float32, device=device) + if len(box_torch.shape) == 2: + box_torch = box_torch[:, None, :] # (B, 1, 4) + + """ + Prompt encoder component + """ + prompt_encoder = torch.nn.DataParallel(sam_model.prompt_encoder, device_ids=[0,1,2,3], output_device=device) + prompt_encoder = prompt_encoder.to(f"cuda:{prompt_encoder.device_ids[0]}") + box_torch = box_torch.to(f"cuda:{prompt_encoder.device_ids[0]}") + sparse_embeddings, dense_embeddings = prompt_encoder( + points=None, + boxes=box_torch, + masks=None, + ) + + """ + Mask decoder component + """ + sam_model = sam_model.to(f"cuda:{device}") + mask_segmentation, iou_predictions = sam_model.mask_decoder( + image_embeddings=image_embedding.to(f"cuda:{device}"), # (B, 256, 64, 64) + image_pe=sam_model.prompt_encoder.get_dense_pe(), # (1, 256, 64, 64) + sparse_prompt_embeddings=sparse_embeddings, # (B, 2, 256) + dense_prompt_embeddings=dense_embeddings, # (B, 256, 64, 64) + multimask_output=False, + ) # -> (B, 256, 256) + + """ + Transform prediction and evaluate + """ + true_mask = true_mask.to("cpu") + medsam_seg_prob = torch.sigmoid(mask_segmentation) + medsam_seg_prob = medsam_seg_prob.detach().cpu().numpy().squeeze() + 
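+        # Threshold the sigmoid probabilities at 0.5 to obtain a hard binary mask,
+        # then accumulate the Dice coefficient against the ground-truth mask for this batch.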
medsam_seg = (medsam_seg_prob > 0.5).astype(np.uint8) # transform from hard masks to soft masks + dice_score += compute_dice_coefficient(true_mask>0, medsam_seg>0) + + return dice_score.cpu().numpy()/len(loader) + +def lvm_medsam_2d(yml_args, cfg): + """ + Training warm up + """ + torch.multiprocessing.set_start_method('spawn') + + random.seed(cfg.base.random_seed) + np.random.seed(cfg.base.random_seed) + torch.manual_seed(cfg.base.random_seed) + torch.cuda.manual_seed(cfg.base.random_seed) + + torch.backends.cudnn.deterministic = True + + gc.collect() + torch.cuda.empty_cache() + torch.cuda.reset_peak_memory_stats() + + """ + General configuration + """ + img_shape = (3, 1024) # hard settings image shape as 3 x 1024 x 1024 + model_save_path = join("./work_dir", 'SAM-ViT-B') + os.makedirs(model_save_path, exist_ok=True) + + print(f"LVM-Med (encoder) + tFine-tuned SAM in {cfg.base.dataset_name} with {cfg.train.optimizer}, LR = {cfg.train.learning_rate}") + + """ + Load SAM with its original checkpoint + """ + sam_model = sam_model_registry["vit_b"](checkpoint=cfg.base.original_checkpoint) + + """ + Load precomputed embeddings + """ + train_loader, valid_loader, test_loader, _, _ = sam_dataloader(cfg) + + """ + Optimizer & learning rate scheduler config + """ + if cfg.train.optimizer == 'sgd': + optimizer = torch.optim.SGD(sam_model.mask_decoder.parameters(), + lr=float(cfg.train.learning_rate), + momentum=0.9) + elif cfg.train.optimizer == 'adam': + optimizer = torch.optim.Adam(sam_model.mask_decoder.parameters(), + lr=float(cfg.train.learning_rate), + weight_decay=0, + amsgrad=True) + elif cfg.train.optimizer == 'adamw': + optimizer = torch.optim.AdamW(sam_model.mask_decoder.parameters(), + lr=float(cfg.train.learning_rate), + weight_decay=0) + else: + raise NotImplementedError(f"Optimizer {cfg.train.optimizer} is not set up yet") + + """ + Loss function + In this work, we use a combination of Dice and Cross Entropy Loss to measure SAM's loss values + """ + criterion = monai.losses.DiceCELoss(sigmoid=True, + squared_pred=True, + reduction='mean') + + """ + Declare LVM Med backbone instead of using SAM's backbone + """ + arch = 'vit_encoder_b' + lvm_med_backbone = our_vit.__dict__[arch]() + lvm_weight = torch.load(yml_args.lvm_med_encoder_path, map_location ='cpu') + lvm_med_backbone.load_state_dict(lvm_weight) + + """ + Train model + """ + if not yml_args.use_test_mode: + fit(cfg, + sam_model=sam_model, + lvm_med_backbone=lvm_med_backbone, + train_loader=train_loader, + valid_loader=valid_loader, + optimizer=optimizer, + criterion=criterion, + model_save_path=model_save_path) + + """ + Test model + """ + with torch.no_grad(): + sam_model_test_dice = sam_model_registry["vit_b"](checkpoint=join(model_save_path, f'{cfg.base.best_valid_model_checkpoint}{cfg.base.random_seed}.pth')) + + sam_model_test_dice.eval() + test_dice_score = eval_dice(sam_model_test_dice, + lvm_med_backbone, + test_loader, + device=cfg.base.gpu_id) + print(f"Test dice score after training with {cfg.train.optimizer}(lr = {cfg.train.learning_rate}): {test_dice_score*100}") \ No newline at end of file diff --git a/segmentation_2d/MedSAM_2d.py b/segmentation_2d/MedSAM_2d.py new file mode 100644 index 0000000000000000000000000000000000000000..d7b59a71fb1a3b4703c76f7e6f0d40d859cda100 --- /dev/null +++ b/segmentation_2d/MedSAM_2d.py @@ -0,0 +1,298 @@ +import numpy as np +import os +join = os.path.join +import gc +from tqdm import tqdm +import torch +import monai, random +from dataloader.sam_transforms import ResizeLongestSide 
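+
+# MedSAM-style baseline: SAM ViT-B's own image encoder and prompt encoder stay frozen,
+# and only the mask decoder is fine-tuned with box prompts derived from ground-truth masks.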
+from segment_anything import sam_model_registry +from dataloader.dataloader import sam_dataloader +from utils.SurfaceDice import compute_dice_coefficient + +def fit(cfg, + sam_model, + train_loader, + valid_loader, + optimizer, + criterion, + model_save_path): + """ + Function to fit model + """ + + best_valid_dice = 0 + + device = cfg.base.gpu_id + num_epochs = cfg.train.num_epochs + + for epoch in range(num_epochs): + sam_model.train() + + epoch_loss = 0 + valid_dice = 0 + + print(f"Epoch #{epoch+1}/{num_epochs}") + for step, batch in enumerate(tqdm(train_loader, desc='Model training', unit='batch', leave=True)): + + """ + Load precomputed image embeddings to ease training process + We also load mask labels and bounding boxes directly computed from ground truth masks + """ + image, true_mask, boxes = batch['image'], batch['mask'], batch['bboxes'] + sam_model = sam_model.to(f"cuda:{device}") + image = image.to(f"cuda:{device}") + true_mask = true_mask.to(f"cuda:{device}") + + """ + We freeze image encoder & prompt encoder, only finetune mask decoder + """ + with torch.no_grad(): + """ + Compute image embeddings from a batch of images with SAM's frozen encoder + """ + encoder = torch.nn.DataParallel(sam_model.image_encoder, device_ids=[3, 2, 1, 0], output_device=device) + encoder = encoder.to(f"cuda:{encoder.device_ids[0]}") + sam_model = sam_model.to(f"cuda:{encoder.device_ids[0]}") + image = image.to(f"cuda:{encoder.device_ids[0]}") + image = sam_model.preprocess(image[:, :, :]) + image_embedding = encoder(image) + + """ + Get bounding boxes to make segmentation prediction + We follow the work by Jun Ma & Bo Wang in Segment Anything in Medical Images (2023) + to get bounding boxes from the masks as the boxes prompt for SAM + """ + box_np = boxes.numpy() + sam_trans = ResizeLongestSide(sam_model.image_encoder.img_size) + box = sam_trans.apply_boxes(box_np, (true_mask.shape[-2], true_mask.shape[-1])) + box_torch = torch.as_tensor(box, dtype=torch.float, device=f"cuda:{device}") + if len(box_torch.shape) == 2: + box_torch = box_torch[:, None, :] # (B, 1, 4) + + """ + Encode box prompts information with SAM's frozen prompt encoder + """ + prompt_encoder = torch.nn.DataParallel(sam_model.prompt_encoder, device_ids=[0,1,2,3], output_device=device) + prompt_encoder = prompt_encoder.to(f"cuda:{prompt_encoder.device_ids[0]}") + box_torch = box_torch.to(f"cuda:{prompt_encoder.device_ids[0]}") + sparse_embeddings, dense_embeddings = prompt_encoder( + points=None, + boxes=box_torch, + masks=None, + ) + + """ + We now finetune mask decoder + """ + sam_model = sam_model.to(f"cuda:{device}") + predicted_mask, iou_predictions = sam_model.mask_decoder( + image_embeddings=image_embedding.to(f"cuda:{device}"), # (B, 256, 64, 64) + image_pe=sam_model.prompt_encoder.get_dense_pe(), # (1, 256, 64, 64) + sparse_prompt_embeddings=sparse_embeddings, # (B, 2, 256) + dense_prompt_embeddings=dense_embeddings, # (B, 256, 64, 64) + multimask_output=False, + ) # -> (B, 1, 256, 256) + + predicted_mask = predicted_mask.to(f"cuda:{device}") + true_mask = true_mask.to(f"cuda:{device}") + loss = criterion(predicted_mask, true_mask) + + """ + Upgrade model's params + """ + optimizer.zero_grad(set_to_none=True) + loss.backward() + + clip_value = 1 # Clip gradient + torch.nn.utils.clip_grad_norm_(sam_model.mask_decoder.parameters(), clip_value) + + optimizer.step() + epoch_loss += loss.item() + + """ + Validation step with Dice as the metric + """ + with torch.no_grad(): + valid_dice = eval_dice(sam_model, + valid_loader, + 
device=device) + + epoch_loss /= ((step + 1) * len(train_loader)) + print(f'Loss: {epoch_loss}\n---') + + """ + Save best model + """ + if best_valid_dice < valid_dice: + best_valid_dice = valid_dice + torch.save(sam_model.state_dict(), join(model_save_path, f'{cfg.base.best_valid_model_checkpoint}{cfg.base.random_seed}.pth')) + + print(f"Valid dice: {valid_dice*100}") + print('=======================================') + + print(f"Best valid dice: {best_valid_dice*100}") + +#%% test +def eval_dice(sam_model, + loader, + device): + + """ + Function to evaluate model (for both validation and testing phase) + """ + + gc.collect() + torch.cuda.empty_cache() + torch.cuda.reset_peak_memory_stats() + + dice_score = 0. + for _, batch in enumerate(tqdm(loader, leave=False)): + """ + Load precomputed embeddings, mask labels and bounding boxes computed directly from ground truth masks + """ + image, true_mask, boxes = batch['image'], batch['mask'], batch['bboxes'] + image = image.to(f"cuda:{device}") + true_mask = true_mask.to(f"cuda:{device}", dtype=torch.float32) + + """ + Compute image embeddings + """ + encoder = torch.nn.DataParallel(sam_model.image_encoder, device_ids=[3, 2, 1, 0], output_device=device) + encoder = encoder.to(f"cuda:{encoder.device_ids[0]}") + sam_model = sam_model.to(f"cuda:{encoder.device_ids[0]}") + image = image.to(f"cuda:{encoder.device_ids[0]}") + image = sam_model.preprocess(image[:, :, :]) + image_embedding = encoder(image) + + """ + Get bboxes + """ + box_np = boxes.numpy() + sam_trans = ResizeLongestSide(sam_model.image_encoder.img_size) + box = sam_trans.apply_boxes(box_np, (image_embedding.shape[0], image_embedding.shape[1])) + box_torch = torch.as_tensor(box, dtype=torch.float32, device=device) + if len(box_torch.shape) == 2: + box_torch = box_torch[:, None, :] # (B, 1, 4) + + """ + Prompt encoder component + """ + prompt_encoder = torch.nn.DataParallel(sam_model.prompt_encoder, device_ids=[0,1,2,3], output_device=device) + prompt_encoder = prompt_encoder.to(f"cuda:{prompt_encoder.device_ids[0]}") + box_torch = box_torch.to(f"cuda:{prompt_encoder.device_ids[0]}") + sparse_embeddings, dense_embeddings = prompt_encoder( + points=None, + boxes=box_torch, + masks=None, + ) + + """ + Mask decoder component + """ + sam_model = sam_model.to(f"cuda:{device}") + mask_segmentation, iou_predictions = sam_model.mask_decoder( + image_embeddings=image_embedding.to(f"cuda:{device}"), # (B, 256, 64, 64) + image_pe=sam_model.prompt_encoder.get_dense_pe(), # (1, 256, 64, 64) + sparse_prompt_embeddings=sparse_embeddings, # (B, 2, 256) + dense_prompt_embeddings=dense_embeddings, # (B, 256, 64, 64) + multimask_output=False, + ) # -> (B, 256, 256) + + """ + Transform prediction and evaluate + """ + true_mask = true_mask.to("cpu") + medsam_seg_prob = torch.sigmoid(mask_segmentation) + medsam_seg_prob = medsam_seg_prob.detach().cpu().numpy().squeeze() + medsam_seg = (medsam_seg_prob > 0.5).astype(np.uint8) # transform from hard masks to soft masks + dice_score += compute_dice_coefficient(true_mask>0, medsam_seg>0) + + return dice_score.cpu().numpy()/len(loader) + +def medsam_2d(yml_args, cfg): + """ + Training warm up + """ + torch.multiprocessing.set_start_method('spawn') + + random.seed(cfg.base.random_seed) + np.random.seed(cfg.base.random_seed) + torch.manual_seed(cfg.base.random_seed) + torch.cuda.manual_seed(cfg.base.random_seed) + + torch.backends.cudnn.deterministic = True + + gc.collect() + torch.cuda.empty_cache() + torch.cuda.reset_peak_memory_stats() + + """ + General 
configuration + """ + img_shape = (3, 1024) # hard settings image shape as 3 x 1024 x 1024 + model_save_path = join("./work_dir", 'SAM-ViT-B') + os.makedirs(model_save_path, exist_ok=True) + + print(f"Fine-tuned SAM in {cfg.base.dataset_name} with {cfg.train.optimizer}, LR = {cfg.train.learning_rate}") + + """ + Load SAM with its original checkpoint + """ + sam_model = sam_model_registry["vit_b"](checkpoint=cfg.base.original_checkpoint) + + """ + Load precomputed embeddings + """ + train_loader, valid_loader, test_loader, _, _ = sam_dataloader(cfg) + + """ + Optimizer & learning rate scheduler config + """ + if cfg.train.optimizer == 'sgd': + optimizer = torch.optim.SGD(sam_model.mask_decoder.parameters(), + lr=float(cfg.train.learning_rate), + momentum=0.9) + elif cfg.train.optimizer == 'adam': + optimizer = torch.optim.Adam(sam_model.mask_decoder.parameters(), + lr=float(cfg.train.learning_rate), + weight_decay=0, + amsgrad=True) + elif cfg.train.optimizer == 'adamw': + optimizer = torch.optim.AdamW(sam_model.mask_decoder.parameters(), + lr=float(cfg.train.learning_rate), + weight_decay=0) + else: + raise NotImplementedError(f"Optimizer {cfg.train.optimizer} is not set up yet") + + """ + Loss function + In this work, we use a combination of Dice and Cross Entropy Loss to measure SAM's loss values + """ + criterion = monai.losses.DiceCELoss(sigmoid=True, + squared_pred=True, + reduction='mean') + + """ + Train model + """ + if not yml_args.use_test_mode: + fit(cfg, + sam_model=sam_model, + train_loader=train_loader, + valid_loader=valid_loader, + optimizer=optimizer, + criterion=criterion, + model_save_path=model_save_path) + + """ + Test model + """ + with torch.no_grad(): + sam_model_test_dice = sam_model_registry["vit_b"](checkpoint=join(model_save_path, f'{cfg.base.best_valid_model_checkpoint}{cfg.base.random_seed}.pth')) + + sam_model_test_dice.eval() + test_dice_score = eval_dice(sam_model_test_dice, + test_loader, + device=cfg.base.gpu_id) + print(f"Test dice score after training with {cfg.train.optimizer}(lr = {cfg.train.learning_rate}): {test_dice_score*100}") \ No newline at end of file diff --git a/segmentation_2d/train_R50_seg_adam_optimizer_2d.py b/segmentation_2d/train_R50_seg_adam_optimizer_2d.py new file mode 100644 index 0000000000000000000000000000000000000000..49700cd0b5a352df05618b9f2f076fefbb0b468c --- /dev/null +++ b/segmentation_2d/train_R50_seg_adam_optimizer_2d.py @@ -0,0 +1,278 @@ +import argparse +import logging +import sys +from pathlib import Path +import torch +import torch.nn as nn +import torch.nn.functional as F +import wandb +from torch import optim +from torch.utils.data import DataLoader, random_split +from tqdm import tqdm +from torch.optim.lr_scheduler import ExponentialLR +import os +from dataloader.dataset_ete import SegmentationDataset_train, SegmentationDataset +from utils.endtoend import dice_loss +from utils.func import ( + parse_config, + load_config +) +from evaluate import evaluate, evaluate_3d_iou +#from models.segmentation import UNet +import segmentation_models_pytorch as smp +import numpy as np +import random +num_classes = 2 +np.random.seed(42) +random.seed(42) +torch.manual_seed(42) + +def train_net(net, + cfg, + trial, + device, + epochs: int = 30, + train_batch_size: int = 128, + val_batch_size: int = 128, + learning_rate: float = 0.1, + val_percent: float = 0.1, + save_checkpoint: bool = True, + img_scale = (224, 224), + amp: bool = True, + out_dir : str= './checkpoint/'): + + # 1. 
Create dataset + train_dir_img = Path(cfg.dataloader.train_dir_img) + train_dir_mask = Path(cfg.dataloader.train_dir_mask) + val_dir_img = Path(cfg.dataloader.valid_dir_img) + val_dir_mask = Path(cfg.dataloader.valid_dir_mask) + test_dir_img = Path(cfg.dataloader.test_dir_img) + test_dir_mask = Path(cfg.dataloader.test_dir_mask) + non_label_text = cfg.dataloader.non_label + have_label_text = cfg.dataloader.have_label + + dir_checkpoint = Path(out_dir) + Path(dir_checkpoint).mkdir(parents=True, exist_ok=True) + + train_dataset = SegmentationDataset_train(nonlabel_path= non_label_text, havelabel_path= have_label_text, dataset = cfg.base.dataset_name, scale= img_scale) + val_dataset = SegmentationDataset(name_dataset=cfg.base.dataset_name, images_dir = val_dir_img, masks_dir = val_dir_mask, scale = img_scale) + + test_dataset = SegmentationDataset(name_dataset=cfg.base.dataset_name, images_dir = test_dir_img, masks_dir= test_dir_mask, scale = img_scale) + + n_train = len(train_dataset) + n_val = len(val_dataset) + + # 3. Create data loaders + loader_args = dict(num_workers=10, pin_memory=True) + train_loader = DataLoader(train_dataset, shuffle=True, batch_size=train_batch_size, **loader_args) + import time + + val_loader = DataLoader(val_dataset, shuffle=False, drop_last=True, batch_size=val_batch_size, **loader_args) + test_loader = DataLoader(test_dataset, shuffle=False, drop_last=True, **loader_args) + + experiment = wandb.init(project='U-Net', resume='allow', anonymous='must') + experiment.config.update(dict(epochs=epochs, train_batch_size=train_batch_size, val_batch_size=val_batch_size, learning_rate=learning_rate, + val_percent=val_percent, save_checkpoint=save_checkpoint, img_scale=img_scale, + amp=amp)) + + logging.info(f'''Starting training: + Epochs: {epochs} + Train batch size: {train_batch_size} + Val batch size: {val_batch_size} + Learning rate: {learning_rate} + Training size: {n_train} + Validation size: {n_val} + Checkpoints: {save_checkpoint} + Device: {device.type} + Images scaling: {img_scale} + Mixed Precision: {amp} + ''') + + # 4. Set up the optimizer, the loss, the learning rate scheduler and the loss scaling for AMP + + # optimizer = optim.RMSprop(net.parameters(), lr=learning_rate, weight_decay=1e-8, momentum=0.9) + optimizer = optim.Adam(net.parameters(), lr=learning_rate, betas=(cfg.train.beta1, cfg.train.beta2), eps=1e-08, weight_decay=cfg.train.weight_decay) + if cfg.train.scheduler: + print("Use scheduler") + scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=10, eta_min=1e-05) + # optimizer = optim.SGD(net.parameters(), lr=learning_rate, momentum=0.9, weight_decay=1e-8) + # scheduler = ExponentialLR(optimizer, gamma=1.11) + # optimizer= optim.Adam(net.parameters(), lr=0.001, betas=(0.9, 0.999), eps=1e-08, weight_decay=0, amsgrad=False) + + # scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, 'max', patience=2) # goal: maximize Dice score + grad_scaler = torch.cuda.amp.GradScaler(enabled=amp) + criterion = nn.CrossEntropyLoss() + global_step = 0 + best_value = 0 + # 5. 
Begin training + for epoch in range(epochs): + net.train() + epoch_loss = 0 + with tqdm(total=n_train, desc=f'Epoch {epoch + 1}/{epochs}', unit='img') as pbar: + for batch in train_loader: + images = batch['image'] + true_masks = batch['mask_ete'] + + + images = images.to(device=device, dtype=torch.float32) + true_masks = true_masks.to(device=device, dtype=torch.long) + + with torch.cuda.amp.autocast(enabled=amp): + masks_pred = net(images) + loss = criterion(masks_pred, true_masks) \ + + dice_loss(F.softmax(masks_pred, dim=1).float(), + F.one_hot(true_masks, num_classes).permute(0, 3, 1, 2).float(), + multiclass=True) + + optimizer.zero_grad(set_to_none=True) + grad_scaler.scale(loss).backward() + clip_value = 1 + torch.nn.utils.clip_grad_norm_(net.parameters(), clip_value) + grad_scaler.step(optimizer) + grad_scaler.update() + + pbar.update(images.shape[0]) + global_step += 1 + epoch_loss += loss.item() + experiment.log({ + 'train loss': loss.item(), + 'step': global_step, + 'epoch': epoch + }) + pbar.set_postfix(**{'loss (batch)': loss.item()}) + + if cfg.train.scheduler: + scheduler.step() + # Evaluation round + if global_step % (n_train // (1 * train_batch_size)) == 0: + val_dice_score, val_iou_score = evaluate(net, val_loader, device, 1) + val_score = val_dice_score + + if (val_score > best_value): + best_value = val_score + logging.info("New best dice score: {} at epochs {}".format(best_value, epoch+1)) + torch.save(net.state_dict(), str(dir_checkpoint/'checkpoint_{}_{}_best_{}.pth'.format(cfg.base.dataset_name, cfg.base.original_checkpoint, str(trial)))) + + logging.info('Validation Dice score: {}, IoU score {}'.format(val_dice_score, val_iou_score)) + + if epoch + 1 == epochs: + val_dice_score, val_iou_score = evaluate(net, val_loader, device, 1) + logging.info('Validation Dice score: {}, IoU score {}'.format(val_dice_score, val_iou_score)) + + if save_checkpoint: + torch.save(net.state_dict(), str(dir_checkpoint / 'checkpoint_epoch{}.pth'.format(epoch + 1))) + logging.info(f'Checkpoint {epoch + 1} saved!') + + if epoch > 0 and epoch != (epochs % 2 - 1) : + os.remove( str(dir_checkpoint/'checkpoint_epoch{}.pth'.format(epoch))) + logging.info("Evalutating on test set") + logging.info("Loading best model on validation") + net.load_state_dict(torch.load(str(dir_checkpoint/'checkpoint_{}_{}_best_{}.pth'.format(cfg.base.dataset_name, cfg.base.original_checkpoint, str(trial))))) + test_dice, test_iou = evaluate(net, test_loader, device, 1) + + logging.info("Test dice score {}, IoU score {}".format(test_dice, test_iou)) + + logging.info("Loading model at last epochs %d" %epochs) + net.load_state_dict(torch.load(str(dir_checkpoint/'checkpoint_epoch{}.pth'.format(epochs)))) + test_dice_last, test_iou_last = evaluate(net, test_loader, device, 1) + logging.info("Test dice score {}, IoU score {}".format(test_dice_last, test_iou_last)) + + return test_dice, test_iou, test_dice_last, test_iou_last + +def eval(cfg, out_dir, net, device, img_scale, trial): + test_dir_img = Path(cfg.dataloader.test_dir_img) + test_dir_mask = Path(cfg.dataloader.test_dir_mask) + test_dataset = SegmentationDataset(name_dataset=cfg.base.dataset_name, images_dir = test_dir_img, masks_dir= test_dir_mask, scale = img_scale) + loader_args = dict(num_workers=10, pin_memory=True) + test_loader = DataLoader(test_dataset, shuffle=False, drop_last=True, **loader_args) + dir_checkpoint = Path(out_dir) + + print("Trial", trial+1) + logging.info("Evalutating on test set") + logging.info("Loading best model on validation") + 
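+    # Restore the checkpoint saved as best-on-validation for this trial before running the test evaluation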
net.load_state_dict(torch.load(str(dir_checkpoint/'checkpoint_{}_{}_best_{}.pth'.format(cfg.base.dataset_name, cfg.base.original_checkpoint, str(trial))))) + test_dice, test_iou = evaluate(net, test_loader, device, 1) + logging.info("Test dice score {}, IoU score {}".format(test_dice, test_iou)) + return test_dice, test_iou + +#if __name__ == '__main__': +def train_2d_R50(yml_args, cfg): + + logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s') + cuda_string = 'cuda:' + cfg.base.gpu_id + device = torch.device(cuda_string if torch.cuda.is_available() else 'cpu') + logging.info(f'Using device {device}') + + + # Change here to adapt to your data + # n_channels=3 for RGB images + # n_classes is the number of probabilities you want to get per pixel + + try: + _2d_dices = [] + _2d_ious = [] + _2d_dices_last = [] + _2d_ious_last = [] + + if not yml_args.use_test_mode: + for trial in range(3): + print ("----"*3) + if cfg.base.original_checkpoint == "scratch": + net = smp.Unet(encoder_name="resnet50", encoder_weights=None, in_channels=3, classes=num_classes) + else: + print ("Using pre-trained models from", cfg.base.original_checkpoint) + net = smp.Unet(encoder_name="resnet50", encoder_weights=cfg.base.original_checkpoint, + in_channels=3, classes=num_classes) + + + net.to(device=device) + + print("Trial", trial + 1) + _2d_dice, _2d_iou, _2d_dice_last, _2d_iou_last = train_net(net=net, cfg=cfg, trial=trial, + epochs=cfg.train.num_epochs, + train_batch_size=cfg.train.train_batch_size, + val_batch_size=cfg.train.valid_batch_size, + learning_rate=cfg.train.learning_rate, + device=device, + val_percent=10.0 / 100, + img_scale = (cfg.base.image_shape, cfg.base.image_shape), + amp=False, + out_dir= cfg.base.best_valid_model_checkpoint) + _2d_dices.append(_2d_dice.item()) + _2d_ious.append(_2d_iou.item()) + _2d_dices_last.append(_2d_dice_last.item()) + _2d_ious_last.append(_2d_iou_last.item()) + + print ("Average performance on best valid set") + print("2d dice {}, mean {}, std {}".format(_2d_dices, np.mean(_2d_dices), np.std(_2d_dices))) + print("2d iou {}, mean {}, std {}".format(_2d_ious, np.mean(_2d_ious), np.std(_2d_ious))) + + + print ("Average performance on the last epoch") + print("2d dice {}, mean {}, std {}".format(_2d_dices_last, np.mean(_2d_dices_last), np.std(_2d_dices_last))) + print("2d iou {}, mean {}, std {}".format(_2d_ious_last, np.mean(_2d_ious_last), np.std(_2d_ious_last))) + + else: + for trial in range(3): + print ("----"*3) + if cfg.base.original_checkpoint == "scratch": + net = smp.Unet(encoder_name="resnet50", encoder_weights=None, in_channels=3, classes=num_classes) + else: + print ("Using pre-trained models from", cfg.base.original_checkpoint) + net = smp.Unet(encoder_name="resnet50", encoder_weights=cfg.base.original_checkpoint ,in_channels=3, + classes=num_classes) + + + net.to(device=device) + _2d_dice, _2d_iou = eval(cfg = cfg, out_dir = cfg.base.best_valid_model_checkpoint, net = net, device = device, + img_scale = (cfg.base.image_shape, cfg.base.image_shape), trial=trial) + _2d_dices.append(_2d_dice.item()) + _2d_ious.append(_2d_iou.item()) + print ("Average performance on best valid set") + print("2d dice {}, mean {}, std {}".format(_2d_dices, np.mean(_2d_dices), np.std(_2d_dices))) + print("2d iou {}, mean {}, std {}".format(_2d_ious, np.mean(_2d_ious), np.std(_2d_ious))) + + except KeyboardInterrupt: + torch.save(net.state_dict(), 'INTERRUPTED.pth') + logging.info('Saved interrupt') + sys.exit(0) \ No newline at end of file diff --git 
a/segmentation_2d/zero_shot_LVMMed_SAM_2d.py b/segmentation_2d/zero_shot_LVMMed_SAM_2d.py new file mode 100644 index 0000000000000000000000000000000000000000..51847b834a340e67f089827215b2b491073825c2 --- /dev/null +++ b/segmentation_2d/zero_shot_LVMMed_SAM_2d.py @@ -0,0 +1,143 @@ +import numpy as np +import os +join = os.path.join +import gc +from tqdm import tqdm +import torch +import monai, random +from dataloader.sam_transforms import ResizeLongestSide +from segment_anything import ( + sam_model_registry, + our_vit +) +from dataloader.dataloader import sam_dataloader +from utils.SurfaceDice import compute_dice_coefficient + +#%% test +def eval_dice(sam_model, + lvm_med_encoder_path, + loader, + device): + + """ + Function to evaluate model (for both validation and testing phase) + """ + + gc.collect() + torch.cuda.empty_cache() + torch.cuda.reset_peak_memory_stats() + + """ + Declare LVM Med backbone instead of using SAM's backbone + """ + arch = 'vit_encoder_b' + lvm_med_backbone = our_vit.__dict__[arch]() + lvm_weight = torch.load(lvm_med_encoder_path, map_location ='cpu') + lvm_med_backbone.load_state_dict(lvm_weight) + + dice_score = 0. + for _, batch in enumerate(tqdm(loader, leave=False)): + """ + Load precomputed embeddings, mask labels and bounding boxes computed directly from ground truth masks + """ + image, true_mask, boxes = batch['image'], batch['mask'], batch['bboxes'] + image = image.to(f"cuda:{device}") + true_mask = true_mask.to(f"cuda:{device}", dtype=torch.float32) + + """ + Compute image embeddings + """ + encoder = torch.nn.DataParallel(lvm_med_backbone, device_ids=[3, 2, 1, 0], output_device=device) + encoder = encoder.to(f"cuda:{encoder.device_ids[0]}") + sam_model = sam_model.to(f"cuda:{encoder.device_ids[0]}") + image = image.to(f"cuda:{encoder.device_ids[0]}") + image = sam_model.preprocess(image[:, :, :]) + image_embedding = encoder(image) + + """ + Get bboxes + """ + box_np = boxes.numpy() + sam_trans = ResizeLongestSide(sam_model.image_encoder.img_size) + box = sam_trans.apply_boxes(box_np, (image_embedding.shape[0], image_embedding.shape[1])) + box_torch = torch.as_tensor(box, dtype=torch.float32, device=device) + if len(box_torch.shape) == 2: + box_torch = box_torch[:, None, :] # (B, 1, 4) + + """ + Prompt encoder component + """ + prompt_encoder = torch.nn.DataParallel(sam_model.prompt_encoder, device_ids=[0,1,2,3], output_device=device) + prompt_encoder = prompt_encoder.to(f"cuda:{prompt_encoder.device_ids[0]}") + box_torch = box_torch.to(f"cuda:{prompt_encoder.device_ids[0]}") + sparse_embeddings, dense_embeddings = prompt_encoder( + points=None, + boxes=box_torch, + masks=None, + ) + + """ + Mask decoder component + """ + sam_model = sam_model.to(f"cuda:{device}") + mask_segmentation, iou_predictions = sam_model.mask_decoder( + image_embeddings=image_embedding.to(f"cuda:{device}"), # (B, 256, 64, 64) + image_pe=sam_model.prompt_encoder.get_dense_pe(), # (1, 256, 64, 64) + sparse_prompt_embeddings=sparse_embeddings, # (B, 2, 256) + dense_prompt_embeddings=dense_embeddings, # (B, 256, 64, 64) + multimask_output=False, + ) # -> (B, 256, 256) + + """ + Transform prediction and evaluate + """ + true_mask = true_mask.to("cpu") + medsam_seg_prob = torch.sigmoid(mask_segmentation) + medsam_seg_prob = medsam_seg_prob.detach().cpu().numpy().squeeze() + medsam_seg = (medsam_seg_prob > 0.5).astype(np.uint8) # transform from hard masks to soft masks + dice_score += compute_dice_coefficient(true_mask>0, medsam_seg>0) + + return dice_score.cpu().numpy()/len(loader) + 
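+
+# Note: unlike the fine-tuning scripts, this zero-shot path never updates the mask decoder.
+# eval_dice() above only swaps SAM's image encoder for the LVM-Med ViT-B encoder loaded from
+# `lvm_med_encoder_path`, while the prompt encoder and mask decoder keep their original SAM
+# weights. The driver below builds the test loader and reports the resulting Dice score.
+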
+def zero_shot_lvmmed_sam_2d(yml_args, cfg): + """ + Training warm up + """ + torch.multiprocessing.set_start_method('spawn') + + random.seed(cfg.base.random_seed) + np.random.seed(cfg.base.random_seed) + torch.manual_seed(cfg.base.random_seed) + torch.cuda.manual_seed(cfg.base.random_seed) + + torch.backends.cudnn.deterministic = True + + gc.collect() + torch.cuda.empty_cache() + torch.cuda.reset_peak_memory_stats() + + """ + General configuration + """ + img_shape = (3, 1024) # hard settings image shape as 3 x 1024 x 1024 + + """ + Load SAM with its original checkpoint + """ + sam_model = sam_model_registry["vit_b"](checkpoint=cfg.base.original_checkpoint) + + """ + Load precomputed embeddings + """ + _, _, test_loader, _, _ = sam_dataloader(cfg) + + """ + Test model + """ + with torch.no_grad(): + sam_model.eval() + test_dice_score = eval_dice(sam_model, + lvm_med_encoder_path=yml_args.lvm_med_encoder_path, + loader=test_loader, + device=cfg.base.gpu_id) + print(f"Dice score from zero-shot SAM: {test_dice_score*100}") \ No newline at end of file diff --git a/segmentation_2d/zero_shot_SAM_2d.py b/segmentation_2d/zero_shot_SAM_2d.py new file mode 100644 index 0000000000000000000000000000000000000000..83bce33cb140d0e630f88c8d6a90767c11ce9058 --- /dev/null +++ b/segmentation_2d/zero_shot_SAM_2d.py @@ -0,0 +1,130 @@ +import numpy as np +import os +join = os.path.join +import gc +from tqdm import tqdm +import torch +import monai, random +from dataloader.sam_transforms import ResizeLongestSide +from segment_anything import sam_model_registry +from dataloader.dataloader import sam_dataloader +from utils.SurfaceDice import compute_dice_coefficient + +#%% test +def eval_dice(sam_model, + loader, + device): + + """ + Function to evaluate model (for both validation and testing phase) + """ + + gc.collect() + torch.cuda.empty_cache() + torch.cuda.reset_peak_memory_stats() + + dice_score = 0. 
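+    # Accumulate the per-batch Dice coefficient; the returned score is averaged over the number of batches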
+ for _, batch in enumerate(tqdm(loader, leave=False)): + """ + Load precomputed embeddings, mask labels and bounding boxes computed directly from ground truth masks + """ + image, true_mask, boxes = batch['image'], batch['mask'], batch['bboxes'] + image = image.to(f"cuda:{device}") + true_mask = true_mask.to(f"cuda:{device}", dtype=torch.float32) + + """ + Compute image embeddings + """ + encoder = torch.nn.DataParallel(sam_model.image_encoder, device_ids=[3, 2, 1, 0], output_device=device) + encoder = encoder.to(f"cuda:{encoder.device_ids[0]}") + sam_model = sam_model.to(f"cuda:{encoder.device_ids[0]}") + image = image.to(f"cuda:{encoder.device_ids[0]}") + image = sam_model.preprocess(image[:, :, :]) + image_embedding = encoder(image) + + """ + Get bboxes + """ + box_np = boxes.numpy() + sam_trans = ResizeLongestSide(sam_model.image_encoder.img_size) + box = sam_trans.apply_boxes(box_np, (image_embedding.shape[0], image_embedding.shape[1])) + box_torch = torch.as_tensor(box, dtype=torch.float32, device=device) + if len(box_torch.shape) == 2: + box_torch = box_torch[:, None, :] # (B, 1, 4) + + """ + Prompt encoder component + """ + prompt_encoder = torch.nn.DataParallel(sam_model.prompt_encoder, device_ids=[0,1,2,3], output_device=device) + prompt_encoder = prompt_encoder.to(f"cuda:{prompt_encoder.device_ids[0]}") + box_torch = box_torch.to(f"cuda:{prompt_encoder.device_ids[0]}") + sparse_embeddings, dense_embeddings = prompt_encoder( + points=None, + boxes=box_torch, + masks=None, + ) + + """ + Mask decoder component + """ + sam_model = sam_model.to(f"cuda:{device}") + mask_segmentation, iou_predictions = sam_model.mask_decoder( + image_embeddings=image_embedding.to(f"cuda:{device}"), # (B, 256, 64, 64) + image_pe=sam_model.prompt_encoder.get_dense_pe(), # (1, 256, 64, 64) + sparse_prompt_embeddings=sparse_embeddings, # (B, 2, 256) + dense_prompt_embeddings=dense_embeddings, # (B, 256, 64, 64) + multimask_output=False, + ) # -> (B, 256, 256) + + """ + Transform prediction and evaluate + """ + true_mask = true_mask.to("cpu") + medsam_seg_prob = torch.sigmoid(mask_segmentation) + medsam_seg_prob = medsam_seg_prob.detach().cpu().numpy().squeeze() + medsam_seg = (medsam_seg_prob > 0.5).astype(np.uint8) # transform from hard masks to soft masks + dice_score += compute_dice_coefficient(true_mask>0, medsam_seg>0) + + return dice_score.cpu().numpy()/len(loader) + +def zero_shot_sam_2d(yml_args, cfg): + """ + Training warm up + """ + torch.multiprocessing.set_start_method('spawn') + + random.seed(cfg.base.random_seed) + np.random.seed(cfg.base.random_seed) + torch.manual_seed(cfg.base.random_seed) + torch.cuda.manual_seed(cfg.base.random_seed) + + torch.backends.cudnn.deterministic = True + + gc.collect() + torch.cuda.empty_cache() + torch.cuda.reset_peak_memory_stats() + + """ + General configuration + """ + img_shape = (3, 1024) # hard settings image shape as 3 x 1024 x 1024 + + """ + Load SAM with its original checkpoint + """ + sam_model = sam_model_registry["vit_b"](checkpoint=cfg.base.original_checkpoint) + + """ + Load precomputed embeddings + """ + _, _, test_loader, _, _ = sam_dataloader(cfg) + + """ + Test model + """ + with torch.no_grad(): + sam_model.eval() + test_dice_score = eval_dice(sam_model, + test_loader, + device=cfg.base.gpu_id) + print(f"Dice score from zero-shot SAM: {test_dice_score*100}") \ No newline at end of file diff --git a/segmentation_3d/LVMMed_SAM_3d.py b/segmentation_3d/LVMMed_SAM_3d.py new file mode 100644 index 
0000000000000000000000000000000000000000..a780ecbad5e69809cbc5f177250b271751ec2d6e --- /dev/null +++ b/segmentation_3d/LVMMed_SAM_3d.py @@ -0,0 +1,311 @@ +import numpy as np +import os +join = os.path.join +import gc +from tqdm import tqdm +import torch +import monai, random +from segment_anything import ( + sam_model_registry, + our_vit +) +from dataloader.sam_transforms import ResizeLongestSide +from dataloader.dataloader import sam_dataloader +from utils.SurfaceDice import multiclass_iou + +def fit(cfg, + sam_model, + lvm_med_backbone, + train_loader, + valid_dataset, + optimizer, + criterion, + model_save_path): + """ + Function to fit model + """ + + best_valid_iou3d = 0 + + device = cfg.base.gpu_id + num_epochs = cfg.train.num_epochs + + for epoch in range(num_epochs): + sam_model.train() + + epoch_loss = 0 + valid_iou3d = 0 + + print(f"Epoch #{epoch+1}/{num_epochs}") + for step, batch in enumerate(tqdm(train_loader, desc='Model training', unit='batch', leave=True)): + + """ + We load preprocessed images, mask labels and bounding boxes directly computed from ground truth masks + """ + image, true_mask, boxes = batch['image'], batch['mask'], batch['bboxes'] + sam_model = sam_model.to(f"cuda:{device}") + image = image.to(f"cuda:{device}") + true_mask = true_mask.to(f"cuda:{device}") + + """ + We freeze image encoder & prompt encoder, only finetune mask decoder + """ + with torch.no_grad(): + """ + Compute image embeddings from a batch of images with our LVM Med's frozen encoder + """ + encoder = torch.nn.DataParallel(lvm_med_backbone, device_ids=[3, 2, 1, 0], output_device=device) + encoder = encoder.to(f"cuda:{encoder.device_ids[0]}") + sam_model = sam_model.to(f"cuda:{encoder.device_ids[0]}") + image = image.to(f"cuda:{encoder.device_ids[0]}") + image = sam_model.preprocess(image[:, :, :]) + image_embedding = encoder(image) + + """ + Get bounding boxes to make segmentation prediction + We follow the work by Jun Ma & Bo Wang in Segment Anything in Medical Images (2023) + to get bounding boxes from the masks as the boxes prompt for SAM + """ + box_np = boxes.numpy() + sam_trans = ResizeLongestSide(sam_model.image_encoder.img_size) + box = sam_trans.apply_boxes(box_np, (true_mask.shape[-2], true_mask.shape[-1])) + box_torch = torch.as_tensor(box, dtype=torch.float, device=f"cuda:{device}") + if len(box_torch.shape) == 2: + box_torch = box_torch[:, None, :] # (B, 1, 4) + + """ + Encode box prompts information with SAM's frozen prompt encoder + """ + prompt_encoder = torch.nn.DataParallel(sam_model.prompt_encoder, device_ids=[0,1,2,3], output_device=device) + prompt_encoder = prompt_encoder.to(f"cuda:{prompt_encoder.device_ids[0]}") + box_torch = box_torch.to(f"cuda:{prompt_encoder.device_ids[0]}") + sparse_embeddings, dense_embeddings = prompt_encoder( + points=None, + boxes=box_torch, + masks=None, + ) + + """ + We now finetune mask decoder + """ + sam_model = sam_model.to(f"cuda:{device}") + predicted_mask, iou_predictions = sam_model.mask_decoder( + image_embeddings=image_embedding.to(f"cuda:{device}"), # (B, 256, 64, 64) + image_pe=sam_model.prompt_encoder.get_dense_pe(), # (1, 256, 64, 64) + sparse_prompt_embeddings=sparse_embeddings, # (B, 2, 256) + dense_prompt_embeddings=dense_embeddings, # (B, 256, 64, 64) + multimask_output=False, + ) # -> (B, 1, 256, 256) + + predicted_mask = predicted_mask.to(f"cuda:{device}") + true_mask = true_mask.to(f"cuda:{device}") + loss = criterion(predicted_mask, true_mask) + + """ + Upgrade model's params + """ + 
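+            # Only the mask decoder receives gradients here; the LVM-Med encoder and SAM's
+            # prompt encoder were run under torch.no_grad() above and stay frozen.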
optimizer.zero_grad(set_to_none=True) + loss.backward() + + clip_value = 1 # Clip gradient + torch.nn.utils.clip_grad_norm_(sam_model.mask_decoder.parameters(), clip_value) + + optimizer.step() + epoch_loss += loss.item() + + """ + Validation step with IoU as the metric + """ + with torch.no_grad(): + valid_iou3d = eval_iou(sam_model, + lvm_med_backbone, + valid_dataset, + device=device) + + epoch_loss /= ((step + 1) * len(train_loader)) + print(f'Loss: {epoch_loss}\n---') + + """ + Save best model + """ + if best_valid_iou3d < valid_iou3d: + best_valid_iou3d = valid_iou3d + torch.save(sam_model.state_dict(), join(model_save_path, f'{cfg.base.best_valid_model_checkpoint}{cfg.base.random_seed}.pth')) + + print(f"Valid 3D IoU: {valid_iou3d*100}") + print('=======================================') + + print(f"Best valid 3D IoU: {best_valid_iou3d*100}") + +def eval_iou(sam_model, + lvm_med_backbone, + loader, + device): + """ + We use IoU to evalute 3D samples. + + For 3D evaluation, we first concatenate 2D slices into 1 unified 3D volume and pass into model + However, due to limited computational resources, we could not perform 3D evaluation in GPU. + Hence, I set up to perform this function completely on CPU. + If you have enough resources, you could evaluate on multi-gpu the same as in training function. + """ + gc.collect() + torch.cuda.empty_cache() + torch.cuda.reset_peak_memory_stats() + + iou_score = 0 + num_volume = 0 + for _, batch in enumerate(tqdm(loader.get_3d_iter(), leave=False)): + """ + Load precomputed embeddings, mask labels and bounding boxes computed directly from ground truth masks + """ + image, true_mask, boxes = batch['image'], batch['mask'], batch['bboxes'] + image = image.to(f"cpu") + lvm_med_backbone = lvm_med_backbone.to("cpu") + true_mask = true_mask.to(f"cpu", dtype=torch.float32) + + """ + Compute image embeddings + """ + sam_model = sam_model.to(f"cpu") + image = image.to(f"cpu") + image = sam_model.preprocess(image[:, :, :]) + image_embedding = lvm_med_backbone(image) + + """ + Get bboxes + """ + box_np = boxes.numpy() + sam_trans = ResizeLongestSide(sam_model.image_encoder.img_size) + box = sam_trans.apply_boxes(box_np, (image_embedding.shape[0], image_embedding.shape[1])) + box_torch = torch.as_tensor(box, dtype=torch.float32, device=device) + if len(box_torch.shape) == 2: + box_torch = box_torch[:, None, :] # (B, 1, 4) + + """ + Prompt encoder component + """ + box_torch = box_torch.to(f"cpu") + sparse_embeddings, dense_embeddings = sam_model.prompt_encoder( + points=None, + boxes=box_torch, + masks=None, + ) + + """ + Mask decoder component + """ + sam_model = sam_model.to(f"cpu") + mask_segmentation, iou_predictions = sam_model.mask_decoder( + image_embeddings=image_embedding.to(f"cpu"), # (B, 256, 64, 64) + image_pe=sam_model.prompt_encoder.get_dense_pe(), # (1, 256, 64, 64) + sparse_prompt_embeddings=sparse_embeddings, # (B, 2, 256) + dense_prompt_embeddings=dense_embeddings, # (B, 256, 64, 64) + multimask_output=False, + ) # -> (B, 256, 256) + + """ + Transform prediction and evaluate + """ + true_mask = true_mask.to("cpu") + medsam_seg_prob = torch.sigmoid(mask_segmentation) + medsam_seg = (medsam_seg_prob > 0.5).to(dtype=torch.float32) + iou_score += multiclass_iou((true_mask>0).to(dtype=torch.float32), (medsam_seg>0).to(dtype=torch.float32)) + num_volume += 1 + return iou_score.cpu().numpy()/num_volume + +def lvm_medsam_3d(yml_args, cfg): + """ + Training warm up + """ + torch.multiprocessing.set_start_method('spawn') + + 
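+    # Seed Python, NumPy and PyTorch RNGs so runs with the same config are reproducible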
random.seed(cfg.base.random_seed) + np.random.seed(cfg.base.random_seed) + torch.manual_seed(cfg.base.random_seed) + torch.cuda.manual_seed(cfg.base.random_seed) + + torch.backends.cudnn.deterministic = True + + gc.collect() + torch.cuda.empty_cache() + torch.cuda.reset_peak_memory_stats() + + """ + General configuration + """ + img_shape = (3, 1024) # hard settings image shape as 3 x 1024 x 1024 + model_save_path = join("./work_dir", 'SAM-ViT-B') + os.makedirs(model_save_path, exist_ok=True) + + print(f"Fine-tuned SAM (3D IoU) in {cfg.base.dataset_name} with {cfg.train.optimizer}, LR = {cfg.train.learning_rate}") + + """ + Load SAM with its original checkpoint + """ + sam_model = sam_model_registry["vit_b"](checkpoint=cfg.base.original_checkpoint) + + """ + Load precomputed embeddings + """ + train_loader, _, _, valid_dataset, test_dataset = sam_dataloader(cfg) + + """ + Optimizer & learning rate scheduler config + """ + if cfg.train.optimizer == 'sgd': + optimizer = torch.optim.SGD(sam_model.mask_decoder.parameters(), + lr=float(cfg.train.learning_rate), + momentum=0.9) + elif cfg.train.optimizer == 'adam': + optimizer = torch.optim.Adam(sam_model.mask_decoder.parameters(), + lr=float(cfg.train.learning_rate), + weight_decay=0, + amsgrad=True) + elif cfg.train.optimizer == 'adamw': + optimizer = torch.optim.AdamW(sam_model.mask_decoder.parameters(), + lr=float(cfg.train.learning_rate), + weight_decay=0) + else: + raise NotImplementedError(f"Optimizer {cfg.train.optimizer} is not set up yet") + + """ + Loss function + In this work, we use a combination of Dice and Cross Entropy Loss to measure SAM's loss values. + """ + criterion = monai.losses.DiceCELoss(sigmoid=True, + squared_pred=True, + reduction='mean') + + """ + Declare LVM Med backbone instead of using SAM's backbone + """ + arch = 'vit_encoder_b' + lvm_med_backbone = our_vit.__dict__[arch]() + lvm_weight = torch.load(yml_args.lvm_med_encoder_path, map_location ='cpu') + lvm_med_backbone.load_state_dict(lvm_weight) + + """ + Train model + """ + if not yml_args.use_test_mode: + fit(cfg, + sam_model=sam_model, + lvm_med_backbone=lvm_med_backbone, + train_loader=train_loader, + valid_loader=valid_dataset, + optimizer=optimizer, + criterion=criterion, + model_save_path=model_save_path) + + """ + Test model + """ + with torch.no_grad(): + sam_model_test_iou = sam_model_registry["vit_b"](checkpoint=join(model_save_path, f'{cfg.base.best_valid_model_checkpoint}{cfg.base.random_seed}.pth')) + sam_model_test_iou.eval() + test_iou_score = eval_iou(sam_model_test_iou, + lvm_med_backbone, + test_dataset, + device=cfg.base.gpu_id) + print(f"Test 3D IoU score after training with {cfg.train.optimizer}(lr = {cfg.train.learning_rate}): {test_iou_score *100}") \ No newline at end of file diff --git a/segmentation_3d/MedSAM_3d.py b/segmentation_3d/MedSAM_3d.py new file mode 100644 index 0000000000000000000000000000000000000000..c79aada4f47ec541c03e916a7144d6c556e25b88 --- /dev/null +++ b/segmentation_3d/MedSAM_3d.py @@ -0,0 +1,296 @@ +import numpy as np +import os +join = os.path.join +import gc +from tqdm import tqdm +import torch +import monai, random +from segment_anything import sam_model_registry +from dataloader.sam_transforms import ResizeLongestSide +from dataloader.dataloader import sam_dataloader +from utils.SurfaceDice import multiclass_iou + + +def fit(cfg, + sam_model, + train_loader, + valid_dataset, + optimizer, + criterion, + model_save_path): + """ + Function to fit model + """ + + best_valid_iou3d = 0 + + device = cfg.base.gpu_id + 
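+    # cfg.base.gpu_id selects the output device; DataParallel below spreads the frozen encoder across GPUs 0-3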
num_epochs = cfg.train.num_epochs + + for epoch in range(num_epochs): + sam_model.train() + + epoch_loss = 0 + valid_iou3d = 0 + + print(f"Epoch #{epoch+1}/{num_epochs}") + for step, batch in enumerate(tqdm(train_loader, desc='Model training', unit='batch', leave=True)): + + """ + Load precomputed image embeddings to ease training process + We also load mask labels and bounding boxes directly computed from ground truth masks + """ + image, true_mask, boxes = batch['image'], batch['mask'], batch['bboxes'] + sam_model = sam_model.to(f"cuda:{device}") + image = image.to(f"cuda:{device}") + true_mask = true_mask.to(f"cuda:{device}") + + """ + We freeze image encoder & prompt encoder, only finetune mask decoder + """ + with torch.no_grad(): + """ + Compute image embeddings from a batch of images with SAM's frozen encoder + """ + encoder = torch.nn.DataParallel(sam_model.image_encoder, device_ids=[3, 2, 1, 0], output_device=device) + encoder = encoder.to(f"cuda:{encoder.device_ids[0]}") + sam_model = sam_model.to(f"cuda:{encoder.device_ids[0]}") + image = image.to(f"cuda:{encoder.device_ids[0]}") + image = sam_model.preprocess(image[:, :, :]) + image_embedding = encoder(image) + + """ + Get bounding boxes to make segmentation prediction + We follow the work by Jun Ma & Bo Wang in Segment Anything in Medical Images (2023) + to get bounding boxes from the masks as the boxes prompt for SAM + """ + box_np = boxes.numpy() + sam_trans = ResizeLongestSide(sam_model.image_encoder.img_size) + box = sam_trans.apply_boxes(box_np, (true_mask.shape[-2], true_mask.shape[-1])) + box_torch = torch.as_tensor(box, dtype=torch.float, device=f"cuda:{device}") + if len(box_torch.shape) == 2: + box_torch = box_torch[:, None, :] # (B, 1, 4) + + """ + Encode box prompts information with SAM's frozen prompt encoder + """ + prompt_encoder = torch.nn.DataParallel(sam_model.prompt_encoder, device_ids=[0,1,2,3], output_device=device) + prompt_encoder = prompt_encoder.to(f"cuda:{prompt_encoder.device_ids[0]}") + box_torch = box_torch.to(f"cuda:{prompt_encoder.device_ids[0]}") + sparse_embeddings, dense_embeddings = prompt_encoder( + points=None, + boxes=box_torch, + masks=None, + ) + + """ + We now finetune mask decoder + """ + sam_model = sam_model.to(f"cuda:{device}") + predicted_mask, iou_predictions = sam_model.mask_decoder( + image_embeddings=image_embedding.to(f"cuda:{device}"), # (B, 256, 64, 64) + image_pe=sam_model.prompt_encoder.get_dense_pe(), # (1, 256, 64, 64) + sparse_prompt_embeddings=sparse_embeddings, # (B, 2, 256) + dense_prompt_embeddings=dense_embeddings, # (B, 256, 64, 64) + multimask_output=False, + ) # -> (B, 1, 256, 256) + + predicted_mask = predicted_mask.to(f"cuda:{device}") + true_mask = true_mask.to(f"cuda:{device}") + loss = criterion(predicted_mask, true_mask) + + """ + Upgrade model's params + """ + optimizer.zero_grad(set_to_none=True) + loss.backward() + + clip_value = 1 # Clip gradient + torch.nn.utils.clip_grad_norm_(sam_model.mask_decoder.parameters(), clip_value) + + optimizer.step() + epoch_loss += loss.item() + + """ + Validation step with IoU as the metric + """ + with torch.no_grad(): + valid_iou3d = eval_iou(sam_model, + valid_dataset, + device=device) + + epoch_loss /= ((step + 1) * len(train_loader)) + print(f'Loss: {epoch_loss}\n---') + + """ + Save best model + """ + if best_valid_iou3d < valid_iou3d: + best_valid_iou3d = valid_iou3d + torch.save(sam_model.state_dict(), join(model_save_path, f'{cfg.base.best_valid_model_checkpoint}{cfg.base.random_seed}.pth')) + + print(f"Valid 
3D IoU: {valid_iou3d*100}") + print('=======================================') + + print(f"Best valid 3D IoU: {best_valid_iou3d*100}") + +def eval_iou(sam_model, + loader, + device): + """ + We use IoU to evalute 3D samples. + + For 3D evaluation, we first concatenate 2D slices into 1 unified 3D volume and pass into model + However, due to limited computational resources, we could not perform 3D evaluation in GPU. + Hence, I set up to perform this function completely on CPU. + If you have enough resources, you could evaluate on multi-gpu the same as in training function. + """ + gc.collect() + torch.cuda.empty_cache() + torch.cuda.reset_peak_memory_stats() + + iou_score = 0 + num_volume = 0 + for _, batch in enumerate(tqdm(loader.get_3d_iter(), leave=False)): + """ + Load precomputed embeddings, mask labels and bounding boxes computed directly from ground truth masks + """ + image, true_mask, boxes = batch['image'], batch['mask'], batch['bboxes'] + image = image.to(f"cpu") + true_mask = true_mask.to(f"cpu", dtype=torch.float32) + + """ + Compute image embeddings + """ + sam_model = sam_model.to(f"cpu") + image = image.to(f"cpu") + image = sam_model.preprocess(image[:, :, :]) + image_embedding = sam_model.image_encoder(image) + + """ + Get bboxes + """ + box_np = boxes.numpy() + sam_trans = ResizeLongestSide(sam_model.image_encoder.img_size) + box = sam_trans.apply_boxes(box_np, (image_embedding.shape[0], image_embedding.shape[1])) + box_torch = torch.as_tensor(box, dtype=torch.float32, device=device) + if len(box_torch.shape) == 2: + box_torch = box_torch[:, None, :] # (B, 1, 4) + + """ + Prompt encoder component + """ + box_torch = box_torch.to(f"cpu") + sparse_embeddings, dense_embeddings = sam_model.prompt_encoder( + points=None, + boxes=box_torch, + masks=None, + ) + + """ + Mask decoder component + """ + sam_model = sam_model.to(f"cpu") + mask_segmentation, iou_predictions = sam_model.mask_decoder( + image_embeddings=image_embedding.to(f"cpu"), # (B, 256, 64, 64) + image_pe=sam_model.prompt_encoder.get_dense_pe(), # (1, 256, 64, 64) + sparse_prompt_embeddings=sparse_embeddings, # (B, 2, 256) + dense_prompt_embeddings=dense_embeddings, # (B, 256, 64, 64) + multimask_output=False, + ) # -> (B, 256, 256) + + """ + Transform prediction and evaluate + """ + true_mask = true_mask.to("cpu") + medsam_seg_prob = torch.sigmoid(mask_segmentation) + medsam_seg = (medsam_seg_prob > 0.5).to(dtype=torch.float32) + iou_score += multiclass_iou((true_mask>0).to(dtype=torch.float32), (medsam_seg>0).to(dtype=torch.float32)) + num_volume += 1 + return iou_score.cpu().numpy()/num_volume + +def medsam_3d(yml_args, cfg): + """ + Training warm up + """ + torch.multiprocessing.set_start_method('spawn') + + random.seed(cfg.base.random_seed) + np.random.seed(cfg.base.random_seed) + torch.manual_seed(cfg.base.random_seed) + torch.cuda.manual_seed(cfg.base.random_seed) + + torch.backends.cudnn.deterministic = True + + gc.collect() + torch.cuda.empty_cache() + torch.cuda.reset_peak_memory_stats() + + """ + General configuration + """ + img_shape = (3, 1024) # hard settings image shape as 3 x 1024 x 1024 + model_save_path = join("./work_dir", 'SAM-ViT-B') + os.makedirs(model_save_path, exist_ok=True) + + print(f"Fine-tuned SAM (3D IoU) in {cfg.base.dataset_name} with {cfg.train.optimizer}, LR = {cfg.train.learning_rate}") + + """ + Load SAM with its original checkpoint + """ + sam_model = sam_model_registry["vit_b"](checkpoint=cfg.base.original_checkpoint) + + """ + Load precomputed embeddings + """ + train_loader, 
_, _, valid_dataset, test_dataset = sam_dataloader(cfg) + + """ + Optimizer & learning rate scheduler config + """ + if cfg.train.optimizer == 'sgd': + optimizer = torch.optim.SGD(sam_model.mask_decoder.parameters(), + lr=float(cfg.train.learning_rate), + momentum=0.9) + elif cfg.train.optimizer == 'adam': + optimizer = torch.optim.Adam(sam_model.mask_decoder.parameters(), + lr=float(cfg.train.learning_rate), + weight_decay=0, + amsgrad=True) + elif cfg.train.optimizer == 'adamw': + optimizer = torch.optim.AdamW(sam_model.mask_decoder.parameters(), + lr=float(cfg.train.learning_rate), + weight_decay=0) + else: + raise NotImplementedError(f"Optimizer {cfg.train.optimizer} is not set up yet") + + """ + Loss function + In this work, we use a combination of Dice and Cross Entropy Loss to measure SAM's loss values. + """ + criterion = monai.losses.DiceCELoss(sigmoid=True, + squared_pred=True, + reduction='mean') + + """ + Train model + """ + if not yml_args.use_test_mode: + fit(cfg, + sam_model=sam_model, + train_loader=train_loader, + valid_dataset=valid_dataset, + optimizer=optimizer, + criterion=criterion, + model_save_path=model_save_path) + + """ + Test model + """ + with torch.no_grad(): + sam_model_test_iou = sam_model_registry["vit_b"](checkpoint=join(model_save_path, f'{cfg.base.best_valid_model_checkpoint}{cfg.base.random_seed}.pth')) + sam_model_test_iou.eval() + test_iou_score = eval_iou(sam_model_test_iou, + test_dataset, + device=cfg.base.gpu_id) + print(f"Test 3D IoU score after training with {cfg.train.optimizer}(lr = {cfg.train.learning_rate}): {test_iou_score *100}") \ No newline at end of file diff --git a/segmentation_3d/__init__.py b/segmentation_3d/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/segmentation_3d/train_R50_seg_adam_optimizer_3d.py b/segmentation_3d/train_R50_seg_adam_optimizer_3d.py new file mode 100644 index 0000000000000000000000000000000000000000..ad80156edff6003fd7e93a679024367e009f1bab --- /dev/null +++ b/segmentation_3d/train_R50_seg_adam_optimizer_3d.py @@ -0,0 +1,294 @@ +import argparse +import logging +import sys +from pathlib import Path +import os +import torch +import torch.nn as nn +import torch.nn.functional as F +import wandb +from torch import optim +from torch.utils.data import DataLoader, random_split +from tqdm import tqdm +from torch.optim.lr_scheduler import ExponentialLR + +from dataloader.dataset_ete import SegmentationDataset_train, SegmentationDataset +from utils.endtoend import dice_loss +from evaluate import evaluate, evaluate_3d_iou +#from models.segmentation import UNet +import segmentation_models_pytorch as smp +import numpy as np +num_classes = 8 +np.random.seed(42) + +def train_net(net, + cfg, + trial, + device, + epochs: int = 30, + train_batch_size: int = 128, + val_batch_size: int = 128, + learning_rate: float = 0.1, + val_percent: float = 0.1, + save_checkpoint: bool = True, + img_scale: float = 0.5, + amp: bool = True, + out_dir : str= './checkpoint/'): + # 1. 
Create dataset + train_dir_img = Path(cfg.dataloader.train_dir_img) + train_dir_mask = Path(cfg.dataloader.train_dir_mask) + val_dir_img = Path(cfg.dataloader.valid_dir_img) + val_dir_mask = Path(cfg.dataloader.valid_dir_mask) + test_dir_img = Path(cfg.dataloader.test_dir_img) + test_dir_mask = Path(cfg.dataloader.test_dir_mask) + non_label_text = cfg.dataloader.non_label + have_label_text = cfg.dataloader.have_label + + dir_checkpoint = Path(out_dir) + Path(dir_checkpoint).mkdir(parents=True, exist_ok=True) + + train_dataset = SegmentationDataset_train(nonlabel_path= non_label_text, havelabel_path= have_label_text, dataset = cfg.base.dataset_name, scale= img_scale) + + val_dataset = SegmentationDataset(cfg.base.dataset_name, val_dir_img, val_dir_mask, scale=img_scale) + + test_dataset = SegmentationDataset(cfg.base.dataset_name, test_dir_img, test_dir_mask, scale=img_scale) + + n_train = len(train_dataset) + n_val = len(val_dataset) + + + # 3. Create data loaders + loader_args = dict(num_workers=10, pin_memory=True) + train_loader = DataLoader(train_dataset, shuffle=True, batch_size=train_batch_size, **loader_args) + import time + + val_loader = DataLoader(val_dataset, shuffle=False, drop_last=True, batch_size=val_batch_size, **loader_args) + test_loader = DataLoader(test_dataset, shuffle=False, drop_last=True, **loader_args) + # (Initialize logging) + experiment = wandb.init(project='U-Net', resume='allow', anonymous='must') + experiment.config.update(dict(epochs=epochs, train_batch_size=train_batch_size, val_batch_size=val_batch_size, learning_rate=learning_rate, + val_percent=val_percent, save_checkpoint=save_checkpoint, img_scale=img_scale, + amp=amp)) + + logging.info(f'''Starting training: + Epochs: {epochs} + Train batch size: {train_batch_size} + Val batch size: {val_batch_size} + Learning rate: {learning_rate} + Training size: {n_train} + Validation size: {n_val} + Checkpoints: {save_checkpoint} + Device: {device.type} + Images scaling: {img_scale} + Mixed Precision: {amp} + ''') + + # 4. Set up the optimizer, the loss, the learning rate scheduler and the loss scaling for AMP + # optimizer = optim.RMSprop(net.parameters(), lr=learning_rate, weight_decay=1e-8, momentum=0.9) + # optimizer = optim.SGD(net.parameters(), lr=learning_rate, momentum=0.9, weight_decay=1e-8) + optimizer = optim.Adam(net.parameters(), lr=learning_rate, betas=(cfg.train.beta1, cfg.train.beta2), eps=1e-08, weight_decay=cfg.train.weight_decay) + if cfg.train.scheduler: + print("Use scheduler") + scheduler = ExponentialLR(optimizer, gamma=0.9) + print(learning_rate) + # optimizer= optim.Adam(net.parameters(), lr=0.001, betas=(0.9, 0.999), eps=1e-08, weight_decay=0, amsgrad=False) + + # scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, 'max', patience=2) # goal: maximize Dice score + grad_scaler = torch.cuda.amp.GradScaler(enabled=amp) + criterion = nn.CrossEntropyLoss() + global_step = 0 + best_value = 0 + # 5. 
Begin training + for epoch in range(epochs): + net.train() + epoch_loss = 0 + with tqdm(total=n_train, desc=f'Epoch {epoch + 1}/{epochs}', unit='img') as pbar: + for batch in train_loader: + images = batch['image'] + true_masks = batch['mask_ete'] + + images = images.to(device=device, dtype=torch.float32) + true_masks = true_masks.to(device=device, dtype=torch.long) + + with torch.cuda.amp.autocast(enabled=amp): + masks_pred = net(images) + loss = criterion(masks_pred, true_masks) \ + + dice_loss(F.softmax(masks_pred, dim=1).float(), + F.one_hot(true_masks, num_classes).permute(0, 3, 1, 2).float(), + multiclass=True) + + optimizer.zero_grad(set_to_none=True) + grad_scaler.scale(loss).backward() + clip_value = 1 + torch.nn.utils.clip_grad_norm_(net.parameters(), clip_value) + grad_scaler.step(optimizer) + grad_scaler.update() + + pbar.update(images.shape[0]) + global_step += 1 + epoch_loss += loss.item() + experiment.log({ + 'train loss': loss.item(), + 'step': global_step, + 'epoch': epoch + }) + pbar.set_postfix(**{'loss (batch)': loss.item()}) + + # Evaluation round + if global_step % (n_train // (1 * train_batch_size)) == 0: + histograms = {} + for tag, value in net.named_parameters(): + tag = tag.replace('/', '.') + histograms['Weights/' + tag] = wandb.Histogram(value.data.cpu()) + histograms['Gradients/' + tag] = wandb.Histogram(value.grad.data.cpu()) + + val_dice_score, val_iou_score = evaluate(net, val_loader, device, 2) + val_3d_iou_score = evaluate_3d_iou(net, val_dataset, device, 2) + + + + val_score = val_3d_iou_score + + # scheduler.step(val_dice_score) + if (val_score > best_value): + best_value = val_score + logging.info("New best 3d iou score: {} at epochs {}".format(best_value, epoch+1)) + torch.save(net.state_dict(), str(dir_checkpoint/'checkpoint_{}_{}_best_{}.pth'.format(cfg.base.dataset_name, cfg.base.original_checkpoint, str(trial)))) + + logging.info('Validation Dice score: {}, IoU score {}, IoU 3d score {}'.format(val_dice_score, val_iou_score, val_3d_iou_score)) + + + # update learning rate + if cfg.train.scheduler: + if (epoch + 1 <= 0): + scheduler.step() + + # Evaluation the last model + if epoch + 1 == epochs: + val_dice_score, val_iou_score = evaluate(net, val_loader, device, 2) + val_3d_iou_score = evaluate_3d_iou(net, val_dataset, device, 2) + logging.info('Validation Dice score: {}, IoU score {}, IoU 3d score {}'.format(val_dice_score, val_iou_score, val_3d_iou_score)) + + + if save_checkpoint: + torch.save(net.state_dict(), str(dir_checkpoint / 'checkpoint_epoch{}.pth'.format(epoch + 1))) + logging.info(f'Checkpoint {epoch + 1} saved!') + if epoch > 0 and epoch != (epochs % 2 - 1) : + os.remove( str(dir_checkpoint/'checkpoint_epoch{}.pth'.format(epoch))) + logging.info("Evalutating on test set") + logging.info("Loading best model on validation") + net.load_state_dict(torch.load(str(dir_checkpoint/'checkpoint_{}_{}_best_{}.pth'.format(cfg.base.dataset_name, cfg.base.original_checkpoint, str(trial))))) + test_dice, test_iou = evaluate(net, test_loader, device, 2) + test_3d_iou = evaluate_3d_iou(net, test_dataset, device, 2) + logging.info("Test dice score {}, IoU score {}, 3d IoU {}".format(test_dice, test_iou, test_3d_iou)) + + logging.info("Loading model at last epochs %d" %epochs) + net.load_state_dict(torch.load(str(dir_checkpoint/'checkpoint_epoch{}.pth'.format(epochs)))) + test_dice_last, test_iou_last = evaluate(net, test_loader, device, 2) + test_3d_iou_last = evaluate_3d_iou(net, test_dataset, device, 2) + logging.info("Test dice score {}, IoU score 
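# Illustrative note, not from the original sources: the loop above calls
# torch.nn.utils.clip_grad_norm_ while the gradients are still scaled by the
# AMP GradScaler (and despite the name, clip_value clips the gradient *norm*).
# The pattern recommended in the PyTorch AMP documentation unscales first so
# the threshold applies to the true gradient magnitudes:
#
#     grad_scaler.scale(loss).backward()
#     grad_scaler.unscale_(optimizer)                        # undo loss scaling
#     torch.nn.utils.clip_grad_norm_(net.parameters(), 1.0)  # clip real gradients
#     grad_scaler.step(optimizer)
#     grad_scaler.update()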
{}, 3d IoU {}".format(test_dice_last, test_iou_last, test_3d_iou_last)) + + return test_dice, test_iou, test_3d_iou, test_dice_last, test_iou_last, test_3d_iou_last + + +def eval(cfg, out_dir, net, device, img_scale, trial): + test_dir_img = Path(cfg.dataloader.test_dir_img) + test_dir_mask = Path(cfg.dataloader.test_dir_mask) + test_dataset = SegmentationDataset(name_dataset=cfg.base.dataset_name, images_dir = test_dir_img, masks_dir= test_dir_mask, scale = img_scale) + loader_args = dict(num_workers=10, pin_memory=True) + test_loader = DataLoader(test_dataset, shuffle=False, drop_last=True, **loader_args) + dir_checkpoint = Path(out_dir) + + print("Trial", trial+1) + logging.info("Evalutating on test set") + logging.info("Loading best model on validation") + net.load_state_dict(torch.load(str(dir_checkpoint/'checkpoint_{}_{}_best_{}.pth'.format(cfg.base.dataset_name, cfg.base.original_checkpoint, str(trial))))) + test_dice, test_iou = evaluate(net, test_loader, device, 2) + test_3d_iou = evaluate_3d_iou(net, test_dataset, device, 2) + logging.info("Test dice score {}, IoU score {}, 3d IoU {}".format(test_dice, test_iou, test_3d_iou)) + return test_dice, test_iou, test_3d_iou + +def train_3d_R50(yml_args, cfg): + logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s') + cuda_string = 'cuda:' + cfg.base.gpu_id + device = torch.device(cuda_string if torch.cuda.is_available() else 'cpu') + logging.info(f'Using device {device}') + + # Change here to adapt to your data + # n_channels=3 for RGB images + # n_classes is the number of probabilities you want to get per pixel + + try: + _2d_dices = [] + _2d_ious = [] + _3d_ious = [] + _2d_dices_last = [] + _2d_ious_last = [] + _3d_ious_last = [] + + if not yml_args.use_test_mode: + for trial in range(5): + print ("----"*3) + if cfg.base.original_checkpoint == "scratch": + net = smp.Unet(encoder_name="resnet50", encoder_weights=None, in_channels=1, classes=num_classes) + else: + print ("Using pre-trained models from", cfg.base.original_checkpoint) + net = smp.Unet(encoder_name="resnet50", encoder_weights=cfg.base.original_checkpoint ,in_channels=1, classes=num_classes) + + + net.to(device=device) + + print("Trial", trial + 1) + _2d_dice, _2d_iou, _3d_iou, _2d_dice_last, _2d_iou_last, _3d_iou_last = train_net(net=net, cfg=cfg, trial=trial, + epochs=cfg.train.num_epochs, + train_batch_size=cfg.train.train_batch_size, + val_batch_size=cfg.train.valid_batch_size, + learning_rate=cfg.train.learning_rate, + device=device, + img_scale=(cfg.base.image_shape, cfg.base.image_shape), + val_percent=10.0 / 100, + amp=False, + out_dir= cfg.base.best_valid_model_checkpoint) + _2d_dices.append(_2d_dice.item()) + _2d_ious.append(_2d_iou.item()) + _3d_ious.append(_3d_iou.item()) + _2d_dices_last.append(_2d_dice_last.item()) + _2d_ious_last.append(_2d_iou_last.item()) + _3d_ious_last.append(_3d_iou_last.item()) + + print ("Average performance on best valid set") + print("2d dice {}, mean {}, std {}".format(_2d_dices, np.mean(_2d_dices), np.std(_2d_dices))) + print("2d iou {}, mean {}, std {}".format(_2d_ious, np.mean(_2d_ious), np.std(_2d_ious))) + print("3d iou {}, mean {}, std {}".format(_3d_ious, np.mean(_3d_ious), np.std(_3d_ious))) + + print ("Average performance on the last epoch") + print("2d dice {}, mean {}, std {}".format(_2d_dices_last, np.mean(_2d_dices_last), np.std(_2d_dices_last))) + print("2d iou {}, mean {}, std {}".format(_2d_ious_last, np.mean(_2d_ious_last), np.std(_2d_ious_last))) + print("3d iou {}, mean {}, std 
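# Illustrative note, not from the original sources: when reloading the best
# checkpoint for testing as above, torch.load restores tensors to the device
# they were saved from unless map_location is given; passing it explicitly (and
# switching to eval mode) keeps the reload robust across machines.
# checkpoint_path below is hypothetical:
#
#     state = torch.load(checkpoint_path, map_location=device)
#     net.load_state_dict(state)
#     net.eval()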
{}".format(_3d_ious_last, np.mean(_3d_ious_last), np.std(_3d_ious_last))) + else: + for trial in range(5): + print ("----"*3) + if cfg.base.original_checkpoint == "scratch": + net = smp.Unet(encoder_name="resnet50", encoder_weights=None, in_channels=1, classes=num_classes) + else: + print ("Using pre-trained models from", cfg.base.original_checkpoint) + net = smp.Unet(encoder_name="resnet50", encoder_weights=cfg.base.original_checkpoint ,in_channels=1, + classes=num_classes) + + + net.to(device=device) + _2d_dice, _2d_iou, _3d_iou = eval(cfg = cfg, out_dir = cfg.base.best_valid_model_checkpoint, net = net, device = device, + img_scale = (cfg.base.image_shape, cfg.base.image_shape), trial=trial) + _2d_dices.append(_2d_dice.item()) + _2d_ious.append(_2d_iou.item()) + _3d_ious.append(_3d_iou.item()) + print ("Average performance on best valid set") + print("2d dice {}, mean {}, std {}".format(_2d_dices, np.mean(_2d_dices), np.std(_2d_dices))) + print("2d iou {}, mean {}, std {}".format(_2d_ious, np.mean(_2d_ious), np.std(_2d_ious))) + print("3d iou {}, mean {}, std {}".format(_3d_ious, np.mean(_3d_ious), np.std(_3d_ious))) + + + except KeyboardInterrupt: + torch.save(net.state_dict(), 'INTERRUPTED.pth') + logging.info('Saved interrupt') + sys.exit(0) diff --git a/segmentation_models_pytorch_example/__init__.py b/segmentation_models_pytorch_example/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6628d293843890d079d4e4be0128748f645b1103 --- /dev/null +++ b/segmentation_models_pytorch_example/__init__.py @@ -0,0 +1,46 @@ +from .unet import Unet +from .unetplusplus import UnetPlusPlus +from .manet import MAnet +from .linknet import Linknet +from .fpn import FPN +from .pspnet import PSPNet +from .deeplabv3 import DeepLabV3, DeepLabV3Plus +from .pan import PAN + +from . import encoders +from . import utils +from . import losses + +from .__version__ import __version__ + +from typing import Optional +import torch + + +def create_model( + arch: str, + encoder_name: str = "resnet34", + encoder_weights: Optional[str] = "imagenet", + in_channels: int = 3, + classes: int = 1, + **kwargs, +) -> torch.nn.Module: + """Models wrapper. Allows to create any model just with parametes + + """ + + archs = [Unet, UnetPlusPlus, MAnet, Linknet, FPN, PSPNet, DeepLabV3, DeepLabV3Plus, PAN] + archs_dict = {a.__name__.lower(): a for a in archs} + try: + model_class = archs_dict[arch.lower()] + except KeyError: + raise KeyError("Wrong architecture type `{}`. 
Available options are: {}".format( + arch, list(archs_dict.keys()), + )) + return model_class( + encoder_name=encoder_name, + encoder_weights=encoder_weights, + in_channels=in_channels, + classes=classes, + **kwargs, + ) \ No newline at end of file diff --git a/segmentation_models_pytorch_example/__version__.py b/segmentation_models_pytorch_example/__version__.py new file mode 100644 index 0000000000000000000000000000000000000000..dfd69f99cd946e5a172c49dc30fdb713fe987c22 --- /dev/null +++ b/segmentation_models_pytorch_example/__version__.py @@ -0,0 +1,3 @@ +VERSION = (0, 2, 0) + +__version__ = '.'.join(map(str, VERSION)) diff --git a/segmentation_models_pytorch_example/base/__init__.py b/segmentation_models_pytorch_example/base/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..16a04016b915954c3b0dfb58f4098436375607d3 --- /dev/null +++ b/segmentation_models_pytorch_example/base/__init__.py @@ -0,0 +1,11 @@ +from .model import SegmentationModel + +from .modules import ( + Conv2dReLU, + Attention, +) + +from .heads import ( + SegmentationHead, + ClassificationHead, +) \ No newline at end of file diff --git a/segmentation_models_pytorch_example/base/heads.py b/segmentation_models_pytorch_example/base/heads.py new file mode 100644 index 0000000000000000000000000000000000000000..85317cf15c67b0680698d5ea513f9706810584d9 --- /dev/null +++ b/segmentation_models_pytorch_example/base/heads.py @@ -0,0 +1,24 @@ +import torch.nn as nn +from .modules import Activation + + +class SegmentationHead(nn.Sequential): + + def __init__(self, in_channels, out_channels, kernel_size=3, activation=None, upsampling=1): + conv2d = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, padding=kernel_size // 2) + upsampling = nn.UpsamplingBilinear2d(scale_factor=upsampling) if upsampling > 1 else nn.Identity() + activation = Activation(activation) + super().__init__(conv2d, upsampling, activation) + + +class ClassificationHead(nn.Sequential): + + def __init__(self, in_channels, classes, pooling="avg", dropout=0.2, activation=None): + if pooling not in ("max", "avg"): + raise ValueError("Pooling should be one of ('max', 'avg'), got {}.".format(pooling)) + pool = nn.AdaptiveAvgPool2d(1) if pooling == 'avg' else nn.AdaptiveMaxPool2d(1) + flatten = nn.Flatten() + dropout = nn.Dropout(p=dropout, inplace=True) if dropout else nn.Identity() + linear = nn.Linear(in_channels, classes, bias=True) + activation = Activation(activation) + super().__init__(pool, flatten, dropout, linear, activation) diff --git a/segmentation_models_pytorch_example/base/initialization.py b/segmentation_models_pytorch_example/base/initialization.py new file mode 100644 index 0000000000000000000000000000000000000000..9622130204a0172d43a5f32f4ade065e100f746e --- /dev/null +++ b/segmentation_models_pytorch_example/base/initialization.py @@ -0,0 +1,27 @@ +import torch.nn as nn + + +def initialize_decoder(module): + for m in module.modules(): + + if isinstance(m, nn.Conv2d): + nn.init.kaiming_uniform_(m.weight, mode="fan_in", nonlinearity="relu") + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + elif isinstance(m, nn.Linear): + nn.init.xavier_uniform_(m.weight) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + +def initialize_head(module): + for m in module.modules(): + if isinstance(m, (nn.Linear, nn.Conv2d)): + nn.init.xavier_uniform_(m.weight) + if m.bias is not None: + 
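# Illustrative sketch, not from the original sources: the create_model wrapper
# defined in this package's __init__.py resolves an architecture by its class
# name and forwards the remaining keyword arguments. The argument values below
# are assumptions chosen to mirror the ResNet-50 U-Net used elsewhere in this repo:
from segmentation_models_pytorch_example import create_model

model = create_model("unet", encoder_name="resnet50", encoder_weights=None,
                     in_channels=1, classes=8)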
nn.init.constant_(m.bias, 0) diff --git a/segmentation_models_pytorch_example/base/model.py b/segmentation_models_pytorch_example/base/model.py new file mode 100644 index 0000000000000000000000000000000000000000..e5ffad15a69e1dfcafc92f47c79bff28d6dfd474 --- /dev/null +++ b/segmentation_models_pytorch_example/base/model.py @@ -0,0 +1,42 @@ +import torch +from . import initialization as init + + +class SegmentationModel(torch.nn.Module): + + def initialize(self): + init.initialize_decoder(self.decoder) + init.initialize_head(self.segmentation_head) + if self.classification_head is not None: + init.initialize_head(self.classification_head) + + def forward(self, x): + """Sequentially pass `x` trough model`s encoder, decoder and heads""" + features = self.encoder(x) + decoder_output = self.decoder(*features) + + masks = self.segmentation_head(decoder_output) + + if self.classification_head is not None: + labels = self.classification_head(features[-1]) + return masks, labels + + return masks + + def predict(self, x): + """Inference method. Switch model to `eval` mode, call `.forward(x)` with `torch.no_grad()` + + Args: + x: 4D torch tensor with shape (batch_size, channels, height, width) + + Return: + prediction: 4D torch tensor with shape (batch_size, classes, height, width) + + """ + if self.training: + self.eval() + + with torch.no_grad(): + x = self.forward(x) + + return x diff --git a/segmentation_models_pytorch_example/base/modules.py b/segmentation_models_pytorch_example/base/modules.py new file mode 100644 index 0000000000000000000000000000000000000000..4074b059c99ac6019e0e1fb5ce6b50c44021a092 --- /dev/null +++ b/segmentation_models_pytorch_example/base/modules.py @@ -0,0 +1,120 @@ +import torch +import torch.nn as nn + +try: + from inplace_abn import InPlaceABN +except ImportError: + InPlaceABN = None + + +class Conv2dReLU(nn.Sequential): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + padding=0, + stride=1, + use_batchnorm=True, + ): + + if use_batchnorm == "inplace" and InPlaceABN is None: + raise RuntimeError( + "In order to use `use_batchnorm='inplace'` inplace_abn package must be installed. 
" + + "To install see: https://github.com/mapillary/inplace_abn" + ) + + conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size, + stride=stride, + padding=padding, + bias=not (use_batchnorm), + ) + relu = nn.ReLU(inplace=True) + + if use_batchnorm == "inplace": + bn = InPlaceABN(out_channels, activation="leaky_relu", activation_param=0.0) + relu = nn.Identity() + + elif use_batchnorm and use_batchnorm != "inplace": + bn = nn.BatchNorm2d(out_channels) + + else: + bn = nn.Identity() + + super(Conv2dReLU, self).__init__(conv, bn, relu) + + +class SCSEModule(nn.Module): + def __init__(self, in_channels, reduction=16): + super().__init__() + self.cSE = nn.Sequential( + nn.AdaptiveAvgPool2d(1), + nn.Conv2d(in_channels, in_channels // reduction, 1), + nn.ReLU(inplace=True), + nn.Conv2d(in_channels // reduction, in_channels, 1), + nn.Sigmoid(), + ) + self.sSE = nn.Sequential(nn.Conv2d(in_channels, 1, 1), nn.Sigmoid()) + + def forward(self, x): + return x * self.cSE(x) + x * self.sSE(x) + + +class ArgMax(nn.Module): + + def __init__(self, dim=None): + super().__init__() + self.dim = dim + + def forward(self, x): + return torch.argmax(x, dim=self.dim) + + +class Activation(nn.Module): + + def __init__(self, name, **params): + + super().__init__() + + if name is None or name == 'identity': + self.activation = nn.Identity(**params) + elif name == 'sigmoid': + self.activation = nn.Sigmoid() + elif name == 'softmax2d': + self.activation = nn.Softmax(dim=1, **params) + elif name == 'softmax': + self.activation = nn.Softmax(**params) + elif name == 'logsoftmax': + self.activation = nn.LogSoftmax(**params) + elif name == 'tanh': + self.activation = nn.Tanh() + elif name == 'argmax': + self.activation = ArgMax(**params) + elif name == 'argmax2d': + self.activation = ArgMax(dim=1, **params) + elif callable(name): + self.activation = name(**params) + else: + raise ValueError('Activation should be callable/sigmoid/softmax/logsoftmax/tanh/None; got {}'.format(name)) + + def forward(self, x): + return self.activation(x) + + +class Attention(nn.Module): + + def __init__(self, name, **params): + super().__init__() + + if name is None: + self.attention = nn.Identity(**params) + elif name == 'scse': + self.attention = SCSEModule(**params) + else: + raise ValueError("Attention {} is not implemented".format(name)) + + def forward(self, x): + return self.attention(x) diff --git a/segmentation_models_pytorch_example/deeplabv3/__init__.py b/segmentation_models_pytorch_example/deeplabv3/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9acd50af70bad232b3459f1c2705fd7c041285d6 --- /dev/null +++ b/segmentation_models_pytorch_example/deeplabv3/__init__.py @@ -0,0 +1 @@ +from .model import DeepLabV3, DeepLabV3Plus \ No newline at end of file diff --git a/segmentation_models_pytorch_example/deeplabv3/decoder.py b/segmentation_models_pytorch_example/deeplabv3/decoder.py new file mode 100644 index 0000000000000000000000000000000000000000..c19a5eaa8f0c9faa912bb34104f8b876dfa6aede --- /dev/null +++ b/segmentation_models_pytorch_example/deeplabv3/decoder.py @@ -0,0 +1,220 @@ +""" +BSD 3-Clause License + +Copyright (c) Soumith Chintala 2016, +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. 
+ +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" + +import torch +from torch import nn +from torch.nn import functional as F + +__all__ = ["DeepLabV3Decoder"] + + +class DeepLabV3Decoder(nn.Sequential): + def __init__(self, in_channels, out_channels=256, atrous_rates=(12, 24, 36)): + super().__init__( + ASPP(in_channels, out_channels, atrous_rates), + nn.Conv2d(out_channels, out_channels, 3, padding=1, bias=False), + nn.BatchNorm2d(out_channels), + nn.ReLU(), + ) + self.out_channels = out_channels + + def forward(self, *features): + return super().forward(features[-1]) + + +class DeepLabV3PlusDecoder(nn.Module): + def __init__( + self, + encoder_channels, + out_channels=256, + atrous_rates=(12, 24, 36), + output_stride=16, + ): + super().__init__() + if output_stride not in {8, 16}: + raise ValueError("Output stride should be 8 or 16, got {}.".format(output_stride)) + + self.out_channels = out_channels + self.output_stride = output_stride + + self.aspp = nn.Sequential( + ASPP(encoder_channels[-1], out_channels, atrous_rates, separable=True), + SeparableConv2d(out_channels, out_channels, kernel_size=3, padding=1, bias=False), + nn.BatchNorm2d(out_channels), + nn.ReLU(), + ) + + scale_factor = 2 if output_stride == 8 else 4 + self.up = nn.UpsamplingBilinear2d(scale_factor=scale_factor) + + highres_in_channels = encoder_channels[-4] + highres_out_channels = 48 # proposed by authors of paper + self.block1 = nn.Sequential( + nn.Conv2d(highres_in_channels, highres_out_channels, kernel_size=1, bias=False), + nn.BatchNorm2d(highres_out_channels), + nn.ReLU(), + ) + self.block2 = nn.Sequential( + SeparableConv2d( + highres_out_channels + out_channels, + out_channels, + kernel_size=3, + padding=1, + bias=False, + ), + nn.BatchNorm2d(out_channels), + nn.ReLU(), + ) + + def forward(self, *features): + aspp_features = self.aspp(features[-1]) + aspp_features = self.up(aspp_features) + high_res_features = self.block1(features[-4]) + concat_features = torch.cat([aspp_features, high_res_features], dim=1) + fused_features = self.block2(concat_features) + return fused_features + + +class ASPPConv(nn.Sequential): + def __init__(self, in_channels, out_channels, dilation): + super().__init__( + nn.Conv2d( + in_channels, + out_channels, + kernel_size=3, + padding=dilation, + dilation=dilation, + bias=False, + ), + nn.BatchNorm2d(out_channels), + nn.ReLU(), + ) + + +class 
ASPPSeparableConv(nn.Sequential): + def __init__(self, in_channels, out_channels, dilation): + super().__init__( + SeparableConv2d( + in_channels, + out_channels, + kernel_size=3, + padding=dilation, + dilation=dilation, + bias=False, + ), + nn.BatchNorm2d(out_channels), + nn.ReLU(), + ) + + +class ASPPPooling(nn.Sequential): + def __init__(self, in_channels, out_channels): + super().__init__( + nn.AdaptiveAvgPool2d(1), + nn.Conv2d(in_channels, out_channels, kernel_size=1, bias=False), + nn.BatchNorm2d(out_channels), + nn.ReLU(), + ) + + def forward(self, x): + size = x.shape[-2:] + for mod in self: + x = mod(x) + return F.interpolate(x, size=size, mode='bilinear', align_corners=False) + + +class ASPP(nn.Module): + def __init__(self, in_channels, out_channels, atrous_rates, separable=False): + super(ASPP, self).__init__() + modules = [] + modules.append( + nn.Sequential( + nn.Conv2d(in_channels, out_channels, 1, bias=False), + nn.BatchNorm2d(out_channels), + nn.ReLU(), + ) + ) + + rate1, rate2, rate3 = tuple(atrous_rates) + ASPPConvModule = ASPPConv if not separable else ASPPSeparableConv + + modules.append(ASPPConvModule(in_channels, out_channels, rate1)) + modules.append(ASPPConvModule(in_channels, out_channels, rate2)) + modules.append(ASPPConvModule(in_channels, out_channels, rate3)) + modules.append(ASPPPooling(in_channels, out_channels)) + + self.convs = nn.ModuleList(modules) + + self.project = nn.Sequential( + nn.Conv2d(5 * out_channels, out_channels, kernel_size=1, bias=False), + nn.BatchNorm2d(out_channels), + nn.ReLU(), + nn.Dropout(0.5), + ) + + def forward(self, x): + res = [] + for conv in self.convs: + res.append(conv(x)) + res = torch.cat(res, dim=1) + return self.project(res) + + +class SeparableConv2d(nn.Sequential): + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + padding=0, + dilation=1, + bias=True, + ): + dephtwise_conv = nn.Conv2d( + in_channels, + in_channels, + kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + groups=in_channels, + bias=False, + ) + pointwise_conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size=1, + bias=bias, + ) + super().__init__(dephtwise_conv, pointwise_conv) diff --git a/segmentation_models_pytorch_example/deeplabv3/model.py b/segmentation_models_pytorch_example/deeplabv3/model.py new file mode 100644 index 0000000000000000000000000000000000000000..0caf7325b93bcbd1e7dec479eaf7712c525a3978 --- /dev/null +++ b/segmentation_models_pytorch_example/deeplabv3/model.py @@ -0,0 +1,169 @@ +import torch.nn as nn + +from typing import Optional +from .decoder import DeepLabV3Decoder, DeepLabV3PlusDecoder +from ..base import SegmentationModel, SegmentationHead, ClassificationHead +from ..encoders import get_encoder + + +class DeepLabV3(SegmentationModel): + """DeepLabV3_ implementation from "Rethinking Atrous Convolution for Semantic Image Segmentation" + + Args: + encoder_name: Name of the classification model that will be used as an encoder (a.k.a backbone) + to extract features of different spatial resolution + encoder_depth: A number of stages used in encoder in range [3, 5]. Each stage generate features + two times smaller in spatial dimensions than previous one (e.g. for depth 0 we will have features + with shapes [(N, C, H, W),], for depth 1 - [(N, C, H, W), (N, C, H // 2, W // 2)] and so on). 
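# Illustrative sketch, not from the original sources: the ASPP block defined
# above runs a 1x1 branch, three (optionally separable) atrous branches and a
# global-pooling branch in parallel, concatenates the five results and projects
# them back to out_channels, so the spatial resolution is unchanged. The channel
# sizes below are assumptions for demonstration only:
import torch
from segmentation_models_pytorch_example.deeplabv3.decoder import ASPP

aspp = ASPP(in_channels=2048, out_channels=256, atrous_rates=(12, 24, 36))
y = aspp(torch.rand(1, 2048, 32, 32))
print(y.shape)  # torch.Size([1, 256, 32, 32])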
+ Default is 5 + encoder_weights: One of **None** (random initialization), **"imagenet"** (pre-training on ImageNet) and + other pretrained weights (see table with available weights for each encoder_name) + decoder_channels: A number of convolution filters in ASPP module. Default is 256 + in_channels: A number of input channels for the model, default is 3 (RGB images) + classes: A number of classes for output mask (or you can think as a number of channels of output mask) + activation: An activation function to apply after the final convolution layer. + Available options are **"sigmoid"**, **"softmax"**, **"logsoftmax"**, **"tanh"**, **"identity"**, **callable** and **None**. + Default is **None** + upsampling: Final upsampling factor. Default is 8 to preserve input-output spatial shape identity + aux_params: Dictionary with parameters of the auxiliary output (classification head). Auxiliary output is build + on top of encoder if **aux_params** is not **None** (default). Supported params: + - classes (int): A number of classes + - pooling (str): One of "max", "avg". Default is "avg" + - dropout (float): Dropout factor in [0, 1) + - activation (str): An activation function to apply "sigmoid"/"softmax" (could be **None** to return logits) + Returns: + ``torch.nn.Module``: **DeepLabV3** + + .. _DeeplabV3: + https://arxiv.org/abs/1706.05587 + + """ + + def __init__( + self, + encoder_name: str = "resnet34", + encoder_depth: int = 5, + encoder_weights: Optional[str] = "imagenet", + decoder_channels: int = 256, + in_channels: int = 3, + classes: int = 1, + activation: Optional[str] = None, + upsampling: int = 8, + aux_params: Optional[dict] = None, + ): + super().__init__() + + self.encoder = get_encoder( + encoder_name, + in_channels=in_channels, + depth=encoder_depth, + weights=encoder_weights, + output_stride=8, + ) + + self.decoder = DeepLabV3Decoder( + in_channels=self.encoder.out_channels[-1], + out_channels=decoder_channels, + ) + + self.segmentation_head = SegmentationHead( + in_channels=self.decoder.out_channels, + out_channels=classes, + activation=activation, + kernel_size=1, + upsampling=upsampling, + ) + + if aux_params is not None: + self.classification_head = ClassificationHead( + in_channels=self.encoder.out_channels[-1], **aux_params + ) + else: + self.classification_head = None + + +class DeepLabV3Plus(SegmentationModel): + """DeepLabV3+ implementation from "Encoder-Decoder with Atrous Separable + Convolution for Semantic Image Segmentation" + + Args: + encoder_name: Name of the classification model that will be used as an encoder (a.k.a backbone) + to extract features of different spatial resolution + encoder_depth: A number of stages used in encoder in range [3, 5]. Each stage generate features + two times smaller in spatial dimensions than previous one (e.g. for depth 0 we will have features + with shapes [(N, C, H, W),], for depth 1 - [(N, C, H, W), (N, C, H // 2, W // 2)] and so on). + Default is 5 + encoder_weights: One of **None** (random initialization), **"imagenet"** (pre-training on ImageNet) and + other pretrained weights (see table with available weights for each encoder_name) + encoder_output_stride: Downsampling factor for last encoder features (see original paper for explanation) + decoder_atrous_rates: Dilation rates for ASPP module (should be a tuple of 3 integer values) + decoder_channels: A number of convolution filters in ASPP module. 
Default is 256 + in_channels: A number of input channels for the model, default is 3 (RGB images) + classes: A number of classes for output mask (or you can think as a number of channels of output mask) + activation: An activation function to apply after the final convolution layer. + Available options are **"sigmoid"**, **"softmax"**, **"logsoftmax"**, **"tanh"**, **"identity"**, **callable** and **None**. + Default is **None** + upsampling: Final upsampling factor. Default is 4 to preserve input-output spatial shape identity + aux_params: Dictionary with parameters of the auxiliary output (classification head). Auxiliary output is build + on top of encoder if **aux_params** is not **None** (default). Supported params: + - classes (int): A number of classes + - pooling (str): One of "max", "avg". Default is "avg" + - dropout (float): Dropout factor in [0, 1) + - activation (str): An activation function to apply "sigmoid"/"softmax" (could be **None** to return logits) + Returns: + ``torch.nn.Module``: **DeepLabV3Plus** + + Reference: + https://arxiv.org/abs/1802.02611v3 + + """ + def __init__( + self, + encoder_name: str = "resnet34", + encoder_depth: int = 5, + encoder_weights: Optional[str] = "imagenet", + encoder_output_stride: int = 16, + decoder_channels: int = 256, + decoder_atrous_rates: tuple = (12, 24, 36), + in_channels: int = 3, + classes: int = 1, + activation: Optional[str] = None, + upsampling: int = 4, + aux_params: Optional[dict] = None, + ): + super().__init__() + + if encoder_output_stride not in [8, 16]: + raise ValueError( + "Encoder output stride should be 8 or 16, got {}".format(encoder_output_stride) + ) + + self.encoder = get_encoder( + encoder_name, + in_channels=in_channels, + depth=encoder_depth, + weights=encoder_weights, + output_stride=encoder_output_stride, + ) + + self.decoder = DeepLabV3PlusDecoder( + encoder_channels=self.encoder.out_channels, + out_channels=decoder_channels, + atrous_rates=decoder_atrous_rates, + output_stride=encoder_output_stride, + ) + + self.segmentation_head = SegmentationHead( + in_channels=self.decoder.out_channels, + out_channels=classes, + activation=activation, + kernel_size=1, + upsampling=upsampling, + ) + + if aux_params is not None: + self.classification_head = ClassificationHead( + in_channels=self.encoder.out_channels[-1], **aux_params + ) + else: + self.classification_head = None diff --git a/segmentation_models_pytorch_example/encoders/__init__.py b/segmentation_models_pytorch_example/encoders/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7cefbd571fc99b2b853dcb3d2b0244a419827f1b --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/__init__.py @@ -0,0 +1,132 @@ +import timm +import functools +import torch.utils.model_zoo as model_zoo + +from .resnet import resnet_encoders +from .dpn import dpn_encoders +from .vgg import vgg_encoders +from .senet import senet_encoders +from .densenet import densenet_encoders +from .inceptionresnetv2 import inceptionresnetv2_encoders +from .inceptionv4 import inceptionv4_encoders +from .efficientnet import efficient_net_encoders +from .mobilenet import mobilenet_encoders +from .xception import xception_encoders +from .timm_efficientnet import timm_efficientnet_encoders +from .timm_resnest import timm_resnest_encoders +from .timm_res2net import timm_res2net_encoders +from .timm_regnet import timm_regnet_encoders +from .timm_sknet import timm_sknet_encoders +from .timm_mobilenetv3 import timm_mobilenetv3_encoders +from .timm_gernet import 
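# Illustrative sketch, not from the original sources: a minimal DeepLabV3Plus
# instantiation with the defaults documented above (encoder_output_stride=16,
# upsampling=4 restores the input resolution); encoder_weights=None avoids any
# checkpoint download. Input size and class count are assumptions:
import torch
from segmentation_models_pytorch_example import DeepLabV3Plus

model = DeepLabV3Plus(encoder_name="resnet34", encoder_weights=None,
                      in_channels=3, classes=2)
mask = model(torch.rand(1, 3, 256, 256))
print(mask.shape)  # torch.Size([1, 2, 256, 256])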
timm_gernet_encoders +from .mix_transformer import mix_transformer_encoders +from .mobileone import mobileone_encoders + +from .timm_universal import TimmUniversalEncoder + +from ._preprocessing import preprocess_input + +encoders = {} +encoders.update(resnet_encoders) +encoders.update(dpn_encoders) +encoders.update(vgg_encoders) +encoders.update(senet_encoders) +encoders.update(densenet_encoders) +encoders.update(inceptionresnetv2_encoders) +encoders.update(inceptionv4_encoders) +encoders.update(efficient_net_encoders) +encoders.update(mobilenet_encoders) +encoders.update(xception_encoders) +encoders.update(timm_efficientnet_encoders) +encoders.update(timm_resnest_encoders) +encoders.update(timm_res2net_encoders) +encoders.update(timm_regnet_encoders) +encoders.update(timm_sknet_encoders) +encoders.update(timm_mobilenetv3_encoders) +encoders.update(timm_gernet_encoders) +encoders.update(mix_transformer_encoders) +encoders.update(mobileone_encoders) + + +def get_encoder(name, in_channels=3, depth=5, weights=None, output_stride=32, **kwargs): + + if name.startswith("tu-"): + name = name[3:] + encoder = TimmUniversalEncoder( + name=name, + in_channels=in_channels, + depth=depth, + output_stride=output_stride, + pretrained=weights is not None, + **kwargs, + ) + return encoder + + try: + Encoder = encoders[name]["encoder"] + except KeyError: + raise KeyError("Wrong encoder name `{}`, supported encoders: {}".format(name, list(encoders.keys()))) + + params = encoders[name]["params"] + params.update(depth=depth) + encoder = Encoder(**params) + + if weights is not None: + try: + settings = encoders[name]["pretrained_settings"][weights] + except KeyError: + raise KeyError( + "Wrong pretrained weights `{}` for encoder `{}`. Available options are: {}".format( + weights, + name, + list(encoders[name]["pretrained_settings"].keys()), + ) + ) +# encoder.load_state_dict(model_zoo.load_url(settings["url"])) + try: + if 'lvmmed' in settings["url"]: + print(settings['url']) + path = settings['url'] + import torch + weights = torch.load(path, map_location = 'cpu') + except KeyError: + raise KeyError( + "Pretrained weights not exist") + encoder.load_state_dict(weights) + + encoder.set_in_channels(in_channels, pretrained=weights is not None) + if output_stride != 32: + encoder.make_dilated(output_stride) + + return encoder + + +def get_encoder_names(): + return list(encoders.keys()) + + +def get_preprocessing_params(encoder_name, pretrained="imagenet"): + + if encoder_name.startswith("tu-"): + encoder_name = encoder_name[3:] + if not timm.models.is_model_pretrained(encoder_name): + raise ValueError(f"{encoder_name} does not have pretrained weights and preprocessing parameters") + settings = timm.models.get_pretrained_cfg(encoder_name).__dict__ + else: + all_settings = encoders[encoder_name]["pretrained_settings"] + if pretrained not in all_settings.keys(): + raise ValueError("Available pretrained options {}".format(all_settings.keys())) + settings = all_settings[pretrained] + + formatted_settings = {} + formatted_settings["input_space"] = settings.get("input_space", "RGB") + formatted_settings["input_range"] = list(settings.get("input_range", [0, 1])) + formatted_settings["mean"] = list(settings["mean"]) + formatted_settings["std"] = list(settings["std"]) + + return formatted_settings + + +def get_preprocessing_fn(encoder_name, pretrained="imagenet"): + params = get_preprocessing_params(encoder_name, pretrained=pretrained) + return functools.partial(preprocess_input, **params) diff --git 
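# Illustrative sketch, not from the original sources: this vendored get_encoder
# replaces the upstream model_zoo.load_url call (commented out above) with a
# branch that treats a pretrained_settings "url" containing 'lvmmed' as a local
# checkpoint path and loads it via torch.load(map_location='cpu'). A hypothetical
# registry entry and call would look like this (path and key are assumptions):
#
#     encoders["resnet50"]["pretrained_settings"]["lvmmed"] = {
#         "url": "lvmmed_resnet50.pth",                     # hypothetical local path
#         "mean": [0.485, 0.456, 0.406],
#         "std": [0.229, 0.224, 0.225],
#     }
#     encoder = get_encoder("resnet50", in_channels=1, weights="lvmmed")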
a/segmentation_models_pytorch_example/encoders/_base.py b/segmentation_models_pytorch_example/encoders/_base.py new file mode 100644 index 0000000000000000000000000000000000000000..f4bca8b652e53b384e82a430ddea4494c0c3e848 --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/_base.py @@ -0,0 +1,53 @@ +import torch +import torch.nn as nn +from typing import List +from collections import OrderedDict + +from . import _utils as utils + + +class EncoderMixin: + """Add encoder functionality such as: + - output channels specification of feature tensors (produced by encoder) + - patching first convolution for arbitrary input channels + """ + + @property + def out_channels(self): + """Return channels dimensions for each tensor of forward output of encoder""" + return self._out_channels[: self._depth + 1] + + def set_in_channels(self, in_channels, pretrained=True): + """Change first convolution channels""" + if in_channels == 3: + return + + self._in_channels = in_channels + if self._out_channels[0] == 3: + self._out_channels = tuple([in_channels] + list(self._out_channels)[1:]) + + utils.patch_first_conv(model=self, new_in_channels=in_channels, pretrained=pretrained) + + def get_stages(self): + """Method should be overridden in encoder""" + raise NotImplementedError + + def make_dilated(self, output_stride): + + if output_stride == 16: + stage_list=[5,] + dilation_list=[2,] + + elif output_stride == 8: + stage_list=[4, 5] + dilation_list=[2, 4] + + else: + raise ValueError("Output stride should be 16 or 8, got {}.".format(output_stride)) + + stages = self.get_stages() + for stage_indx, dilation_rate in zip(stage_list, dilation_list): + utils.replace_strides_with_dilation( + module=stages[stage_indx], + dilation_rate=dilation_rate, + ) diff --git a/segmentation_models_pytorch_example/encoders/_preprocessing.py b/segmentation_models_pytorch_example/encoders/_preprocessing.py new file mode 100644 index 0000000000000000000000000000000000000000..ec19d542f1fd8033525ef056adf252041db26e15 --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/_preprocessing.py @@ -0,0 +1,23 @@ +import numpy as np + + +def preprocess_input( + x, mean=None, std=None, input_space="RGB", input_range=None, **kwargs +): + + if input_space == "BGR": + x = x[..., ::-1].copy() + + if input_range is not None: + if x.max() > 1 and input_range[1] == 1: + x = x / 255.0 + + if mean is not None: + mean = np.array(mean) + x = x - mean + + if std is not None: + std = np.array(std) + x = x / std + + return x diff --git a/segmentation_models_pytorch_example/encoders/_utils.py b/segmentation_models_pytorch_example/encoders/_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..859151c41d9de50ba90a3d0d3408b97803fae2fd --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/_utils.py @@ -0,0 +1,59 @@ +import torch +import torch.nn as nn + + +def patch_first_conv(model, new_in_channels, default_in_channels=3, pretrained=True): + """Change first convolution layer input channels. 
+ In case: + in_channels == 1 or in_channels == 2 -> reuse original weights + in_channels > 3 -> make random kaiming normal initialization + """ + + # get first conv + for module in model.modules(): + if isinstance(module, nn.Conv2d) and module.in_channels == default_in_channels: + break + + weight = module.weight.detach() + module.in_channels = new_in_channels + + if not pretrained: + module.weight = nn.parameter.Parameter( + torch.Tensor( + module.out_channels, + new_in_channels // module.groups, + *module.kernel_size + ) + ) + module.reset_parameters() + + elif new_in_channels == 1: + new_weight = weight.sum(1, keepdim=True) + module.weight = nn.parameter.Parameter(new_weight) + + else: + new_weight = torch.Tensor( + module.out_channels, + new_in_channels // module.groups, + *module.kernel_size + ) + + for i in range(new_in_channels): + new_weight[:, i] = weight[:, i % default_in_channels] + + new_weight = new_weight * (default_in_channels / new_in_channels) + module.weight = nn.parameter.Parameter(new_weight) + + +def replace_strides_with_dilation(module, dilation_rate): + """Patch Conv2d modules replacing strides with dilation""" + for mod in module.modules(): + if isinstance(mod, nn.Conv2d): + mod.stride = (1, 1) + mod.dilation = (dilation_rate, dilation_rate) + kh, kw = mod.kernel_size + mod.padding = ((kh // 2) * dilation_rate, (kh // 2) * dilation_rate) + + # Kostyl for EfficientNet + if hasattr(mod, "static_padding"): + mod.static_padding = nn.Identity() diff --git a/segmentation_models_pytorch_example/encoders/densenet.py b/segmentation_models_pytorch_example/encoders/densenet.py new file mode 100644 index 0000000000000000000000000000000000000000..0247c8af145663c71bab62a30fb45da41a526b84 --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/densenet.py @@ -0,0 +1,146 @@ +""" Each encoder should have following attributes and methods and be inherited from `_base.EncoderMixin` + +Attributes: + + _out_channels (list of int): specify number of channels for each encoder feature tensor + _depth (int): specify number of stages in decoder (in other words number of downsampling operations) + _in_channels (int): default number of input channels in first Conv2d layer for encoder (usually 3) + +Methods: + + forward(self, x: torch.Tensor) + produce list of features of different spatial resolutions, each feature is a 4D torch.tensor of + shape NCHW (features should be sorted in descending order according to spatial resolution, starting + with resolution same as input `x` tensor). + + Input: `x` with shape (1, 3, 64, 64) + Output: [f0, f1, f2, f3, f4, f5] - features with corresponding shapes + [(1, 3, 64, 64), (1, 64, 32, 32), (1, 128, 16, 16), (1, 256, 8, 8), + (1, 512, 4, 4), (1, 1024, 2, 2)] (C - dim may differ) + + also should support number of features according to specified depth, e.g. if depth = 5, + number of feature tensors = 6 (one with same resolution as input and 5 downsampled), + depth = 3 -> number of feature tensors = 4 (one with same resolution as input and 3 downsampled). 
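# Illustrative sketch, not from the original sources: patch_first_conv above is
# what lets the in_channels=1 configurations in this repo reuse RGB-pretrained
# encoders -- for one input channel the first-layer kernel is collapsed by
# summing over the channel axis (for more than 3 channels, kernels are cycled
# and rescaled by 3/new_in_channels). A torchvision resnet18 stands in here
# purely for demonstration (recent torchvision; weights=None keeps it offline):
import torchvision
from segmentation_models_pytorch_example.encoders._utils import patch_first_conv

m = torchvision.models.resnet18(weights=None)
patch_first_conv(m, new_in_channels=1, pretrained=True)
print(m.conv1.weight.shape)  # torch.Size([64, 1, 7, 7])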
+""" + +import re +import torch.nn as nn + +from pretrainedmodels.models.torchvision_models import pretrained_settings +from torchvision.models.densenet import DenseNet + +from ._base import EncoderMixin + + +class TransitionWithSkip(nn.Module): + + def __init__(self, module): + super().__init__() + self.module = module + + def forward(self, x): + for module in self.module: + x = module(x) + if isinstance(module, nn.ReLU): + skip = x + return x, skip + + +class DenseNetEncoder(DenseNet, EncoderMixin): + def __init__(self, out_channels, depth=5, **kwargs): + super().__init__(**kwargs) + self._out_channels = out_channels + self._depth = depth + self._in_channels = 3 + del self.classifier + + def make_dilated(self, stage_list, dilation_list): + raise ValueError("DenseNet encoders do not support dilated mode " + "due to pooling operation for downsampling!") + + def get_stages(self): + return [ + nn.Identity(), + nn.Sequential(self.features.conv0, self.features.norm0, self.features.relu0), + nn.Sequential(self.features.pool0, self.features.denseblock1, + TransitionWithSkip(self.features.transition1)), + nn.Sequential(self.features.denseblock2, TransitionWithSkip(self.features.transition2)), + nn.Sequential(self.features.denseblock3, TransitionWithSkip(self.features.transition3)), + nn.Sequential(self.features.denseblock4, self.features.norm5) + ] + + def forward(self, x): + + stages = self.get_stages() + + features = [] + for i in range(self._depth + 1): + x = stages[i](x) + if isinstance(x, (list, tuple)): + x, skip = x + features.append(skip) + else: + features.append(x) + + return features + + def load_state_dict(self, state_dict): + pattern = re.compile( + r"^(.*denselayer\d+\.(?:norm|relu|conv))\.((?:[12])\.(?:weight|bias|running_mean|running_var))$" + ) + for key in list(state_dict.keys()): + res = pattern.match(key) + if res: + new_key = res.group(1) + res.group(2) + state_dict[new_key] = state_dict[key] + del state_dict[key] + + # remove linear + state_dict.pop("classifier.bias", None) + state_dict.pop("classifier.weight", None) + + super().load_state_dict(state_dict) + + +densenet_encoders = { + "densenet121": { + "encoder": DenseNetEncoder, + "pretrained_settings": pretrained_settings["densenet121"], + "params": { + "out_channels": (3, 64, 256, 512, 1024, 1024), + "num_init_features": 64, + "growth_rate": 32, + "block_config": (6, 12, 24, 16), + }, + }, + "densenet169": { + "encoder": DenseNetEncoder, + "pretrained_settings": pretrained_settings["densenet169"], + "params": { + "out_channels": (3, 64, 256, 512, 1280, 1664), + "num_init_features": 64, + "growth_rate": 32, + "block_config": (6, 12, 32, 32), + }, + }, + "densenet201": { + "encoder": DenseNetEncoder, + "pretrained_settings": pretrained_settings["densenet201"], + "params": { + "out_channels": (3, 64, 256, 512, 1792, 1920), + "num_init_features": 64, + "growth_rate": 32, + "block_config": (6, 12, 48, 32), + }, + }, + "densenet161": { + "encoder": DenseNetEncoder, + "pretrained_settings": pretrained_settings["densenet161"], + "params": { + "out_channels": (3, 96, 384, 768, 2112, 2208), + "num_init_features": 96, + "growth_rate": 48, + "block_config": (6, 12, 36, 24), + }, + }, +} diff --git a/segmentation_models_pytorch_example/encoders/dpn.py b/segmentation_models_pytorch_example/encoders/dpn.py new file mode 100644 index 0000000000000000000000000000000000000000..7f1bd7dac6f640df26ba371ceb02f6ebd326a0f2 --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/dpn.py @@ -0,0 +1,170 @@ +""" Each encoder should have 
following attributes and methods and be inherited from `_base.EncoderMixin` + +Attributes: + + _out_channels (list of int): specify number of channels for each encoder feature tensor + _depth (int): specify number of stages in decoder (in other words number of downsampling operations) + _in_channels (int): default number of input channels in first Conv2d layer for encoder (usually 3) + +Methods: + + forward(self, x: torch.Tensor) + produce list of features of different spatial resolutions, each feature is a 4D torch.tensor of + shape NCHW (features should be sorted in descending order according to spatial resolution, starting + with resolution same as input `x` tensor). + + Input: `x` with shape (1, 3, 64, 64) + Output: [f0, f1, f2, f3, f4, f5] - features with corresponding shapes + [(1, 3, 64, 64), (1, 64, 32, 32), (1, 128, 16, 16), (1, 256, 8, 8), + (1, 512, 4, 4), (1, 1024, 2, 2)] (C - dim may differ) + + also should support number of features according to specified depth, e.g. if depth = 5, + number of feature tensors = 6 (one with same resolution as input and 5 downsampled), + depth = 3 -> number of feature tensors = 4 (one with same resolution as input and 3 downsampled). +""" + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from pretrainedmodels.models.dpn import DPN +from pretrainedmodels.models.dpn import pretrained_settings + +from ._base import EncoderMixin + + +class DPNEncoder(DPN, EncoderMixin): + def __init__(self, stage_idxs, out_channels, depth=5, **kwargs): + super().__init__(**kwargs) + self._stage_idxs = stage_idxs + self._depth = depth + self._out_channels = out_channels + self._in_channels = 3 + + del self.last_linear + + def get_stages(self): + return [ + nn.Identity(), + nn.Sequential(self.features[0].conv, self.features[0].bn, self.features[0].act), + nn.Sequential(self.features[0].pool, self.features[1 : self._stage_idxs[0]]), + self.features[self._stage_idxs[0] : self._stage_idxs[1]], + self.features[self._stage_idxs[1] : self._stage_idxs[2]], + self.features[self._stage_idxs[2] : self._stage_idxs[3]], + ] + + def forward(self, x): + + stages = self.get_stages() + + features = [] + for i in range(self._depth + 1): + x = stages[i](x) + if isinstance(x, (list, tuple)): + features.append(F.relu(torch.cat(x, dim=1), inplace=True)) + else: + features.append(x) + + return features + + def load_state_dict(self, state_dict, **kwargs): + state_dict.pop("last_linear.bias", None) + state_dict.pop("last_linear.weight", None) + super().load_state_dict(state_dict, **kwargs) + + +dpn_encoders = { + "dpn68": { + "encoder": DPNEncoder, + "pretrained_settings": pretrained_settings["dpn68"], + "params": { + "stage_idxs": (4, 8, 20, 24), + "out_channels": (3, 10, 144, 320, 704, 832), + "groups": 32, + "inc_sec": (16, 32, 32, 64), + "k_r": 128, + "k_sec": (3, 4, 12, 3), + "num_classes": 1000, + "num_init_features": 10, + "small": True, + "test_time_pool": True, + }, + }, + "dpn68b": { + "encoder": DPNEncoder, + "pretrained_settings": pretrained_settings["dpn68b"], + "params": { + "stage_idxs": (4, 8, 20, 24), + "out_channels": (3, 10, 144, 320, 704, 832), + "b": True, + "groups": 32, + "inc_sec": (16, 32, 32, 64), + "k_r": 128, + "k_sec": (3, 4, 12, 3), + "num_classes": 1000, + "num_init_features": 10, + "small": True, + "test_time_pool": True, + }, + }, + "dpn92": { + "encoder": DPNEncoder, + "pretrained_settings": pretrained_settings["dpn92"], + "params": { + "stage_idxs": (4, 8, 28, 32), + "out_channels": (3, 64, 336, 704, 1552, 2688), + "groups": 32, + 
"inc_sec": (16, 32, 24, 128), + "k_r": 96, + "k_sec": (3, 4, 20, 3), + "num_classes": 1000, + "num_init_features": 64, + "test_time_pool": True, + }, + }, + "dpn98": { + "encoder": DPNEncoder, + "pretrained_settings": pretrained_settings["dpn98"], + "params": { + "stage_idxs": (4, 10, 30, 34), + "out_channels": (3, 96, 336, 768, 1728, 2688), + "groups": 40, + "inc_sec": (16, 32, 32, 128), + "k_r": 160, + "k_sec": (3, 6, 20, 3), + "num_classes": 1000, + "num_init_features": 96, + "test_time_pool": True, + }, + }, + "dpn107": { + "encoder": DPNEncoder, + "pretrained_settings": pretrained_settings["dpn107"], + "params": { + "stage_idxs": (5, 13, 33, 37), + "out_channels": (3, 128, 376, 1152, 2432, 2688), + "groups": 50, + "inc_sec": (20, 64, 64, 128), + "k_r": 200, + "k_sec": (4, 8, 20, 3), + "num_classes": 1000, + "num_init_features": 128, + "test_time_pool": True, + }, + }, + "dpn131": { + "encoder": DPNEncoder, + "pretrained_settings": pretrained_settings["dpn131"], + "params": { + "stage_idxs": (5, 13, 41, 45), + "out_channels": (3, 128, 352, 832, 1984, 2688), + "groups": 40, + "inc_sec": (16, 32, 32, 128), + "k_r": 160, + "k_sec": (4, 8, 28, 3), + "num_classes": 1000, + "num_init_features": 128, + "test_time_pool": True, + }, + }, +} diff --git a/segmentation_models_pytorch_example/encoders/efficientnet.py b/segmentation_models_pytorch_example/encoders/efficientnet.py new file mode 100644 index 0000000000000000000000000000000000000000..1ea762fc93c548bc7f92619ba95693ecfbd9ccaf --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/efficientnet.py @@ -0,0 +1,179 @@ +""" Each encoder should have following attributes and methods and be inherited from `_base.EncoderMixin` + +Attributes: + + _out_channels (list of int): specify number of channels for each encoder feature tensor + _depth (int): specify number of stages in decoder (in other words number of downsampling operations) + _in_channels (int): default number of input channels in first Conv2d layer for encoder (usually 3) + +Methods: + + forward(self, x: torch.Tensor) + produce list of features of different spatial resolutions, each feature is a 4D torch.tensor of + shape NCHW (features should be sorted in descending order according to spatial resolution, starting + with resolution same as input `x` tensor). + + Input: `x` with shape (1, 3, 64, 64) + Output: [f0, f1, f2, f3, f4, f5] - features with corresponding shapes + [(1, 3, 64, 64), (1, 64, 32, 32), (1, 128, 16, 16), (1, 256, 8, 8), + (1, 512, 4, 4), (1, 1024, 2, 2)] (C - dim may differ) + + also should support number of features according to specified depth, e.g. if depth = 5, + number of feature tensors = 6 (one with same resolution as input and 5 downsampled), + depth = 3 -> number of feature tensors = 4 (one with same resolution as input and 3 downsampled). 
+""" +import torch.nn as nn +from efficientnet_pytorch import EfficientNet +from efficientnet_pytorch.utils import url_map, url_map_advprop, get_model_params + +from ._base import EncoderMixin + + +class EfficientNetEncoder(EfficientNet, EncoderMixin): + def __init__(self, stage_idxs, out_channels, model_name, depth=5): + + blocks_args, global_params = get_model_params(model_name, override_params=None) + super().__init__(blocks_args, global_params) + + self._stage_idxs = stage_idxs + self._out_channels = out_channels + self._depth = depth + self._in_channels = 3 + + del self._fc + + def get_stages(self): + return [ + nn.Identity(), + nn.Sequential(self._conv_stem, self._bn0, self._swish), + self._blocks[:self._stage_idxs[0]], + self._blocks[self._stage_idxs[0]:self._stage_idxs[1]], + self._blocks[self._stage_idxs[1]:self._stage_idxs[2]], + self._blocks[self._stage_idxs[2]:], + ] + + def forward(self, x): + stages = self.get_stages() + + block_number = 0. + drop_connect_rate = self._global_params.drop_connect_rate + + features = [] + for i in range(self._depth + 1): + + # Identity and Sequential stages + if i < 2: + x = stages[i](x) + + # Block stages need drop_connect rate + else: + for module in stages[i]: + drop_connect = drop_connect_rate * block_number / len(self._blocks) + block_number += 1. + x = module(x, drop_connect) + + features.append(x) + + return features + + def load_state_dict(self, state_dict, **kwargs): + state_dict.pop("_fc.bias", None) + state_dict.pop("_fc.weight", None) + super().load_state_dict(state_dict, **kwargs) + + +def _get_pretrained_settings(encoder): + pretrained_settings = { + "imagenet": { + "mean": [0.485, 0.456, 0.406], + "std": [0.229, 0.224, 0.225], + "url": url_map[encoder], + "input_space": "RGB", + "input_range": [0, 1], + }, + "advprop": { + "mean": [0.5, 0.5, 0.5], + "std": [0.5, 0.5, 0.5], + "url": url_map_advprop[encoder], + "input_space": "RGB", + "input_range": [0, 1], + } + } + return pretrained_settings + + +efficient_net_encoders = { + "efficientnet-b0": { + "encoder": EfficientNetEncoder, + "pretrained_settings": _get_pretrained_settings("efficientnet-b0"), + # "pretrained_settings": '/home/caduser/KOTORI/WEIGHTS/Supervised/efficientnet-b0-355c32eb.pth', + "params": { + "out_channels": (3, 32, 24, 40, 112, 320), + "stage_idxs": (3, 5, 9, 16), + "model_name": "efficientnet-b0", + }, + }, + "efficientnet-b1": { + "encoder": EfficientNetEncoder, + "pretrained_settings": _get_pretrained_settings("efficientnet-b1"), + "params": { + "out_channels": (3, 32, 24, 40, 112, 320), + "stage_idxs": (5, 8, 16, 23), + "model_name": "efficientnet-b1", + }, + }, + "efficientnet-b2": { + "encoder": EfficientNetEncoder, + "pretrained_settings": _get_pretrained_settings("efficientnet-b2"), + "params": { + "out_channels": (3, 32, 24, 48, 120, 352), + "stage_idxs": (5, 8, 16, 23), + "model_name": "efficientnet-b2", + }, + }, + "efficientnet-b3": { + "encoder": EfficientNetEncoder, + "pretrained_settings": _get_pretrained_settings("efficientnet-b3"), + "params": { + "out_channels": (3, 40, 32, 48, 136, 384), + "stage_idxs": (5, 8, 18, 26), + "model_name": "efficientnet-b3", + }, + }, + "efficientnet-b4": { + "encoder": EfficientNetEncoder, + "pretrained_settings": _get_pretrained_settings("efficientnet-b4"), + "params": { + "out_channels": (3, 48, 32, 56, 160, 448), + "stage_idxs": (6, 10, 22, 32), + "model_name": "efficientnet-b4", + }, + }, + "efficientnet-b5": { + "encoder": EfficientNetEncoder, + "pretrained_settings": 
_get_pretrained_settings("efficientnet-b5"), + "params": { + "out_channels": (3, 48, 40, 64, 176, 512), + "stage_idxs": (8, 13, 27, 39), + "model_name": "efficientnet-b5", + }, + }, + "efficientnet-b6": { + "encoder": EfficientNetEncoder, + "pretrained_settings": _get_pretrained_settings("efficientnet-b6"), + "params": { + "out_channels": (3, 56, 40, 72, 200, 576), + "stage_idxs": (9, 15, 31, 45), + "model_name": "efficientnet-b6", + }, + }, + "efficientnet-b7": { + "encoder": EfficientNetEncoder, + "pretrained_settings": _get_pretrained_settings("efficientnet-b7"), + "params": { + "out_channels": (3, 64, 48, 80, 224, 640), + "stage_idxs": (11, 18, 38, 55), + "model_name": "efficientnet-b7", + }, + }, +} diff --git a/segmentation_models_pytorch_example/encoders/inceptionresnetv2.py b/segmentation_models_pytorch_example/encoders/inceptionresnetv2.py new file mode 100644 index 0000000000000000000000000000000000000000..8488ac8573752923e41dfaa440b435844d68c478 --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/inceptionresnetv2.py @@ -0,0 +1,90 @@ +""" Each encoder should have following attributes and methods and be inherited from `_base.EncoderMixin` + +Attributes: + + _out_channels (list of int): specify number of channels for each encoder feature tensor + _depth (int): specify number of stages in decoder (in other words number of downsampling operations) + _in_channels (int): default number of input channels in first Conv2d layer for encoder (usually 3) + +Methods: + + forward(self, x: torch.Tensor) + produce list of features of different spatial resolutions, each feature is a 4D torch.tensor of + shape NCHW (features should be sorted in descending order according to spatial resolution, starting + with resolution same as input `x` tensor). + + Input: `x` with shape (1, 3, 64, 64) + Output: [f0, f1, f2, f3, f4, f5] - features with corresponding shapes + [(1, 3, 64, 64), (1, 64, 32, 32), (1, 128, 16, 16), (1, 256, 8, 8), + (1, 512, 4, 4), (1, 1024, 2, 2)] (C - dim may differ) + + also should support number of features according to specified depth, e.g. if depth = 5, + number of feature tensors = 6 (one with same resolution as input and 5 downsampled), + depth = 3 -> number of feature tensors = 4 (one with same resolution as input and 3 downsampled). 
+""" + +import torch.nn as nn +from pretrainedmodels.models.inceptionresnetv2 import InceptionResNetV2 +from pretrainedmodels.models.inceptionresnetv2 import pretrained_settings + +from ._base import EncoderMixin + + +class InceptionResNetV2Encoder(InceptionResNetV2, EncoderMixin): + def __init__(self, out_channels, depth=5, **kwargs): + super().__init__(**kwargs) + + self._out_channels = out_channels + self._depth = depth + self._in_channels = 3 + + # correct paddings + for m in self.modules(): + if isinstance(m, nn.Conv2d): + if m.kernel_size == (3, 3): + m.padding = (1, 1) + if isinstance(m, nn.MaxPool2d): + m.padding = (1, 1) + + # remove linear layers + del self.avgpool_1a + del self.last_linear + + def make_dilated(self, stage_list, dilation_list): + raise ValueError("InceptionResnetV2 encoder does not support dilated mode " + "due to pooling operation for downsampling!") + + def get_stages(self): + return [ + nn.Identity(), + nn.Sequential(self.conv2d_1a, self.conv2d_2a, self.conv2d_2b), + nn.Sequential(self.maxpool_3a, self.conv2d_3b, self.conv2d_4a), + nn.Sequential(self.maxpool_5a, self.mixed_5b, self.repeat), + nn.Sequential(self.mixed_6a, self.repeat_1), + nn.Sequential(self.mixed_7a, self.repeat_2, self.block8, self.conv2d_7b), + ] + + def forward(self, x): + + stages = self.get_stages() + + features = [] + for i in range(self._depth + 1): + x = stages[i](x) + features.append(x) + + return features + + def load_state_dict(self, state_dict, **kwargs): + state_dict.pop("last_linear.bias", None) + state_dict.pop("last_linear.weight", None) + super().load_state_dict(state_dict, **kwargs) + + +inceptionresnetv2_encoders = { + "inceptionresnetv2": { + "encoder": InceptionResNetV2Encoder, + "pretrained_settings": pretrained_settings["inceptionresnetv2"], + "params": {"out_channels": (3, 64, 192, 320, 1088, 1536), "num_classes": 1000}, + } +} diff --git a/segmentation_models_pytorch_example/encoders/inceptionv4.py b/segmentation_models_pytorch_example/encoders/inceptionv4.py new file mode 100644 index 0000000000000000000000000000000000000000..bd180642be1f86a3665c829b0605163528a1b53b --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/inceptionv4.py @@ -0,0 +1,93 @@ +""" Each encoder should have following attributes and methods and be inherited from `_base.EncoderMixin` + +Attributes: + + _out_channels (list of int): specify number of channels for each encoder feature tensor + _depth (int): specify number of stages in decoder (in other words number of downsampling operations) + _in_channels (int): default number of input channels in first Conv2d layer for encoder (usually 3) + +Methods: + + forward(self, x: torch.Tensor) + produce list of features of different spatial resolutions, each feature is a 4D torch.tensor of + shape NCHW (features should be sorted in descending order according to spatial resolution, starting + with resolution same as input `x` tensor). + + Input: `x` with shape (1, 3, 64, 64) + Output: [f0, f1, f2, f3, f4, f5] - features with corresponding shapes + [(1, 3, 64, 64), (1, 64, 32, 32), (1, 128, 16, 16), (1, 256, 8, 8), + (1, 512, 4, 4), (1, 1024, 2, 2)] (C - dim may differ) + + also should support number of features according to specified depth, e.g. if depth = 5, + number of feature tensors = 6 (one with same resolution as input and 5 downsampled), + depth = 3 -> number of feature tensors = 4 (one with same resolution as input and 3 downsampled). 
+""" + +import torch.nn as nn +from pretrainedmodels.models.inceptionv4 import InceptionV4, BasicConv2d +from pretrainedmodels.models.inceptionv4 import pretrained_settings + +from ._base import EncoderMixin + + +class InceptionV4Encoder(InceptionV4, EncoderMixin): + def __init__(self, stage_idxs, out_channels, depth=5, **kwargs): + super().__init__(**kwargs) + self._stage_idxs = stage_idxs + self._out_channels = out_channels + self._depth = depth + self._in_channels = 3 + + # correct paddings + for m in self.modules(): + if isinstance(m, nn.Conv2d): + if m.kernel_size == (3, 3): + m.padding = (1, 1) + if isinstance(m, nn.MaxPool2d): + m.padding = (1, 1) + + # remove linear layers + del self.last_linear + + def make_dilated(self, stage_list, dilation_list): + raise ValueError("InceptionV4 encoder does not support dilated mode " + "due to pooling operation for downsampling!") + + def get_stages(self): + return [ + nn.Identity(), + self.features[: self._stage_idxs[0]], + self.features[self._stage_idxs[0]: self._stage_idxs[1]], + self.features[self._stage_idxs[1]: self._stage_idxs[2]], + self.features[self._stage_idxs[2]: self._stage_idxs[3]], + self.features[self._stage_idxs[3]:], + ] + + def forward(self, x): + + stages = self.get_stages() + + features = [] + for i in range(self._depth + 1): + x = stages[i](x) + features.append(x) + + return features + + def load_state_dict(self, state_dict, **kwargs): + state_dict.pop("last_linear.bias", None) + state_dict.pop("last_linear.weight", None) + super().load_state_dict(state_dict, **kwargs) + + +inceptionv4_encoders = { + "inceptionv4": { + "encoder": InceptionV4Encoder, + "pretrained_settings": pretrained_settings["inceptionv4"], + "params": { + "stage_idxs": (3, 5, 9, 15), + "out_channels": (3, 64, 192, 384, 1024, 1536), + "num_classes": 1001, + }, + } +} diff --git a/segmentation_models_pytorch_example/encoders/mobilenet.py b/segmentation_models_pytorch_example/encoders/mobilenet.py new file mode 100644 index 0000000000000000000000000000000000000000..8bfdb1095b5343020a386b101fdad7aaebb2f009 --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/mobilenet.py @@ -0,0 +1,83 @@ +""" Each encoder should have following attributes and methods and be inherited from `_base.EncoderMixin` + +Attributes: + + _out_channels (list of int): specify number of channels for each encoder feature tensor + _depth (int): specify number of stages in decoder (in other words number of downsampling operations) + _in_channels (int): default number of input channels in first Conv2d layer for encoder (usually 3) + +Methods: + + forward(self, x: torch.Tensor) + produce list of features of different spatial resolutions, each feature is a 4D torch.tensor of + shape NCHW (features should be sorted in descending order according to spatial resolution, starting + with resolution same as input `x` tensor). + + Input: `x` with shape (1, 3, 64, 64) + Output: [f0, f1, f2, f3, f4, f5] - features with corresponding shapes + [(1, 3, 64, 64), (1, 64, 32, 32), (1, 128, 16, 16), (1, 256, 8, 8), + (1, 512, 4, 4), (1, 1024, 2, 2)] (C - dim may differ) + + also should support number of features according to specified depth, e.g. if depth = 5, + number of feature tensors = 6 (one with same resolution as input and 5 downsampled), + depth = 3 -> number of feature tensors = 4 (one with same resolution as input and 3 downsampled). 
+""" + +import torchvision +import torch.nn as nn + +from ._base import EncoderMixin + + +class MobileNetV2Encoder(torchvision.models.MobileNetV2, EncoderMixin): + + def __init__(self, out_channels, depth=5, **kwargs): + super().__init__(**kwargs) + self._depth = depth + self._out_channels = out_channels + self._in_channels = 3 + del self.classifier + + def get_stages(self): + return [ + nn.Identity(), + self.features[:2], + self.features[2:4], + self.features[4:7], + self.features[7:14], + self.features[14:], + ] + + def forward(self, x): + stages = self.get_stages() + + features = [] + for i in range(self._depth + 1): + x = stages[i](x) + features.append(x) + + return features + + def load_state_dict(self, state_dict, **kwargs): + state_dict.pop("classifier.1.bias", None) + state_dict.pop("classifier.1.weight", None) + super().load_state_dict(state_dict, **kwargs) + + +mobilenet_encoders = { + "mobilenet_v2": { + "encoder": MobileNetV2Encoder, + "pretrained_settings": { + "imagenet": { + "mean": [0.485, 0.456, 0.406], + "std": [0.229, 0.224, 0.225], + "url": "https://download.pytorch.org/models/mobilenet_v2-b0353104.pth", + "input_space": "RGB", + "input_range": [0, 1], + }, + }, + "params": { + "out_channels": (3, 16, 24, 32, 96, 1280), + }, + }, +} diff --git a/segmentation_models_pytorch_example/encoders/resnet.py b/segmentation_models_pytorch_example/encoders/resnet.py new file mode 100644 index 0000000000000000000000000000000000000000..dd271cb1f76cc8c3e31901deeea61db12942e547 --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/resnet.py @@ -0,0 +1,239 @@ +"""Each encoder should have following attributes and methods and be inherited from `_base.EncoderMixin` + +Attributes: + + _out_channels (list of int): specify number of channels for each encoder feature tensor + _depth (int): specify number of stages in decoder (in other words number of downsampling operations) + _in_channels (int): default number of input channels in first Conv2d layer for encoder (usually 3) + +Methods: + + forward(self, x: torch.Tensor) + produce list of features of different spatial resolutions, each feature is a 4D torch.tensor of + shape NCHW (features should be sorted in descending order according to spatial resolution, starting + with resolution same as input `x` tensor). + + Input: `x` with shape (1, 3, 64, 64) + Output: [f0, f1, f2, f3, f4, f5] - features with corresponding shapes + [(1, 3, 64, 64), (1, 64, 32, 32), (1, 128, 16, 16), (1, 256, 8, 8), + (1, 512, 4, 4), (1, 1024, 2, 2)] (C - dim may differ) + + also should support number of features according to specified depth, e.g. if depth = 5, + number of feature tensors = 6 (one with same resolution as input and 5 downsampled), + depth = 3 -> number of feature tensors = 4 (one with same resolution as input and 3 downsampled). 
+""" +from copy import deepcopy + +import torch.nn as nn + +from torchvision.models.resnet import ResNet +from torchvision.models.resnet import BasicBlock +from torchvision.models.resnet import Bottleneck +from pretrainedmodels.models.torchvision_models import pretrained_settings + +from ._base import EncoderMixin + + +class ResNetEncoder(ResNet, EncoderMixin): + def __init__(self, out_channels, depth=5, **kwargs): + super().__init__(**kwargs) + self._depth = depth + self._out_channels = out_channels + self._in_channels = 3 + + del self.fc + del self.avgpool + + def get_stages(self): + return [ + nn.Identity(), + nn.Sequential(self.conv1, self.bn1, self.relu), + nn.Sequential(self.maxpool, self.layer1), + self.layer2, + self.layer3, + self.layer4, + ] + + def forward(self, x): + stages = self.get_stages() + + features = [] + for i in range(self._depth + 1): + x = stages[i](x) + features.append(x) + + return features + + def load_state_dict(self, state_dict, **kwargs): + state_dict.pop("fc.bias", None) + state_dict.pop("fc.weight", None) + super().load_state_dict(state_dict, **kwargs) + + +new_settings = { + "resnet18": { + "ssl": "https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_supervised_resnet18-d92f0530.pth", # noqa + "swsl": "https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnet18-118f1556.pth", # noqa + }, + "resnet50": { + "ssl": "https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_supervised_resnet50-08389792.pth", # noqa + "swsl": "https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnet50-16a12f1b.pth", # noqa + "lvm-med-resnet" : "./lvm_med_weights/lvmmed_resnet.torch", # <- set absolute path to your weights here, + }, + "resnext50_32x4d": { + "imagenet": "https://download.pytorch.org/models/resnext50_32x4d-7cdf4587.pth", + "ssl": "https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_supervised_resnext50_32x4-ddb3e555.pth", # noqa + "swsl": "https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnext50_32x4-72679e44.pth", # noqa + }, + "resnext101_32x4d": { + "ssl": "https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_supervised_resnext101_32x4-dc43570a.pth", # noqa + "swsl": "https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnext101_32x4-3f87e46b.pth", # noqa + }, + "resnext101_32x8d": { + "imagenet": "https://download.pytorch.org/models/resnext101_32x8d-8ba56ff5.pth", + "instagram": "https://download.pytorch.org/models/ig_resnext101_32x8-c38310e5.pth", + "ssl": "https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_supervised_resnext101_32x8-2cfe2f8b.pth", # noqa + "swsl": "https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnext101_32x8-b4712904.pth", # noqa + }, + "resnext101_32x16d": { + "instagram": "https://download.pytorch.org/models/ig_resnext101_32x16-c6f796b0.pth", + "ssl": "https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_supervised_resnext101_32x16-15fffa57.pth", # noqa + "swsl": "https://dl.fbaipublicfiles.com/semiweaksupervision/model_files/semi_weakly_supervised_resnext101_32x16-f3559a9c.pth", # noqa + }, + "resnext101_32x32d": { + "instagram": "https://download.pytorch.org/models/ig_resnext101_32x32-e4b90b00.pth", + }, + "resnext101_32x48d": { + "instagram": "https://download.pytorch.org/models/ig_resnext101_32x48-3e41cc8a.pth", + }, +} + +pretrained_settings = 
deepcopy(pretrained_settings) +for model_name, sources in new_settings.items(): + if model_name not in pretrained_settings: + pretrained_settings[model_name] = {} + + for source_name, source_url in sources.items(): + pretrained_settings[model_name][source_name] = { + "url": source_url, + "input_size": [3, 224, 224], + "input_range": [0, 1], + "mean": [0.485, 0.456, 0.406], + "std": [0.229, 0.224, 0.225], + "num_classes": 1000, + } + + +resnet_encoders = { + "resnet18": { + "encoder": ResNetEncoder, + "pretrained_settings": pretrained_settings["resnet18"], + "params": { + "out_channels": (3, 64, 64, 128, 256, 512), + "block": BasicBlock, + "layers": [2, 2, 2, 2], + }, + }, + "resnet34": { + "encoder": ResNetEncoder, + "pretrained_settings": pretrained_settings["resnet34"], + "params": { + "out_channels": (3, 64, 64, 128, 256, 512), + "block": BasicBlock, + "layers": [3, 4, 6, 3], + }, + }, + "resnet50": { + "encoder": ResNetEncoder, + "pretrained_settings": pretrained_settings["resnet50"], + "params": { + "out_channels": (3, 64, 256, 512, 1024, 2048), + "block": Bottleneck, + "layers": [3, 4, 6, 3], + }, + }, + "resnet101": { + "encoder": ResNetEncoder, + "pretrained_settings": pretrained_settings["resnet101"], + "params": { + "out_channels": (3, 64, 256, 512, 1024, 2048), + "block": Bottleneck, + "layers": [3, 4, 23, 3], + }, + }, + "resnet152": { + "encoder": ResNetEncoder, + "pretrained_settings": pretrained_settings["resnet152"], + "params": { + "out_channels": (3, 64, 256, 512, 1024, 2048), + "block": Bottleneck, + "layers": [3, 8, 36, 3], + }, + }, + "resnext50_32x4d": { + "encoder": ResNetEncoder, + "pretrained_settings": pretrained_settings["resnext50_32x4d"], + "params": { + "out_channels": (3, 64, 256, 512, 1024, 2048), + "block": Bottleneck, + "layers": [3, 4, 6, 3], + "groups": 32, + "width_per_group": 4, + }, + }, + "resnext101_32x4d": { + "encoder": ResNetEncoder, + "pretrained_settings": pretrained_settings["resnext101_32x4d"], + "params": { + "out_channels": (3, 64, 256, 512, 1024, 2048), + "block": Bottleneck, + "layers": [3, 4, 23, 3], + "groups": 32, + "width_per_group": 4, + }, + }, + "resnext101_32x8d": { + "encoder": ResNetEncoder, + "pretrained_settings": pretrained_settings["resnext101_32x8d"], + "params": { + "out_channels": (3, 64, 256, 512, 1024, 2048), + "block": Bottleneck, + "layers": [3, 4, 23, 3], + "groups": 32, + "width_per_group": 8, + }, + }, + "resnext101_32x16d": { + "encoder": ResNetEncoder, + "pretrained_settings": pretrained_settings["resnext101_32x16d"], + "params": { + "out_channels": (3, 64, 256, 512, 1024, 2048), + "block": Bottleneck, + "layers": [3, 4, 23, 3], + "groups": 32, + "width_per_group": 16, + }, + }, + "resnext101_32x32d": { + "encoder": ResNetEncoder, + "pretrained_settings": pretrained_settings["resnext101_32x32d"], + "params": { + "out_channels": (3, 64, 256, 512, 1024, 2048), + "block": Bottleneck, + "layers": [3, 4, 23, 3], + "groups": 32, + "width_per_group": 32, + }, + }, + "resnext101_32x48d": { + "encoder": ResNetEncoder, + "pretrained_settings": pretrained_settings["resnext101_32x48d"], + "params": { + "out_channels": (3, 64, 256, 512, 1024, 2048), + "block": Bottleneck, + "layers": [3, 4, 23, 3], + "groups": 32, + "width_per_group": 48, + }, + }, +} diff --git a/segmentation_models_pytorch_example/encoders/senet.py b/segmentation_models_pytorch_example/encoders/senet.py new file mode 100644 index 0000000000000000000000000000000000000000..7cdbdbe14d3ad177547a9367639489345b1afc35 --- /dev/null +++ 
b/segmentation_models_pytorch_example/encoders/senet.py @@ -0,0 +1,174 @@ +""" Each encoder should have following attributes and methods and be inherited from `_base.EncoderMixin` + +Attributes: + + _out_channels (list of int): specify number of channels for each encoder feature tensor + _depth (int): specify number of stages in decoder (in other words number of downsampling operations) + _in_channels (int): default number of input channels in first Conv2d layer for encoder (usually 3) + +Methods: + + forward(self, x: torch.Tensor) + produce list of features of different spatial resolutions, each feature is a 4D torch.tensor of + shape NCHW (features should be sorted in descending order according to spatial resolution, starting + with resolution same as input `x` tensor). + + Input: `x` with shape (1, 3, 64, 64) + Output: [f0, f1, f2, f3, f4, f5] - features with corresponding shapes + [(1, 3, 64, 64), (1, 64, 32, 32), (1, 128, 16, 16), (1, 256, 8, 8), + (1, 512, 4, 4), (1, 1024, 2, 2)] (C - dim may differ) + + also should support number of features according to specified depth, e.g. if depth = 5, + number of feature tensors = 6 (one with same resolution as input and 5 downsampled), + depth = 3 -> number of feature tensors = 4 (one with same resolution as input and 3 downsampled). +""" + +import torch.nn as nn + +from pretrainedmodels.models.senet import ( + SENet, + SEBottleneck, + SEResNetBottleneck, + SEResNeXtBottleneck, + pretrained_settings, +) +from ._base import EncoderMixin + + +class SENetEncoder(SENet, EncoderMixin): + def __init__(self, out_channels, depth=5, **kwargs): + super().__init__(**kwargs) + + self._out_channels = out_channels + self._depth = depth + self._in_channels = 3 + + del self.last_linear + del self.avg_pool + + def get_stages(self): + return [ + nn.Identity(), + self.layer0[:-1], + nn.Sequential(self.layer0[-1], self.layer1), + self.layer2, + self.layer3, + self.layer4, + ] + + def forward(self, x): + stages = self.get_stages() + + features = [] + for i in range(self._depth + 1): + x = stages[i](x) + features.append(x) + + return features + + def load_state_dict(self, state_dict, **kwargs): + state_dict.pop("last_linear.bias", None) + state_dict.pop("last_linear.weight", None) + super().load_state_dict(state_dict, **kwargs) + + +senet_encoders = { + "senet154": { + "encoder": SENetEncoder, + "pretrained_settings": pretrained_settings["senet154"], + "params": { + "out_channels": (3, 128, 256, 512, 1024, 2048), + "block": SEBottleneck, + "dropout_p": 0.2, + "groups": 64, + "layers": [3, 8, 36, 3], + "num_classes": 1000, + "reduction": 16, + }, + }, + "se_resnet50": { + "encoder": SENetEncoder, + "pretrained_settings": pretrained_settings["se_resnet50"], + "params": { + "out_channels": (3, 64, 256, 512, 1024, 2048), + "block": SEResNetBottleneck, + "layers": [3, 4, 6, 3], + "downsample_kernel_size": 1, + "downsample_padding": 0, + "dropout_p": None, + "groups": 1, + "inplanes": 64, + "input_3x3": False, + "num_classes": 1000, + "reduction": 16, + }, + }, + "se_resnet101": { + "encoder": SENetEncoder, + "pretrained_settings": pretrained_settings["se_resnet101"], + "params": { + "out_channels": (3, 64, 256, 512, 1024, 2048), + "block": SEResNetBottleneck, + "layers": [3, 4, 23, 3], + "downsample_kernel_size": 1, + "downsample_padding": 0, + "dropout_p": None, + "groups": 1, + "inplanes": 64, + "input_3x3": False, + "num_classes": 1000, + "reduction": 16, + }, + }, + "se_resnet152": { + "encoder": SENetEncoder, + "pretrained_settings": 
pretrained_settings["se_resnet152"], + "params": { + "out_channels": (3, 64, 256, 512, 1024, 2048), + "block": SEResNetBottleneck, + "layers": [3, 8, 36, 3], + "downsample_kernel_size": 1, + "downsample_padding": 0, + "dropout_p": None, + "groups": 1, + "inplanes": 64, + "input_3x3": False, + "num_classes": 1000, + "reduction": 16, + }, + }, + "se_resnext50_32x4d": { + "encoder": SENetEncoder, + "pretrained_settings": pretrained_settings["se_resnext50_32x4d"], + "params": { + "out_channels": (3, 64, 256, 512, 1024, 2048), + "block": SEResNeXtBottleneck, + "layers": [3, 4, 6, 3], + "downsample_kernel_size": 1, + "downsample_padding": 0, + "dropout_p": None, + "groups": 32, + "inplanes": 64, + "input_3x3": False, + "num_classes": 1000, + "reduction": 16, + }, + }, + "se_resnext101_32x4d": { + "encoder": SENetEncoder, + "pretrained_settings": pretrained_settings["se_resnext101_32x4d"], + "params": { + "out_channels": (3, 64, 256, 512, 1024, 2048), + "block": SEResNeXtBottleneck, + "layers": [3, 4, 23, 3], + "downsample_kernel_size": 1, + "downsample_padding": 0, + "dropout_p": None, + "groups": 32, + "inplanes": 64, + "input_3x3": False, + "num_classes": 1000, + "reduction": 16, + }, + }, +} diff --git a/segmentation_models_pytorch_example/encoders/timm_efficientnet.py b/segmentation_models_pytorch_example/encoders/timm_efficientnet.py new file mode 100644 index 0000000000000000000000000000000000000000..ddac946b3b63c60679cba3bf3abc9404616da863 --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/timm_efficientnet.py @@ -0,0 +1,382 @@ +from functools import partial + +import torch +import torch.nn as nn + +from timm.models.efficientnet import EfficientNet +from timm.models.efficientnet import decode_arch_def, round_channels, default_cfgs +from timm.models.layers.activations import Swish + +from ._base import EncoderMixin + + +def get_efficientnet_kwargs(channel_multiplier=1.0, depth_multiplier=1.0, drop_rate=0.2): + """Creates an EfficientNet model. + Ref impl: https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/efficientnet_model.py + Paper: https://arxiv.org/abs/1905.11946 + EfficientNet params + name: (channel_multiplier, depth_multiplier, resolution, dropout_rate) + 'efficientnet-b0': (1.0, 1.0, 224, 0.2), + 'efficientnet-b1': (1.0, 1.1, 240, 0.2), + 'efficientnet-b2': (1.1, 1.2, 260, 0.3), + 'efficientnet-b3': (1.2, 1.4, 300, 0.3), + 'efficientnet-b4': (1.4, 1.8, 380, 0.4), + 'efficientnet-b5': (1.6, 2.2, 456, 0.4), + 'efficientnet-b6': (1.8, 2.6, 528, 0.5), + 'efficientnet-b7': (2.0, 3.1, 600, 0.5), + 'efficientnet-b8': (2.2, 3.6, 672, 0.5), + 'efficientnet-l2': (4.3, 5.3, 800, 0.5), + Args: + channel_multiplier: multiplier to number of channels per layer + depth_multiplier: multiplier to number of repeats per stage + """ + arch_def = [ + ['ds_r1_k3_s1_e1_c16_se0.25'], + ['ir_r2_k3_s2_e6_c24_se0.25'], + ['ir_r2_k5_s2_e6_c40_se0.25'], + ['ir_r3_k3_s2_e6_c80_se0.25'], + ['ir_r3_k5_s1_e6_c112_se0.25'], + ['ir_r4_k5_s2_e6_c192_se0.25'], + ['ir_r1_k3_s1_e6_c320_se0.25'], + ] + model_kwargs = dict( + block_args=decode_arch_def(arch_def, depth_multiplier), + num_features=round_channels(1280, channel_multiplier, 8, None), + stem_size=32, + round_chs_fn=partial(round_channels, multiplier=channel_multiplier), + act_layer=Swish, + drop_rate=drop_rate, + drop_path_rate=0.2, + ) + return model_kwargs + +def gen_efficientnet_lite_kwargs(channel_multiplier=1.0, depth_multiplier=1.0, drop_rate=0.2): + """Creates an EfficientNet-Lite model. 
+ + Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/efficientnet/lite + Paper: https://arxiv.org/abs/1905.11946 + + EfficientNet params + name: (channel_multiplier, depth_multiplier, resolution, dropout_rate) + 'efficientnet-lite0': (1.0, 1.0, 224, 0.2), + 'efficientnet-lite1': (1.0, 1.1, 240, 0.2), + 'efficientnet-lite2': (1.1, 1.2, 260, 0.3), + 'efficientnet-lite3': (1.2, 1.4, 280, 0.3), + 'efficientnet-lite4': (1.4, 1.8, 300, 0.3), + + Args: + channel_multiplier: multiplier to number of channels per layer + depth_multiplier: multiplier to number of repeats per stage + """ + arch_def = [ + ['ds_r1_k3_s1_e1_c16'], + ['ir_r2_k3_s2_e6_c24'], + ['ir_r2_k5_s2_e6_c40'], + ['ir_r3_k3_s2_e6_c80'], + ['ir_r3_k5_s1_e6_c112'], + ['ir_r4_k5_s2_e6_c192'], + ['ir_r1_k3_s1_e6_c320'], + ] + model_kwargs = dict( + block_args=decode_arch_def(arch_def, depth_multiplier, fix_first_last=True), + num_features=1280, + stem_size=32, + fix_stem=True, + round_chs_fn=partial(round_channels, multiplier=channel_multiplier), + act_layer=nn.ReLU6, + drop_rate=drop_rate, + drop_path_rate=0.2, + ) + return model_kwargs + +class EfficientNetBaseEncoder(EfficientNet, EncoderMixin): + + def __init__(self, stage_idxs, out_channels, depth=5, **kwargs): + super().__init__(**kwargs) + + self._stage_idxs = stage_idxs + self._out_channels = out_channels + self._depth = depth + self._in_channels = 3 + + del self.classifier + + def get_stages(self): + return [ + nn.Identity(), + nn.Sequential(self.conv_stem, self.bn1, self.act1), + self.blocks[:self._stage_idxs[0]], + self.blocks[self._stage_idxs[0]:self._stage_idxs[1]], + self.blocks[self._stage_idxs[1]:self._stage_idxs[2]], + self.blocks[self._stage_idxs[2]:], + ] + + def forward(self, x): + stages = self.get_stages() + + features = [] + for i in range(self._depth + 1): + x = stages[i](x) + features.append(x) + + return features + + def load_state_dict(self, state_dict, **kwargs): + state_dict.pop("classifier.bias", None) + state_dict.pop("classifier.weight", None) + super().load_state_dict(state_dict, **kwargs) + + +class EfficientNetEncoder(EfficientNetBaseEncoder): + + def __init__(self, stage_idxs, out_channels, depth=5, channel_multiplier=1.0, depth_multiplier=1.0, drop_rate=0.2): + kwargs = get_efficientnet_kwargs(channel_multiplier, depth_multiplier, drop_rate) + super().__init__(stage_idxs, out_channels, depth, **kwargs) + + +class EfficientNetLiteEncoder(EfficientNetBaseEncoder): + + def __init__(self, stage_idxs, out_channels, depth=5, channel_multiplier=1.0, depth_multiplier=1.0, drop_rate=0.2): + kwargs = gen_efficientnet_lite_kwargs(channel_multiplier, depth_multiplier, drop_rate) + super().__init__(stage_idxs, out_channels, depth, **kwargs) + + +def prepare_settings(settings): + return { + "mean": settings["mean"], + "std": settings["std"], + "url": settings["url"], + "input_range": (0, 1), + "input_space": "RGB", + } + + +timm_efficientnet_encoders = { + + "timm-efficientnet-b0": { + "encoder": EfficientNetEncoder, + "pretrained_settings": { + "imagenet": prepare_settings(default_cfgs["tf_efficientnet_b0"]), + "advprop": prepare_settings(default_cfgs["tf_efficientnet_b0_ap"]), + "noisy-student": prepare_settings(default_cfgs["tf_efficientnet_b0_ns"]), + }, + "params": { + "out_channels": (3, 32, 24, 40, 112, 320), + "stage_idxs": (2, 3, 5), + "channel_multiplier": 1.0, + "depth_multiplier": 1.0, + "drop_rate": 0.2, + }, + }, + + "timm-efficientnet-b1": { + "encoder": EfficientNetEncoder, + "pretrained_settings": { + "imagenet": 
prepare_settings(default_cfgs["tf_efficientnet_b1"]), + "advprop": prepare_settings(default_cfgs["tf_efficientnet_b1_ap"]), + "noisy-student": prepare_settings(default_cfgs["tf_efficientnet_b1_ns"]), + }, + "params": { + "out_channels": (3, 32, 24, 40, 112, 320), + "stage_idxs": (2, 3, 5), + "channel_multiplier": 1.0, + "depth_multiplier": 1.1, + "drop_rate": 0.2, + }, + }, + + "timm-efficientnet-b2": { + "encoder": EfficientNetEncoder, + "pretrained_settings": { + "imagenet": prepare_settings(default_cfgs["tf_efficientnet_b2"]), + "advprop": prepare_settings(default_cfgs["tf_efficientnet_b2_ap"]), + "noisy-student": prepare_settings(default_cfgs["tf_efficientnet_b2_ns"]), + }, + "params": { + "out_channels": (3, 32, 24, 48, 120, 352), + "stage_idxs": (2, 3, 5), + "channel_multiplier": 1.1, + "depth_multiplier": 1.2, + "drop_rate": 0.3, + }, + }, + + "timm-efficientnet-b3": { + "encoder": EfficientNetEncoder, + "pretrained_settings": { + "imagenet": prepare_settings(default_cfgs["tf_efficientnet_b3"]), + "advprop": prepare_settings(default_cfgs["tf_efficientnet_b3_ap"]), + "noisy-student": prepare_settings(default_cfgs["tf_efficientnet_b3_ns"]), + }, + "params": { + "out_channels": (3, 40, 32, 48, 136, 384), + "stage_idxs": (2, 3, 5), + "channel_multiplier": 1.2, + "depth_multiplier": 1.4, + "drop_rate": 0.3, + }, + }, + + "timm-efficientnet-b4": { + "encoder": EfficientNetEncoder, + "pretrained_settings": { + "imagenet": prepare_settings(default_cfgs["tf_efficientnet_b4"]), + "advprop": prepare_settings(default_cfgs["tf_efficientnet_b4_ap"]), + "noisy-student": prepare_settings(default_cfgs["tf_efficientnet_b4_ns"]), + }, + "params": { + "out_channels": (3, 48, 32, 56, 160, 448), + "stage_idxs": (2, 3, 5), + "channel_multiplier": 1.4, + "depth_multiplier": 1.8, + "drop_rate": 0.4, + }, + }, + + "timm-efficientnet-b5": { + "encoder": EfficientNetEncoder, + "pretrained_settings": { + "imagenet": prepare_settings(default_cfgs["tf_efficientnet_b5"]), + "advprop": prepare_settings(default_cfgs["tf_efficientnet_b5_ap"]), + "noisy-student": prepare_settings(default_cfgs["tf_efficientnet_b5_ns"]), + }, + "params": { + "out_channels": (3, 48, 40, 64, 176, 512), + "stage_idxs": (2, 3, 5), + "channel_multiplier": 1.6, + "depth_multiplier": 2.2, + "drop_rate": 0.4, + }, + }, + + "timm-efficientnet-b6": { + "encoder": EfficientNetEncoder, + "pretrained_settings": { + "imagenet": prepare_settings(default_cfgs["tf_efficientnet_b6"]), + "advprop": prepare_settings(default_cfgs["tf_efficientnet_b6_ap"]), + "noisy-student": prepare_settings(default_cfgs["tf_efficientnet_b6_ns"]), + }, + "params": { + "out_channels": (3, 56, 40, 72, 200, 576), + "stage_idxs": (2, 3, 5), + "channel_multiplier": 1.8, + "depth_multiplier": 2.6, + "drop_rate": 0.5, + }, + }, + + "timm-efficientnet-b7": { + "encoder": EfficientNetEncoder, + "pretrained_settings": { + "imagenet": prepare_settings(default_cfgs["tf_efficientnet_b7"]), + "advprop": prepare_settings(default_cfgs["tf_efficientnet_b7_ap"]), + "noisy-student": prepare_settings(default_cfgs["tf_efficientnet_b7_ns"]), + }, + "params": { + "out_channels": (3, 64, 48, 80, 224, 640), + "stage_idxs": (2, 3, 5), + "channel_multiplier": 2.0, + "depth_multiplier": 3.1, + "drop_rate": 0.5, + }, + }, + + "timm-efficientnet-b8": { + "encoder": EfficientNetEncoder, + "pretrained_settings": { + "imagenet": prepare_settings(default_cfgs["tf_efficientnet_b8"]), + "advprop": prepare_settings(default_cfgs["tf_efficientnet_b8_ap"]), + }, + "params": { + "out_channels": (3, 72, 56, 88, 
248, 704), + "stage_idxs": (2, 3, 5), + "channel_multiplier": 2.2, + "depth_multiplier": 3.6, + "drop_rate": 0.5, + }, + }, + + "timm-efficientnet-l2": { + "encoder": EfficientNetEncoder, + "pretrained_settings": { + "noisy-student": prepare_settings(default_cfgs["tf_efficientnet_l2_ns"]), + }, + "params": { + "out_channels": (3, 136, 104, 176, 480, 1376), + "stage_idxs": (2, 3, 5), + "channel_multiplier": 4.3, + "depth_multiplier": 5.3, + "drop_rate": 0.5, + }, + }, + + "timm-tf_efficientnet_lite0": { + "encoder": EfficientNetLiteEncoder, + "pretrained_settings": { + "imagenet": prepare_settings(default_cfgs["tf_efficientnet_lite0"]), + }, + "params": { + "out_channels": (3, 32, 24, 40, 112, 320), + "stage_idxs": (2, 3, 5), + "channel_multiplier": 1.0, + "depth_multiplier": 1.0, + "drop_rate": 0.2, + }, + }, + + "timm-tf_efficientnet_lite1": { + "encoder": EfficientNetLiteEncoder, + "pretrained_settings": { + "imagenet": prepare_settings(default_cfgs["tf_efficientnet_lite1"]), + }, + "params": { + "out_channels": (3, 32, 24, 40, 112, 320), + "stage_idxs": (2, 3, 5), + "channel_multiplier": 1.0, + "depth_multiplier": 1.1, + "drop_rate": 0.2, + }, + }, + + "timm-tf_efficientnet_lite2": { + "encoder": EfficientNetLiteEncoder, + "pretrained_settings": { + "imagenet": prepare_settings(default_cfgs["tf_efficientnet_lite2"]), + }, + "params": { + "out_channels": (3, 32, 24, 48, 120, 352), + "stage_idxs": (2, 3, 5), + "channel_multiplier": 1.1, + "depth_multiplier": 1.2, + "drop_rate": 0.3, + }, + }, + + "timm-tf_efficientnet_lite3": { + "encoder": EfficientNetLiteEncoder, + "pretrained_settings": { + "imagenet": prepare_settings(default_cfgs["tf_efficientnet_lite3"]), + }, + "params": { + "out_channels": (3, 32, 32, 48, 136, 384), + "stage_idxs": (2, 3, 5), + "channel_multiplier": 1.2, + "depth_multiplier": 1.4, + "drop_rate": 0.3, + }, + }, + + "timm-tf_efficientnet_lite4": { + "encoder": EfficientNetLiteEncoder, + "pretrained_settings": { + "imagenet": prepare_settings(default_cfgs["tf_efficientnet_lite4"]), + }, + "params": { + "out_channels": (3, 32, 32, 56, 160, 448), + "stage_idxs": (2, 3, 5), + "channel_multiplier": 1.4, + "depth_multiplier": 1.8, + "drop_rate": 0.4, + }, + }, +} diff --git a/segmentation_models_pytorch_example/encoders/timm_gernet.py b/segmentation_models_pytorch_example/encoders/timm_gernet.py new file mode 100644 index 0000000000000000000000000000000000000000..f98c030af3e62c36c28a88c53a4d18765ba78482 --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/timm_gernet.py @@ -0,0 +1,124 @@ +from timm.models import ByoModelCfg, ByoBlockCfg, ByobNet + +from ._base import EncoderMixin +import torch.nn as nn + + +class GERNetEncoder(ByobNet, EncoderMixin): + def __init__(self, out_channels, depth=5, **kwargs): + super().__init__(**kwargs) + self._depth = depth + self._out_channels = out_channels + self._in_channels = 3 + + del self.head + + def get_stages(self): + return [ + nn.Identity(), + self.stem, + self.stages[0], + self.stages[1], + self.stages[2], + nn.Sequential(self.stages[3], self.stages[4], self.final_conv) + ] + + def forward(self, x): + stages = self.get_stages() + + features = [] + for i in range(self._depth + 1): + x = stages[i](x) + features.append(x) + + return features + + def load_state_dict(self, state_dict, **kwargs): + state_dict.pop("head.fc.weight", None) + state_dict.pop("head.fc.bias", None) + super().load_state_dict(state_dict, **kwargs) + + +regnet_weights = { + 'timm-gernet_s': { + 'imagenet': 
'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-ger-weights/gernet_s-756b4751.pth', + }, + 'timm-gernet_m': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-ger-weights/gernet_m-0873c53a.pth', + }, + 'timm-gernet_l': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-ger-weights/gernet_l-f31e2e8d.pth', + }, +} + +pretrained_settings = {} +for model_name, sources in regnet_weights.items(): + pretrained_settings[model_name] = {} + for source_name, source_url in sources.items(): + pretrained_settings[model_name][source_name] = { + "url": source_url, + 'input_range': [0, 1], + 'mean': [0.485, 0.456, 0.406], + 'std': [0.229, 0.224, 0.225], + 'num_classes': 1000 + } + +timm_gernet_encoders = { + 'timm-gernet_s': { + 'encoder': GERNetEncoder, + "pretrained_settings": pretrained_settings["timm-gernet_s"], + 'params': { + 'out_channels': (3, 13, 48, 48, 384, 1920), + 'cfg': ByoModelCfg( + blocks=( + ByoBlockCfg(type='basic', d=1, c=48, s=2, gs=0, br=1.), + ByoBlockCfg(type='basic', d=3, c=48, s=2, gs=0, br=1.), + ByoBlockCfg(type='bottle', d=7, c=384, s=2, gs=0, br=1 / 4), + ByoBlockCfg(type='bottle', d=2, c=560, s=2, gs=1, br=3.), + ByoBlockCfg(type='bottle', d=1, c=256, s=1, gs=1, br=3.), + ), + stem_chs=13, + stem_pool=None, + num_features=1920, + ) + }, + }, + 'timm-gernet_m': { + 'encoder': GERNetEncoder, + "pretrained_settings": pretrained_settings["timm-gernet_m"], + 'params': { + 'out_channels': (3, 32, 128, 192, 640, 2560), + 'cfg': ByoModelCfg( + blocks=( + ByoBlockCfg(type='basic', d=1, c=128, s=2, gs=0, br=1.), + ByoBlockCfg(type='basic', d=2, c=192, s=2, gs=0, br=1.), + ByoBlockCfg(type='bottle', d=6, c=640, s=2, gs=0, br=1 / 4), + ByoBlockCfg(type='bottle', d=4, c=640, s=2, gs=1, br=3.), + ByoBlockCfg(type='bottle', d=1, c=640, s=1, gs=1, br=3.), + ), + stem_chs=32, + stem_pool=None, + num_features=2560, + ) + }, + }, + 'timm-gernet_l': { + 'encoder': GERNetEncoder, + "pretrained_settings": pretrained_settings["timm-gernet_l"], + 'params': { + 'out_channels': (3, 32, 128, 192, 640, 2560), + 'cfg': ByoModelCfg( + blocks=( + ByoBlockCfg(type='basic', d=1, c=128, s=2, gs=0, br=1.), + ByoBlockCfg(type='basic', d=2, c=192, s=2, gs=0, br=1.), + ByoBlockCfg(type='bottle', d=6, c=640, s=2, gs=0, br=1 / 4), + ByoBlockCfg(type='bottle', d=5, c=640, s=2, gs=1, br=3.), + ByoBlockCfg(type='bottle', d=4, c=640, s=1, gs=1, br=3.), + ), + stem_chs=32, + stem_pool=None, + num_features=2560, + ) + }, + }, +} diff --git a/segmentation_models_pytorch_example/encoders/timm_mobilenetv3.py b/segmentation_models_pytorch_example/encoders/timm_mobilenetv3.py new file mode 100644 index 0000000000000000000000000000000000000000..a4ab6ecfc0aa7f6570be453a3356d8196aa591bf --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/timm_mobilenetv3.py @@ -0,0 +1,175 @@ +import timm +import numpy as np +import torch.nn as nn + +from ._base import EncoderMixin + + +def _make_divisible(x, divisible_by=8): + return int(np.ceil(x * 1. 
/ divisible_by) * divisible_by)
+
+
+class MobileNetV3Encoder(nn.Module, EncoderMixin):
+    def __init__(self, model_name, width_mult, depth=5, **kwargs):
+        super().__init__()
+        if "large" not in model_name and "small" not in model_name:
+            raise ValueError(
+                'MobileNetV3 wrong model name {}'.format(model_name)
+            )
+
+        self._mode = "small" if "small" in model_name else "large"
+        self._depth = depth
+        self._out_channels = self._get_channels(self._mode, width_mult)
+        self._in_channels = 3
+
+        # minimal models replace hardswish with relu
+        self.model = timm.create_model(
+            model_name=model_name,
+            scriptable=True,   # torch.jit scriptable
+            exportable=True,   # onnx export
+            features_only=True,
+        )
+
+    def _get_channels(self, mode, width_mult):
+        if mode == "small":
+            channels = [16, 16, 24, 48, 576]
+        else:
+            channels = [16, 24, 40, 112, 960]
+        channels = [3,] + [_make_divisible(x * width_mult) for x in channels]
+        return tuple(channels)
+
+    def get_stages(self):
+        if self._mode == 'small':
+            return [
+                nn.Identity(),
+                nn.Sequential(
+                    self.model.conv_stem,
+                    self.model.bn1,
+                    self.model.act1,
+                ),
+                self.model.blocks[0],
+                self.model.blocks[1],
+                self.model.blocks[2:4],
+                self.model.blocks[4:],
+            ]
+        elif self._mode == 'large':
+            return [
+                nn.Identity(),
+                nn.Sequential(
+                    self.model.conv_stem,
+                    self.model.bn1,
+                    self.model.act1,
+                    self.model.blocks[0],
+                ),
+                self.model.blocks[1],
+                self.model.blocks[2],
+                self.model.blocks[3:5],
+                self.model.blocks[5:],
+            ]
+        else:
+            raise ValueError('MobileNetV3 mode should be small or large, got {}'.format(self._mode))
+
+    def forward(self, x):
+        stages = self.get_stages()
+
+        features = []
+        for i in range(self._depth + 1):
+            x = stages[i](x)
+            features.append(x)
+
+        return features
+
+    def load_state_dict(self, state_dict, **kwargs):
+        state_dict.pop('conv_head.weight', None)
+        state_dict.pop('conv_head.bias', None)
+        state_dict.pop('classifier.weight', None)
+        state_dict.pop('classifier.bias', None)
+        self.model.load_state_dict(state_dict, **kwargs)
+
+
+mobilenetv3_weights = {
+    'tf_mobilenetv3_large_075': {
+        'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_large_075-150ee8b0.pth'
+    },
+    'tf_mobilenetv3_large_100': {
+        'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_large_100-427764d5.pth'
+    },
+    'tf_mobilenetv3_large_minimal_100': {
+        'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_large_minimal_100-8596ae28.pth'
+    },
+    'tf_mobilenetv3_small_075': {
+        'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_small_075-da427f52.pth'
+    },
+    'tf_mobilenetv3_small_100': {
+        'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_small_100-37f49e2b.pth'
+    },
+    'tf_mobilenetv3_small_minimal_100': {
+        'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_small_minimal_100-922a7843.pth'
+    },
+
+
+}
+
+pretrained_settings = {}
+for model_name, sources in mobilenetv3_weights.items():
+    pretrained_settings[model_name] = {}
+    for source_name, source_url in sources.items():
+        pretrained_settings[model_name][source_name] = {
+            "url": source_url,
+            'input_range': [0, 1],
+            'mean': [0.485, 0.456, 0.406],
+            'std': [0.229, 0.224, 0.225],
+            'input_space': 'RGB',
+        }
+
+
+timm_mobilenetv3_encoders = {
+    'timm-mobilenetv3_large_075': {
'encoder': MobileNetV3Encoder, + 'pretrained_settings': pretrained_settings['tf_mobilenetv3_large_075'], + 'params': { + 'model_name': 'tf_mobilenetv3_large_075', + 'width_mult': 0.75 + } + }, + 'timm-mobilenetv3_large_100': { + 'encoder': MobileNetV3Encoder, + 'pretrained_settings': pretrained_settings['tf_mobilenetv3_large_100'], + 'params': { + 'model_name': 'tf_mobilenetv3_large_100', + 'width_mult': 1.0 + } + }, + 'timm-mobilenetv3_large_minimal_100': { + 'encoder': MobileNetV3Encoder, + 'pretrained_settings': pretrained_settings['tf_mobilenetv3_large_minimal_100'], + 'params': { + 'model_name': 'tf_mobilenetv3_large_minimal_100', + 'width_mult': 1.0 + } + }, + 'timm-mobilenetv3_small_075': { + 'encoder': MobileNetV3Encoder, + 'pretrained_settings': pretrained_settings['tf_mobilenetv3_small_075'], + 'params': { + 'model_name': 'tf_mobilenetv3_small_075', + 'width_mult': 0.75 + } + }, + 'timm-mobilenetv3_small_100': { + 'encoder': MobileNetV3Encoder, + 'pretrained_settings': pretrained_settings['tf_mobilenetv3_small_100'], + 'params': { + 'model_name': 'tf_mobilenetv3_small_100', + 'width_mult': 1.0 + } + }, + 'timm-mobilenetv3_small_minimal_100': { + 'encoder': MobileNetV3Encoder, + 'pretrained_settings': pretrained_settings['tf_mobilenetv3_small_minimal_100'], + 'params': { + 'model_name': 'tf_mobilenetv3_small_minimal_100', + 'width_mult': 1.0 + } + }, +} diff --git a/segmentation_models_pytorch_example/encoders/timm_regnet.py b/segmentation_models_pytorch_example/encoders/timm_regnet.py new file mode 100644 index 0000000000000000000000000000000000000000..7d801becd02c5b00ff838e61a82141398984aa3d --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/timm_regnet.py @@ -0,0 +1,332 @@ +from ._base import EncoderMixin +from timm.models.regnet import RegNet +import torch.nn as nn + + +class RegNetEncoder(RegNet, EncoderMixin): + def __init__(self, out_channels, depth=5, **kwargs): + super().__init__(**kwargs) + self._depth = depth + self._out_channels = out_channels + self._in_channels = 3 + + del self.head + + def get_stages(self): + return [ + nn.Identity(), + self.stem, + self.s1, + self.s2, + self.s3, + self.s4, + ] + + def forward(self, x): + stages = self.get_stages() + + features = [] + for i in range(self._depth + 1): + x = stages[i](x) + features.append(x) + + return features + + def load_state_dict(self, state_dict, **kwargs): + state_dict.pop("head.fc.weight", None) + state_dict.pop("head.fc.bias", None) + super().load_state_dict(state_dict, **kwargs) + + +regnet_weights = { + 'timm-regnetx_002': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_002-e7e85e5c.pth', + }, + 'timm-regnetx_004': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_004-7d0e9424.pth', + }, + 'timm-regnetx_006': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_006-85ec1baa.pth', + }, + 'timm-regnetx_008': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_008-d8b470eb.pth', + }, + 'timm-regnetx_016': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_016-65ca972a.pth', + }, + 'timm-regnetx_032': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_032-ed0c7f7e.pth', + }, + 'timm-regnetx_040': { + 'imagenet': 
'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_040-73c2a654.pth', + }, + 'timm-regnetx_064': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_064-29278baa.pth', + }, + 'timm-regnetx_080': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_080-7c7fcab1.pth', + }, + 'timm-regnetx_120': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_120-65d5521e.pth', + }, + 'timm-regnetx_160': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_160-c98c4112.pth', + }, + 'timm-regnetx_320': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_320-8ea38b93.pth', + }, + 'timm-regnety_002': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_002-e68ca334.pth', + }, + 'timm-regnety_004': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_004-0db870e6.pth', + }, + 'timm-regnety_006': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_006-c67e57ec.pth', + }, + 'timm-regnety_008': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_008-dc900dbe.pth', + }, + 'timm-regnety_016': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_016-54367f74.pth', + }, + 'timm-regnety_032': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/regnety_032_ra-7f2439f9.pth' + }, + 'timm-regnety_040': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_040-f0d569f9.pth' + }, + 'timm-regnety_064': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_064-0a48325c.pth' + }, + 'timm-regnety_080': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_080-e7f3eb93.pth', + }, + 'timm-regnety_120': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_120-721ba79a.pth', + }, + 'timm-regnety_160': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_160-d64013cd.pth', + }, + 'timm-regnety_320': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_320-ba464b29.pth' + } +} + +pretrained_settings = {} +for model_name, sources in regnet_weights.items(): + pretrained_settings[model_name] = {} + for source_name, source_url in sources.items(): + pretrained_settings[model_name][source_name] = { + "url": source_url, + 'input_size': [3, 224, 224], + 'input_range': [0, 1], + 'mean': [0.485, 0.456, 0.406], + 'std': [0.229, 0.224, 0.225], + 'num_classes': 1000 + } + +# at this point I am too lazy to copy configs, so I just used the same configs from timm's repo + + +def _mcfg(**kwargs): + cfg = dict(se_ratio=0., bottle_ratio=1., stem_width=32) + cfg.update(**kwargs) + return cfg + + +timm_regnet_encoders = { + 'timm-regnetx_002': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnetx_002"], + 'params': { + 'out_channels': (3, 32, 24, 56, 152, 368), + 'cfg': _mcfg(w0=24, wa=36.44, wm=2.49, group_w=8, 
depth=13) + }, + }, + 'timm-regnetx_004': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnetx_004"], + 'params': { + 'out_channels': (3, 32, 32, 64, 160, 384), + 'cfg': _mcfg(w0=24, wa=24.48, wm=2.54, group_w=16, depth=22) + }, + }, + 'timm-regnetx_006': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnetx_006"], + 'params': { + 'out_channels': (3, 32, 48, 96, 240, 528), + 'cfg': _mcfg(w0=48, wa=36.97, wm=2.24, group_w=24, depth=16) + }, + }, + 'timm-regnetx_008': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnetx_008"], + 'params': { + 'out_channels': (3, 32, 64, 128, 288, 672), + 'cfg': _mcfg(w0=56, wa=35.73, wm=2.28, group_w=16, depth=16) + }, + }, + 'timm-regnetx_016': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnetx_016"], + 'params': { + 'out_channels': (3, 32, 72, 168, 408, 912), + 'cfg': _mcfg(w0=80, wa=34.01, wm=2.25, group_w=24, depth=18) + }, + }, + 'timm-regnetx_032': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnetx_032"], + 'params': { + 'out_channels': (3, 32, 96, 192, 432, 1008), + 'cfg': _mcfg(w0=88, wa=26.31, wm=2.25, group_w=48, depth=25) + }, + }, + 'timm-regnetx_040': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnetx_040"], + 'params': { + 'out_channels': (3, 32, 80, 240, 560, 1360), + 'cfg': _mcfg(w0=96, wa=38.65, wm=2.43, group_w=40, depth=23) + }, + }, + 'timm-regnetx_064': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnetx_064"], + 'params': { + 'out_channels': (3, 32, 168, 392, 784, 1624), + 'cfg': _mcfg(w0=184, wa=60.83, wm=2.07, group_w=56, depth=17) + }, + }, + 'timm-regnetx_080': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnetx_080"], + 'params': { + 'out_channels': (3, 32, 80, 240, 720, 1920), + 'cfg': _mcfg(w0=80, wa=49.56, wm=2.88, group_w=120, depth=23) + }, + }, + 'timm-regnetx_120': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnetx_120"], + 'params': { + 'out_channels': (3, 32, 224, 448, 896, 2240), + 'cfg': _mcfg(w0=168, wa=73.36, wm=2.37, group_w=112, depth=19) + }, + }, + 'timm-regnetx_160': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnetx_160"], + 'params': { + 'out_channels': (3, 32, 256, 512, 896, 2048), + 'cfg': _mcfg(w0=216, wa=55.59, wm=2.1, group_w=128, depth=22) + }, + }, + 'timm-regnetx_320': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnetx_320"], + 'params': { + 'out_channels': (3, 32, 336, 672, 1344, 2520), + 'cfg': _mcfg(w0=320, wa=69.86, wm=2.0, group_w=168, depth=23) + }, + }, + #regnety + 'timm-regnety_002': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnety_002"], + 'params': { + 'out_channels': (3, 32, 24, 56, 152, 368), + 'cfg': _mcfg(w0=24, wa=36.44, wm=2.49, group_w=8, depth=13, se_ratio=0.25) + }, + }, + 'timm-regnety_004': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnety_004"], + 'params': { + 'out_channels': (3, 32, 48, 104, 208, 440), + 'cfg': _mcfg(w0=48, wa=27.89, wm=2.09, group_w=8, depth=16, se_ratio=0.25) + }, + }, + 'timm-regnety_006': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnety_006"], + 'params': { + 'out_channels': (3, 32, 48, 112, 256, 608), + 'cfg': _mcfg(w0=48, wa=32.54, 
wm=2.32, group_w=16, depth=15, se_ratio=0.25) + }, + }, + 'timm-regnety_008': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnety_008"], + 'params': { + 'out_channels': (3, 32, 64, 128, 320, 768), + 'cfg': _mcfg(w0=56, wa=38.84, wm=2.4, group_w=16, depth=14, se_ratio=0.25) + }, + }, + 'timm-regnety_016': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnety_016"], + 'params': { + 'out_channels': (3, 32, 48, 120, 336, 888), + 'cfg': _mcfg(w0=48, wa=20.71, wm=2.65, group_w=24, depth=27, se_ratio=0.25) + }, + }, + 'timm-regnety_032': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnety_032"], + 'params': { + 'out_channels': (3, 32, 72, 216, 576, 1512), + 'cfg': _mcfg(w0=80, wa=42.63, wm=2.66, group_w=24, depth=21, se_ratio=0.25) + }, + }, + 'timm-regnety_040': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnety_040"], + 'params': { + 'out_channels': (3, 32, 128, 192, 512, 1088), + 'cfg': _mcfg(w0=96, wa=31.41, wm=2.24, group_w=64, depth=22, se_ratio=0.25) + }, + }, + 'timm-regnety_064': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnety_064"], + 'params': { + 'out_channels': (3, 32, 144, 288, 576, 1296), + 'cfg': _mcfg(w0=112, wa=33.22, wm=2.27, group_w=72, depth=25, se_ratio=0.25) + }, + }, + 'timm-regnety_080': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnety_080"], + 'params': { + 'out_channels': (3, 32, 168, 448, 896, 2016), + 'cfg': _mcfg(w0=192, wa=76.82, wm=2.19, group_w=56, depth=17, se_ratio=0.25) + }, + }, + 'timm-regnety_120': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnety_120"], + 'params': { + 'out_channels': (3, 32, 224, 448, 896, 2240), + 'cfg': _mcfg(w0=168, wa=73.36, wm=2.37, group_w=112, depth=19, se_ratio=0.25) + }, + }, + 'timm-regnety_160': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnety_160"], + 'params': { + 'out_channels': (3, 32, 224, 448, 1232, 3024), + 'cfg': _mcfg(w0=200, wa=106.23, wm=2.48, group_w=112, depth=18, se_ratio=0.25) + }, + }, + 'timm-regnety_320': { + 'encoder': RegNetEncoder, + "pretrained_settings": pretrained_settings["timm-regnety_320"], + 'params': { + 'out_channels': (3, 32, 232, 696, 1392, 3712), + 'cfg': _mcfg(w0=232, wa=115.89, wm=2.53, group_w=232, depth=20, se_ratio=0.25) + }, + }, +} diff --git a/segmentation_models_pytorch_example/encoders/timm_res2net.py b/segmentation_models_pytorch_example/encoders/timm_res2net.py new file mode 100644 index 0000000000000000000000000000000000000000..2b63a0b678a18307bc5f4bbab2d045dc2cfd4e12 --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/timm_res2net.py @@ -0,0 +1,163 @@ +from ._base import EncoderMixin +from timm.models.resnet import ResNet +from timm.models.res2net import Bottle2neck +import torch.nn as nn + + +class Res2NetEncoder(ResNet, EncoderMixin): + def __init__(self, out_channels, depth=5, **kwargs): + super().__init__(**kwargs) + self._depth = depth + self._out_channels = out_channels + self._in_channels = 3 + + del self.fc + del self.global_pool + + def get_stages(self): + return [ + nn.Identity(), + nn.Sequential(self.conv1, self.bn1, self.act1), + nn.Sequential(self.maxpool, self.layer1), + self.layer2, + self.layer3, + self.layer4, + ] + + def make_dilated(self, stage_list, dilation_list): + raise ValueError("Res2Net encoders do not support dilated mode") + + def 
forward(self, x): + stages = self.get_stages() + + features = [] + for i in range(self._depth + 1): + x = stages[i](x) + features.append(x) + + return features + + def load_state_dict(self, state_dict, **kwargs): + state_dict.pop("fc.bias", None) + state_dict.pop("fc.weight", None) + super().load_state_dict(state_dict, **kwargs) + + +res2net_weights = { + 'timm-res2net50_26w_4s': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-res2net/res2net50_26w_4s-06e79181.pth' + }, + 'timm-res2net50_48w_2s': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-res2net/res2net50_48w_2s-afed724a.pth' + }, + 'timm-res2net50_14w_8s': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-res2net/res2net50_14w_8s-6527dddc.pth', + }, + 'timm-res2net50_26w_6s': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-res2net/res2net50_26w_6s-19041792.pth', + }, + 'timm-res2net50_26w_8s': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-res2net/res2net50_26w_8s-2c7c9f12.pth', + }, + 'timm-res2net101_26w_4s': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-res2net/res2net101_26w_4s-02a759a1.pth', + }, + 'timm-res2next50': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-res2net/res2next50_4s-6ef7e7bf.pth', + } +} + +pretrained_settings = {} +for model_name, sources in res2net_weights.items(): + pretrained_settings[model_name] = {} + for source_name, source_url in sources.items(): + pretrained_settings[model_name][source_name] = { + "url": source_url, + 'input_size': [3, 224, 224], + 'input_range': [0, 1], + 'mean': [0.485, 0.456, 0.406], + 'std': [0.229, 0.224, 0.225], + 'num_classes': 1000 + } + + +timm_res2net_encoders = { + 'timm-res2net50_26w_4s': { + 'encoder': Res2NetEncoder, + "pretrained_settings": pretrained_settings["timm-res2net50_26w_4s"], + 'params': { + 'out_channels': (3, 64, 256, 512, 1024, 2048), + 'block': Bottle2neck, + 'layers': [3, 4, 6, 3], + 'base_width': 26, + 'block_args': {'scale': 4} + }, + }, + 'timm-res2net101_26w_4s': { + 'encoder': Res2NetEncoder, + "pretrained_settings": pretrained_settings["timm-res2net101_26w_4s"], + 'params': { + 'out_channels': (3, 64, 256, 512, 1024, 2048), + 'block': Bottle2neck, + 'layers': [3, 4, 23, 3], + 'base_width': 26, + 'block_args': {'scale': 4} + }, + }, + 'timm-res2net50_26w_6s': { + 'encoder': Res2NetEncoder, + "pretrained_settings": pretrained_settings["timm-res2net50_26w_6s"], + 'params': { + 'out_channels': (3, 64, 256, 512, 1024, 2048), + 'block': Bottle2neck, + 'layers': [3, 4, 6, 3], + 'base_width': 26, + 'block_args': {'scale': 6} + }, + }, + 'timm-res2net50_26w_8s': { + 'encoder': Res2NetEncoder, + "pretrained_settings": pretrained_settings["timm-res2net50_26w_8s"], + 'params': { + 'out_channels': (3, 64, 256, 512, 1024, 2048), + 'block': Bottle2neck, + 'layers': [3, 4, 6, 3], + 'base_width': 26, + 'block_args': {'scale': 8} + }, + }, + 'timm-res2net50_48w_2s': { + 'encoder': Res2NetEncoder, + "pretrained_settings": pretrained_settings["timm-res2net50_48w_2s"], + 'params': { + 'out_channels': (3, 64, 256, 512, 1024, 2048), + 'block': Bottle2neck, + 'layers': [3, 4, 6, 3], + 'base_width': 48, + 'block_args': {'scale': 2} + }, + }, + 'timm-res2net50_14w_8s': { + 'encoder': Res2NetEncoder, + "pretrained_settings": pretrained_settings["timm-res2net50_14w_8s"], + 'params': { 
+ 'out_channels': (3, 64, 256, 512, 1024, 2048), + 'block': Bottle2neck, + 'layers': [3, 4, 6, 3], + 'base_width': 14, + 'block_args': {'scale': 8} + }, + }, + 'timm-res2next50': { + 'encoder': Res2NetEncoder, + "pretrained_settings": pretrained_settings["timm-res2next50"], + 'params': { + 'out_channels': (3, 64, 256, 512, 1024, 2048), + 'block': Bottle2neck, + 'layers': [3, 4, 6, 3], + 'base_width': 4, + 'cardinality': 8, + 'block_args': {'scale': 4} + }, + } +} diff --git a/segmentation_models_pytorch_example/encoders/timm_resnest.py b/segmentation_models_pytorch_example/encoders/timm_resnest.py new file mode 100644 index 0000000000000000000000000000000000000000..bcc30d5e016b217c5ac8c444a30dda422566d175 --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/timm_resnest.py @@ -0,0 +1,208 @@ +from ._base import EncoderMixin +from timm.models.resnet import ResNet +from timm.models.resnest import ResNestBottleneck +import torch.nn as nn + + +class ResNestEncoder(ResNet, EncoderMixin): + def __init__(self, out_channels, depth=5, **kwargs): + super().__init__(**kwargs) + self._depth = depth + self._out_channels = out_channels + self._in_channels = 3 + + del self.fc + del self.global_pool + + def get_stages(self): + return [ + nn.Identity(), + nn.Sequential(self.conv1, self.bn1, self.act1), + nn.Sequential(self.maxpool, self.layer1), + self.layer2, + self.layer3, + self.layer4, + ] + + def make_dilated(self, stage_list, dilation_list): + raise ValueError("ResNest encoders do not support dilated mode") + + def forward(self, x): + stages = self.get_stages() + + features = [] + for i in range(self._depth + 1): + x = stages[i](x) + features.append(x) + + return features + + def load_state_dict(self, state_dict, **kwargs): + state_dict.pop("fc.bias", None) + state_dict.pop("fc.weight", None) + super().load_state_dict(state_dict, **kwargs) + + +resnest_weights = { + 'timm-resnest14d': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/gluon_resnest14-9c8fe254.pth' + }, + 'timm-resnest26d': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/gluon_resnest26-50eb607c.pth' + }, + 'timm-resnest50d': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-resnest/resnest50-528c19ca.pth', + }, + 'timm-resnest101e': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-resnest/resnest101-22405ba7.pth', + }, + 'timm-resnest200e': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-resnest/resnest200-75117900.pth', + }, + 'timm-resnest269e': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-resnest/resnest269-0cc87c48.pth', + }, + 'timm-resnest50d_4s2x40d': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-resnest/resnest50_fast_4s2x40d-41d14ed0.pth', + }, + 'timm-resnest50d_1s4x24d': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-resnest/resnest50_fast_1s4x24d-d4a4f76f.pth', + } +} + +pretrained_settings = {} +for model_name, sources in resnest_weights.items(): + pretrained_settings[model_name] = {} + for source_name, source_url in sources.items(): + pretrained_settings[model_name][source_name] = { + "url": source_url, + 'input_size': [3, 224, 224], + 'input_range': [0, 1], + 'mean': [0.485, 0.456, 0.406], + 'std': [0.229, 0.224, 0.225], + 'num_classes': 1000 + } + + 
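# --- Editorial usage sketch (illustration only, not part of the patch itself) ---------
# Every timm-based encoder module in this diff follows the same registry pattern: an
# encoder class, its pretrained-weight settings, and constructor params whose
# `out_channels` tuple lists the channel count of each feature map the encoder returns
# (depth + 1 maps, highest resolution first). The guarded sketch below builds the
# `ResNestEncoder` defined above with the "timm-resnest14d" configuration from the
# registry that follows and inspects the resulting feature pyramid. It assumes torch
# and timm are installed and uses random weights (nothing is downloaded).
if __name__ == "__main__":
    import torch

    encoder = ResNestEncoder(
        out_channels=(3, 64, 256, 512, 1024, 2048),
        block=ResNestBottleneck,
        layers=[1, 1, 1, 1],
        stem_type="deep",
        stem_width=32,
        avg_down=True,
        base_width=64,
        cardinality=1,
        block_args={"radix": 2, "avd": True, "avd_first": False},
    )

    x = torch.randn(1, 3, 224, 224)
    features = encoder(x)  # list of depth + 1 = 6 tensors, input resolution first
    print([tuple(f.shape) for f in features])
    # expected channel progression: 3, 64, 256, 512, 1024, 2048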
+timm_resnest_encoders = { + 'timm-resnest14d': { + 'encoder': ResNestEncoder, + "pretrained_settings": pretrained_settings["timm-resnest14d"], + 'params': { + 'out_channels': (3, 64, 256, 512, 1024, 2048), + 'block': ResNestBottleneck, + 'layers': [1, 1, 1, 1], + 'stem_type': 'deep', + 'stem_width': 32, + 'avg_down': True, + 'base_width': 64, + 'cardinality': 1, + 'block_args': {'radix': 2, 'avd': True, 'avd_first': False} + } + }, + 'timm-resnest26d': { + 'encoder': ResNestEncoder, + "pretrained_settings": pretrained_settings["timm-resnest26d"], + 'params': { + 'out_channels': (3, 64, 256, 512, 1024, 2048), + 'block': ResNestBottleneck, + 'layers': [2, 2, 2, 2], + 'stem_type': 'deep', + 'stem_width': 32, + 'avg_down': True, + 'base_width': 64, + 'cardinality': 1, + 'block_args': {'radix': 2, 'avd': True, 'avd_first': False} + } + }, + 'timm-resnest50d': { + 'encoder': ResNestEncoder, + "pretrained_settings": pretrained_settings["timm-resnest50d"], + 'params': { + 'out_channels': (3, 64, 256, 512, 1024, 2048), + 'block': ResNestBottleneck, + 'layers': [3, 4, 6, 3], + 'stem_type': 'deep', + 'stem_width': 32, + 'avg_down': True, + 'base_width': 64, + 'cardinality': 1, + 'block_args': {'radix': 2, 'avd': True, 'avd_first': False} + } + }, + 'timm-resnest101e': { + 'encoder': ResNestEncoder, + "pretrained_settings": pretrained_settings["timm-resnest101e"], + 'params': { + 'out_channels': (3, 128, 256, 512, 1024, 2048), + 'block': ResNestBottleneck, + 'layers': [3, 4, 23, 3], + 'stem_type': 'deep', + 'stem_width': 64, + 'avg_down': True, + 'base_width': 64, + 'cardinality': 1, + 'block_args': {'radix': 2, 'avd': True, 'avd_first': False} + } + }, + 'timm-resnest200e': { + 'encoder': ResNestEncoder, + "pretrained_settings": pretrained_settings["timm-resnest200e"], + 'params': { + 'out_channels': (3, 128, 256, 512, 1024, 2048), + 'block': ResNestBottleneck, + 'layers': [3, 24, 36, 3], + 'stem_type': 'deep', + 'stem_width': 64, + 'avg_down': True, + 'base_width': 64, + 'cardinality': 1, + 'block_args': {'radix': 2, 'avd': True, 'avd_first': False} + } + }, + 'timm-resnest269e': { + 'encoder': ResNestEncoder, + "pretrained_settings": pretrained_settings["timm-resnest269e"], + 'params': { + 'out_channels': (3, 128, 256, 512, 1024, 2048), + 'block': ResNestBottleneck, + 'layers': [3, 30, 48, 8], + 'stem_type': 'deep', + 'stem_width': 64, + 'avg_down': True, + 'base_width': 64, + 'cardinality': 1, + 'block_args': {'radix': 2, 'avd': True, 'avd_first': False} + }, + }, + 'timm-resnest50d_4s2x40d': { + 'encoder': ResNestEncoder, + "pretrained_settings": pretrained_settings["timm-resnest50d_4s2x40d"], + 'params': { + 'out_channels': (3, 64, 256, 512, 1024, 2048), + 'block': ResNestBottleneck, + 'layers': [3, 4, 6, 3], + 'stem_type': 'deep', + 'stem_width': 32, + 'avg_down': True, + 'base_width': 40, + 'cardinality': 2, + 'block_args': {'radix': 4, 'avd': True, 'avd_first': True} + } + }, + 'timm-resnest50d_1s4x24d': { + 'encoder': ResNestEncoder, + "pretrained_settings": pretrained_settings["timm-resnest50d_1s4x24d"], + 'params': { + 'out_channels': (3, 64, 256, 512, 1024, 2048), + 'block': ResNestBottleneck, + 'layers': [3, 4, 6, 3], + 'stem_type': 'deep', + 'stem_width': 32, + 'avg_down': True, + 'base_width': 24, + 'cardinality': 4, + 'block_args': {'radix': 1, 'avd': True, 'avd_first': True} + } + } +} diff --git a/segmentation_models_pytorch_example/encoders/timm_sknet.py b/segmentation_models_pytorch_example/encoders/timm_sknet.py new file mode 100644 index 
0000000000000000000000000000000000000000..38804d9b0032ced9b7d3e381ec50bd7fc8bb3a92 --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/timm_sknet.py @@ -0,0 +1,103 @@ +from ._base import EncoderMixin +from timm.models.resnet import ResNet +from timm.models.sknet import SelectiveKernelBottleneck, SelectiveKernelBasic +import torch.nn as nn + + +class SkNetEncoder(ResNet, EncoderMixin): + def __init__(self, out_channels, depth=5, **kwargs): + super().__init__(**kwargs) + self._depth = depth + self._out_channels = out_channels + self._in_channels = 3 + + del self.fc + del self.global_pool + + def get_stages(self): + return [ + nn.Identity(), + nn.Sequential(self.conv1, self.bn1, self.act1), + nn.Sequential(self.maxpool, self.layer1), + self.layer2, + self.layer3, + self.layer4, + ] + + def forward(self, x): + stages = self.get_stages() + + features = [] + for i in range(self._depth + 1): + x = stages[i](x) + features.append(x) + + return features + + def load_state_dict(self, state_dict, **kwargs): + state_dict.pop("fc.bias", None) + state_dict.pop("fc.weight", None) + super().load_state_dict(state_dict, **kwargs) + + +sknet_weights = { + 'timm-skresnet18': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/skresnet18_ra-4eec2804.pth' + }, + 'timm-skresnet34': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/skresnet34_ra-bdc0ccde.pth' + }, + 'timm-skresnext50_32x4d': { + 'imagenet': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/skresnext50_ra-f40e40bf.pth', + } +} + +pretrained_settings = {} +for model_name, sources in sknet_weights.items(): + pretrained_settings[model_name] = {} + for source_name, source_url in sources.items(): + pretrained_settings[model_name][source_name] = { + "url": source_url, + 'input_size': [3, 224, 224], + 'input_range': [0, 1], + 'mean': [0.485, 0.456, 0.406], + 'std': [0.229, 0.224, 0.225], + 'num_classes': 1000 + } + +timm_sknet_encoders = { + 'timm-skresnet18': { + 'encoder': SkNetEncoder, + "pretrained_settings": pretrained_settings["timm-skresnet18"], + 'params': { + 'out_channels': (3, 64, 64, 128, 256, 512), + 'block': SelectiveKernelBasic, + 'layers': [2, 2, 2, 2], + 'zero_init_last_bn': False, + 'block_args': {'sk_kwargs': {'rd_ratio': 1/8, 'split_input': True}} + } + }, + 'timm-skresnet34': { + 'encoder': SkNetEncoder, + "pretrained_settings": pretrained_settings["timm-skresnet34"], + 'params': { + 'out_channels': (3, 64, 64, 128, 256, 512), + 'block': SelectiveKernelBasic, + 'layers': [3, 4, 6, 3], + 'zero_init_last_bn': False, + 'block_args': {'sk_kwargs': {'rd_ratio': 1/8, 'split_input': True}} + } + }, + 'timm-skresnext50_32x4d': { + 'encoder': SkNetEncoder, + "pretrained_settings": pretrained_settings["timm-skresnext50_32x4d"], + 'params': { + 'out_channels': (3, 64, 256, 512, 1024, 2048), + 'block': SelectiveKernelBottleneck, + 'layers': [3, 4, 6, 3], + 'zero_init_last_bn': False, + 'cardinality': 32, + 'base_width': 4 + } + } +} diff --git a/segmentation_models_pytorch_example/encoders/timm_universal.py b/segmentation_models_pytorch_example/encoders/timm_universal.py new file mode 100644 index 0000000000000000000000000000000000000000..105102c0da91b5ddd94dc5f19056ea0c92c5cf82 --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/timm_universal.py @@ -0,0 +1,34 @@ +import timm +import torch.nn as nn + + +class TimmUniversalEncoder(nn.Module): + + def __init__(self, name, pretrained=True, 
in_channels=3, depth=5, output_stride=32): + super().__init__() + kwargs = dict( + in_chans=in_channels, + features_only=True, + output_stride=output_stride, + pretrained=pretrained, + out_indices=tuple(range(depth)), + ) + + # not all models support output stride argument, drop it by default + if output_stride == 32: + kwargs.pop("output_stride") + + self.model = timm.create_model(name, **kwargs) + + self._in_channels = in_channels + self._out_channels = [in_channels, ] + self.model.feature_info.channels() + self._depth = depth + + def forward(self, x): + features = self.model(x) + features = [x,] + features + return features + + @property + def out_channels(self): + return self._out_channels diff --git a/segmentation_models_pytorch_example/encoders/vgg.py b/segmentation_models_pytorch_example/encoders/vgg.py new file mode 100644 index 0000000000000000000000000000000000000000..bdc83a65b93e39664beeed8aca5f33aefbe38fcd --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/vgg.py @@ -0,0 +1,157 @@ +""" Each encoder should have following attributes and methods and be inherited from `_base.EncoderMixin` + +Attributes: + + _out_channels (list of int): specify number of channels for each encoder feature tensor + _depth (int): specify number of stages in decoder (in other words number of downsampling operations) + _in_channels (int): default number of input channels in first Conv2d layer for encoder (usually 3) + +Methods: + + forward(self, x: torch.Tensor) + produce list of features of different spatial resolutions, each feature is a 4D torch.tensor of + shape NCHW (features should be sorted in descending order according to spatial resolution, starting + with resolution same as input `x` tensor). + + Input: `x` with shape (1, 3, 64, 64) + Output: [f0, f1, f2, f3, f4, f5] - features with corresponding shapes + [(1, 3, 64, 64), (1, 64, 32, 32), (1, 128, 16, 16), (1, 256, 8, 8), + (1, 512, 4, 4), (1, 1024, 2, 2)] (C - dim may differ) + + also should support number of features according to specified depth, e.g. if depth = 5, + number of feature tensors = 6 (one with same resolution as input and 5 downsampled), + depth = 3 -> number of feature tensors = 4 (one with same resolution as input and 3 downsampled). 
+""" + +import torch.nn as nn +from torchvision.models.vgg import VGG +from torchvision.models.vgg import make_layers +from pretrainedmodels.models.torchvision_models import pretrained_settings + +from ._base import EncoderMixin + +# fmt: off +cfg = { + 'A': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'], + 'B': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'], + 'D': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'], + 'E': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'], +} +# fmt: on + + +class VGGEncoder(VGG, EncoderMixin): + def __init__(self, out_channels, config, batch_norm=False, depth=5, **kwargs): + super().__init__(make_layers(config, batch_norm=batch_norm), **kwargs) + self._out_channels = out_channels + self._depth = depth + self._in_channels = 3 + del self.classifier + + def make_dilated(self, stage_list, dilation_list): + raise ValueError("'VGG' models do not support dilated mode due to Max Pooling" + " operations for downsampling!") + + def get_stages(self): + stages = [] + stage_modules = [] + for module in self.features: + if isinstance(module, nn.MaxPool2d): + stages.append(nn.Sequential(*stage_modules)) + stage_modules = [] + stage_modules.append(module) + stages.append(nn.Sequential(*stage_modules)) + return stages + + def forward(self, x): + stages = self.get_stages() + + features = [] + for i in range(self._depth + 1): + x = stages[i](x) + features.append(x) + + return features + + def load_state_dict(self, state_dict, **kwargs): + keys = list(state_dict.keys()) + for k in keys: + if k.startswith("classifier"): + state_dict.pop(k, None) + super().load_state_dict(state_dict, **kwargs) + + +vgg_encoders = { + "vgg11": { + "encoder": VGGEncoder, + "pretrained_settings": pretrained_settings["vgg11"], + "params": { + "out_channels": (64, 128, 256, 512, 512, 512), + "config": cfg["A"], + "batch_norm": False, + }, + }, + "vgg11_bn": { + "encoder": VGGEncoder, + "pretrained_settings": pretrained_settings["vgg11_bn"], + "params": { + "out_channels": (64, 128, 256, 512, 512, 512), + "config": cfg["A"], + "batch_norm": True, + }, + }, + "vgg13": { + "encoder": VGGEncoder, + "pretrained_settings": pretrained_settings["vgg13"], + "params": { + "out_channels": (64, 128, 256, 512, 512, 512), + "config": cfg["B"], + "batch_norm": False, + }, + }, + "vgg13_bn": { + "encoder": VGGEncoder, + "pretrained_settings": pretrained_settings["vgg13_bn"], + "params": { + "out_channels": (64, 128, 256, 512, 512, 512), + "config": cfg["B"], + "batch_norm": True, + }, + }, + "vgg16": { + "encoder": VGGEncoder, + "pretrained_settings": pretrained_settings["vgg16"], + "params": { + "out_channels": (64, 128, 256, 512, 512, 512), + "config": cfg["D"], + "batch_norm": False, + }, + }, + "vgg16_bn": { + "encoder": VGGEncoder, + "pretrained_settings": pretrained_settings["vgg16_bn"], + "params": { + "out_channels": (64, 128, 256, 512, 512, 512), + "config": cfg["D"], + "batch_norm": True, + }, + }, + "vgg19": { + "encoder": VGGEncoder, + "pretrained_settings": pretrained_settings["vgg19"], + "params": { + "out_channels": (64, 128, 256, 512, 512, 512), + "config": cfg["E"], + "batch_norm": False, + }, + }, + "vgg19_bn": { + "encoder": VGGEncoder, + "pretrained_settings": pretrained_settings["vgg19_bn"], + "params": { + "out_channels": (64, 128, 256, 512, 512, 512), + "config": cfg["E"], + "batch_norm": True, + }, + }, +} diff --git 
a/segmentation_models_pytorch_example/encoders/xception.py b/segmentation_models_pytorch_example/encoders/xception.py new file mode 100644 index 0000000000000000000000000000000000000000..4d106e160263623d2febb258efc26d4bfde8cbe9 --- /dev/null +++ b/segmentation_models_pytorch_example/encoders/xception.py @@ -0,0 +1,66 @@ +import re +import torch.nn as nn + +from pretrainedmodels.models.xception import pretrained_settings +from pretrainedmodels.models.xception import Xception + +from ._base import EncoderMixin + + +class XceptionEncoder(Xception, EncoderMixin): + + def __init__(self, out_channels, *args, depth=5, **kwargs): + super().__init__(*args, **kwargs) + + self._out_channels = out_channels + self._depth = depth + self._in_channels = 3 + + # modify padding to maintain output shape + self.conv1.padding = (1, 1) + self.conv2.padding = (1, 1) + + del self.fc + + def make_dilated(self, stage_list, dilation_list): + raise ValueError("Xception encoder does not support dilated mode " + "due to pooling operation for downsampling!") + + def get_stages(self): + return [ + nn.Identity(), + nn.Sequential(self.conv1, self.bn1, self.relu, self.conv2, self.bn2, self.relu), + self.block1, + self.block2, + nn.Sequential(self.block3, self.block4, self.block5, self.block6, self.block7, + self.block8, self.block9, self.block10, self.block11), + nn.Sequential(self.block12, self.conv3, self.bn3, self.relu, self.conv4, self.bn4), + ] + + def forward(self, x): + stages = self.get_stages() + + features = [] + for i in range(self._depth + 1): + x = stages[i](x) + features.append(x) + + return features + + def load_state_dict(self, state_dict): + # remove linear + state_dict.pop('fc.bias', None) + state_dict.pop('fc.weight', None) + + super().load_state_dict(state_dict) + + +xception_encoders = { + 'xception': { + 'encoder': XceptionEncoder, + 'pretrained_settings': pretrained_settings['xception'], + 'params': { + 'out_channels': (3, 64, 128, 256, 728, 2048), + } + }, +} diff --git a/segmentation_models_pytorch_example/fpn/__init__.py b/segmentation_models_pytorch_example/fpn/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..62ba22d0eb9bcb08f237a010895e8503a2034655 --- /dev/null +++ b/segmentation_models_pytorch_example/fpn/__init__.py @@ -0,0 +1 @@ +from .model import FPN \ No newline at end of file diff --git a/segmentation_models_pytorch_example/fpn/decoder.py b/segmentation_models_pytorch_example/fpn/decoder.py new file mode 100644 index 0000000000000000000000000000000000000000..00f748e7146830dccfbe37a09eb41d016cd0f464 --- /dev/null +++ b/segmentation_models_pytorch_example/fpn/decoder.py @@ -0,0 +1,119 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + + +class Conv3x3GNReLU(nn.Module): + def __init__(self, in_channels, out_channels, upsample=False): + super().__init__() + self.upsample = upsample + self.block = nn.Sequential( + nn.Conv2d( + in_channels, out_channels, (3, 3), stride=1, padding=1, bias=False + ), + nn.GroupNorm(32, out_channels), + nn.ReLU(inplace=True), + ) + + def forward(self, x): + x = self.block(x) + if self.upsample: + x = F.interpolate(x, scale_factor=2, mode="bilinear", align_corners=True) + return x + + +class FPNBlock(nn.Module): + def __init__(self, pyramid_channels, skip_channels): + super().__init__() + self.skip_conv = nn.Conv2d(skip_channels, pyramid_channels, kernel_size=1) + + def forward(self, x, skip=None): + x = F.interpolate(x, scale_factor=2, mode="nearest") + skip = self.skip_conv(skip) + x = x + skip + return x + 
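# --- Editorial usage sketch (illustration only, not part of the patch itself) ---------
# FPNBlock implements one step of the FPN top-down pathway: the coarser pyramid map is
# upsampled 2x with nearest-neighbour interpolation, the encoder skip is projected to
# `pyramid_channels` by a 1x1 convolution, and the two are summed. A minimal guarded
# sketch with dummy tensors (the shapes are illustrative only):
if __name__ == "__main__":
    import torch

    block = FPNBlock(pyramid_channels=256, skip_channels=1024)
    p5 = torch.randn(1, 256, 7, 7)     # coarser pyramid level (e.g. output of the 1x1 "p5" conv below)
    c4 = torch.randn(1, 1024, 14, 14)  # encoder feature at twice the spatial resolution
    p4 = block(p5, c4)
    print(tuple(p4.shape))             # -> (1, 256, 14, 14)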
+ +class SegmentationBlock(nn.Module): + def __init__(self, in_channels, out_channels, n_upsamples=0): + super().__init__() + + blocks = [Conv3x3GNReLU(in_channels, out_channels, upsample=bool(n_upsamples))] + + if n_upsamples > 1: + for _ in range(1, n_upsamples): + blocks.append(Conv3x3GNReLU(out_channels, out_channels, upsample=True)) + + self.block = nn.Sequential(*blocks) + + def forward(self, x): + return self.block(x) + + +class MergeBlock(nn.Module): + def __init__(self, policy): + super().__init__() + if policy not in ["add", "cat"]: + raise ValueError( + "`merge_policy` must be one of: ['add', 'cat'], got {}".format( + policy + ) + ) + self.policy = policy + + def forward(self, x): + if self.policy == 'add': + return sum(x) + elif self.policy == 'cat': + return torch.cat(x, dim=1) + else: + raise ValueError( + "`merge_policy` must be one of: ['add', 'cat'], got {}".format(self.policy) + ) + + +class FPNDecoder(nn.Module): + def __init__( + self, + encoder_channels, + encoder_depth=5, + pyramid_channels=256, + segmentation_channels=128, + dropout=0.2, + merge_policy="add", + ): + super().__init__() + + self.out_channels = segmentation_channels if merge_policy == "add" else segmentation_channels * 4 + if encoder_depth < 3: + raise ValueError("Encoder depth for FPN decoder cannot be less than 3, got {}.".format(encoder_depth)) + + encoder_channels = encoder_channels[::-1] + encoder_channels = encoder_channels[:encoder_depth + 1] + + self.p5 = nn.Conv2d(encoder_channels[0], pyramid_channels, kernel_size=1) + self.p4 = FPNBlock(pyramid_channels, encoder_channels[1]) + self.p3 = FPNBlock(pyramid_channels, encoder_channels[2]) + self.p2 = FPNBlock(pyramid_channels, encoder_channels[3]) + + self.seg_blocks = nn.ModuleList([ + SegmentationBlock(pyramid_channels, segmentation_channels, n_upsamples=n_upsamples) + for n_upsamples in [3, 2, 1, 0] + ]) + + self.merge = MergeBlock(merge_policy) + self.dropout = nn.Dropout2d(p=dropout, inplace=True) + + def forward(self, *features): + c2, c3, c4, c5 = features[-4:] + + p5 = self.p5(c5) + p4 = self.p4(p5, c4) + p3 = self.p3(p4, c3) + p2 = self.p2(p3, c2) + + feature_pyramid = [seg_block(p) for seg_block, p in zip(self.seg_blocks, [p5, p4, p3, p2])] + x = self.merge(feature_pyramid) + x = self.dropout(x) + + return x diff --git a/segmentation_models_pytorch_example/fpn/model.py b/segmentation_models_pytorch_example/fpn/model.py new file mode 100644 index 0000000000000000000000000000000000000000..827c7bcbabae4ca68d9bcd6750fec84bf9b3413a --- /dev/null +++ b/segmentation_models_pytorch_example/fpn/model.py @@ -0,0 +1,93 @@ +from typing import Optional, Union +from .decoder import FPNDecoder +from ..base import SegmentationModel, SegmentationHead, ClassificationHead +from ..encoders import get_encoder + + +class FPN(SegmentationModel): + """FPN_ is a fully convolution neural network for image semantic segmentation. + + Args: + encoder_name: Name of the classification model that will be used as an encoder (a.k.a backbone) + to extract features of different spatial resolution + encoder_depth: A number of stages used in encoder in range [3, 5]. Each stage generate features + two times smaller in spatial dimensions than previous one (e.g. for depth 0 we will have features + with shapes [(N, C, H, W),], for depth 1 - [(N, C, H, W), (N, C, H // 2, W // 2)] and so on). 
+ Default is 5 + encoder_weights: One of **None** (random initialization), **"imagenet"** (pre-training on ImageNet) and + other pretrained weights (see table with available weights for each encoder_name) + decoder_pyramid_channels: A number of convolution filters in Feature Pyramid of FPN_ + decoder_segmentation_channels: A number of convolution filters in segmentation blocks of FPN_ + decoder_merge_policy: Determines how to merge pyramid features inside FPN. Available options are **add** and **cat** + decoder_dropout: Spatial dropout rate in range (0, 1) for feature pyramid in FPN_ + in_channels: A number of input channels for the model, default is 3 (RGB images) + classes: A number of classes for output mask (or you can think as a number of channels of output mask) + activation: An activation function to apply after the final convolution layer. + Available options are **"sigmoid"**, **"softmax"**, **"logsoftmax"**, **"tanh"**, **"identity"**, **callable** and **None**. + Default is **None** + upsampling: Final upsampling factor. Default is 4 to preserve input-output spatial shape identity + aux_params: Dictionary with parameters of the auxiliary output (classification head). Auxiliary output is build + on top of encoder if **aux_params** is not **None** (default). Supported params: + - classes (int): A number of classes + - pooling (str): One of "max", "avg". Default is "avg" + - dropout (float): Dropout factor in [0, 1) + - activation (str): An activation function to apply "sigmoid"/"softmax" (could be **None** to return logits) + + Returns: + ``torch.nn.Module``: **FPN** + + .. _FPN: + http://presentations.cocodataset.org/COCO17-Stuff-FAIR.pdf + + """ + + def __init__( + self, + encoder_name: str = "resnet34", + encoder_depth: int = 5, + encoder_weights: Optional[str] = "imagenet", + decoder_pyramid_channels: int = 256, + decoder_segmentation_channels: int = 128, + decoder_merge_policy: str = "add", + decoder_dropout: float = 0.2, + in_channels: int = 3, + classes: int = 1, + activation: Optional[str] = None, + upsampling: int = 4, + aux_params: Optional[dict] = None, + ): + super().__init__() + + self.encoder = get_encoder( + encoder_name, + in_channels=in_channels, + depth=encoder_depth, + weights=encoder_weights, + ) + + self.decoder = FPNDecoder( + encoder_channels=self.encoder.out_channels, + encoder_depth=encoder_depth, + pyramid_channels=decoder_pyramid_channels, + segmentation_channels=decoder_segmentation_channels, + dropout=decoder_dropout, + merge_policy=decoder_merge_policy, + ) + + self.segmentation_head = SegmentationHead( + in_channels=self.decoder.out_channels, + out_channels=classes, + activation=activation, + kernel_size=1, + upsampling=upsampling, + ) + + if aux_params is not None: + self.classification_head = ClassificationHead( + in_channels=self.encoder.out_channels[-1], **aux_params + ) + else: + self.classification_head = None + + self.name = "fpn-{}".format(encoder_name) + self.initialize() diff --git a/segmentation_models_pytorch_example/linknet/__init__.py b/segmentation_models_pytorch_example/linknet/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6a26d57217858cf07e82ca10dbbadae0a72e0998 --- /dev/null +++ b/segmentation_models_pytorch_example/linknet/__init__.py @@ -0,0 +1 @@ +from .model import Linknet \ No newline at end of file diff --git a/segmentation_models_pytorch_example/linknet/decoder.py b/segmentation_models_pytorch_example/linknet/decoder.py new file mode 100644 index 
0000000000000000000000000000000000000000..c7e05ebe1ef3761c8ba00c70571931e33f46787f --- /dev/null +++ b/segmentation_models_pytorch_example/linknet/decoder.py @@ -0,0 +1,70 @@ +import torch.nn as nn + +from ..base import modules + + +class TransposeX2(nn.Sequential): + + def __init__(self, in_channels, out_channels, use_batchnorm=True): + super().__init__() + layers = [ + nn.ConvTranspose2d(in_channels, out_channels, kernel_size=4, stride=2, padding=1), + nn.ReLU(inplace=True) + ] + + if use_batchnorm: + layers.insert(1, nn.BatchNorm2d(out_channels)) + + super().__init__(*layers) + + +class DecoderBlock(nn.Module): + def __init__(self, in_channels, out_channels, use_batchnorm=True): + super().__init__() + + self.block = nn.Sequential( + modules.Conv2dReLU(in_channels, in_channels // 4, kernel_size=1, use_batchnorm=use_batchnorm), + TransposeX2(in_channels // 4, in_channels // 4, use_batchnorm=use_batchnorm), + modules.Conv2dReLU(in_channels // 4, out_channels, kernel_size=1, use_batchnorm=use_batchnorm), + ) + + def forward(self, x, skip=None): + x = self.block(x) + if skip is not None: + x = x + skip + return x + + +class LinknetDecoder(nn.Module): + + def __init__( + self, + encoder_channels, + prefinal_channels=32, + n_blocks=5, + use_batchnorm=True, + ): + super().__init__() + + encoder_channels = encoder_channels[1:] # remove first skip + encoder_channels = encoder_channels[::-1] # reverse channels to start from head of encoder + + channels = list(encoder_channels) + [prefinal_channels] + + self.blocks = nn.ModuleList([ + DecoderBlock(channels[i], channels[i + 1], use_batchnorm=use_batchnorm) + for i in range(n_blocks) + ]) + + def forward(self, *features): + features = features[1:] # remove first skip + features = features[::-1] # reverse channels to start from head of encoder + + x = features[0] + skips = features[1:] + + for i, decoder_block in enumerate(self.blocks): + skip = skips[i] if i < len(skips) else None + x = decoder_block(x, skip) + + return x diff --git a/segmentation_models_pytorch_example/linknet/model.py b/segmentation_models_pytorch_example/linknet/model.py new file mode 100644 index 0000000000000000000000000000000000000000..1ce842eb63c1d59a39b863b9ab43d5d5f65d554b --- /dev/null +++ b/segmentation_models_pytorch_example/linknet/model.py @@ -0,0 +1,86 @@ +from typing import Optional, Union +from .decoder import LinknetDecoder +from ..base import SegmentationHead, SegmentationModel, ClassificationHead +from ..encoders import get_encoder + + +class Linknet(SegmentationModel): + """Linknet_ is a fully convolution neural network for image semantic segmentation. Consist of *encoder* + and *decoder* parts connected with *skip connections*. Encoder extract features of different spatial + resolution (skip connections) which are used by decoder to define accurate segmentation mask. Use *sum* + for fusing decoder blocks with skip connections. + + Note: + This implementation by default has 4 skip connections (original - 3). + + Args: + encoder_name: Name of the classification model that will be used as an encoder (a.k.a backbone) + to extract features of different spatial resolution + encoder_depth: A number of stages used in encoder in range [3, 5]. Each stage generate features + two times smaller in spatial dimensions than previous one (e.g. for depth 0 we will have features + with shapes [(N, C, H, W),], for depth 1 - [(N, C, H, W), (N, C, H // 2, W // 2)] and so on). 
+ Default is 5 + encoder_weights: One of **None** (random initialization), **"imagenet"** (pre-training on ImageNet) and + other pretrained weights (see table with available weights for each encoder_name) + decoder_use_batchnorm: If **True**, BatchNorm2d layer between Conv2D and Activation layers + is used. If **"inplace"** InplaceABN will be used, allows to decrease memory consumption. + Available options are **True, False, "inplace"** + in_channels: A number of input channels for the model, default is 3 (RGB images) + classes: A number of classes for output mask (or you can think as a number of channels of output mask) + activation: An activation function to apply after the final convolution layer. + Available options are **"sigmoid"**, **"softmax"**, **"logsoftmax"**, **"tanh"**, **"identity"**, **callable** and **None**. + Default is **None** + aux_params: Dictionary with parameters of the auxiliary output (classification head). Auxiliary output is build + on top of encoder if **aux_params** is not **None** (default). Supported params: + - classes (int): A number of classes + - pooling (str): One of "max", "avg". Default is "avg" + - dropout (float): Dropout factor in [0, 1) + - activation (str): An activation function to apply "sigmoid"/"softmax" (could be **None** to return logits) + + Returns: + ``torch.nn.Module``: **Linknet** + + .. _Linknet: + https://arxiv.org/abs/1707.03718 + """ + + def __init__( + self, + encoder_name: str = "resnet34", + encoder_depth: int = 5, + encoder_weights: Optional[str] = "imagenet", + decoder_use_batchnorm: bool = True, + in_channels: int = 3, + classes: int = 1, + activation: Optional[Union[str, callable]] = None, + aux_params: Optional[dict] = None, + ): + super().__init__() + + self.encoder = get_encoder( + encoder_name, + in_channels=in_channels, + depth=encoder_depth, + weights=encoder_weights, + ) + + self.decoder = LinknetDecoder( + encoder_channels=self.encoder.out_channels, + n_blocks=encoder_depth, + prefinal_channels=32, + use_batchnorm=decoder_use_batchnorm, + ) + + self.segmentation_head = SegmentationHead( + in_channels=32, out_channels=classes, activation=activation, kernel_size=1 + ) + + if aux_params is not None: + self.classification_head = ClassificationHead( + in_channels=self.encoder.out_channels[-1], **aux_params + ) + else: + self.classification_head = None + + self.name = "link-{}".format(encoder_name) + self.initialize() diff --git a/segmentation_models_pytorch_example/losses/__init__.py b/segmentation_models_pytorch_example/losses/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a972d49a9b8f786511a0528f248bb7fed2b4d65f --- /dev/null +++ b/segmentation_models_pytorch_example/losses/__init__.py @@ -0,0 +1,9 @@ +from .constants import BINARY_MODE, MULTICLASS_MODE, MULTILABEL_MODE + +from .jaccard import JaccardLoss +from .dice import DiceLoss +from .focal import FocalLoss +from .lovasz import LovaszLoss +from .soft_bce import SoftBCEWithLogitsLoss +from .soft_ce import SoftCrossEntropyLoss +from .tversky import TverskyLoss diff --git a/segmentation_models_pytorch_example/losses/_functional.py b/segmentation_models_pytorch_example/losses/_functional.py new file mode 100644 index 0000000000000000000000000000000000000000..4d402e47f16d27735268601cf7ee37c01a0e4a89 --- /dev/null +++ b/segmentation_models_pytorch_example/losses/_functional.py @@ -0,0 +1,269 @@ +import math +import numpy as np + +from typing import Optional + +import torch +import torch.nn.functional as F + +__all__ = [ + 
"focal_loss_with_logits", + "softmax_focal_loss_with_logits", + "soft_jaccard_score", + "soft_dice_score", + "wing_loss", +] + + +def to_tensor(x, dtype=None) -> torch.Tensor: + if isinstance(x, torch.Tensor): + if dtype is not None: + x = x.type(dtype) + return x + if isinstance(x, np.ndarray): + x = torch.from_numpy(x) + if dtype is not None: + x = x.type(dtype) + return x + if isinstance(x, (list, tuple)): + x = np.array(x) + x = torch.from_numpy(x) + if dtype is not None: + x = x.type(dtype) + return x + + +def focal_loss_with_logits( + output: torch.Tensor, + target: torch.Tensor, + gamma: float = 2.0, + alpha: Optional[float] = 0.25, + reduction: str = "mean", + normalized: bool = False, + reduced_threshold: Optional[float] = None, + eps: float = 1e-6, +) -> torch.Tensor: + """Compute binary focal loss between target and output logits. + See :class:`~pytorch_toolbelt.losses.FocalLoss` for details. + + Args: + output: Tensor of arbitrary shape (predictions of the model) + target: Tensor of the same shape as input + gamma: Focal loss power factor + alpha: Weight factor to balance positive and negative samples. Alpha must be in [0...1] range, + high values will give more weight to positive class. + reduction (string, optional): Specifies the reduction to apply to the output: + 'none' | 'mean' | 'sum' | 'batchwise_mean'. 'none': no reduction will be applied, + 'mean': the sum of the output will be divided by the number of + elements in the output, 'sum': the output will be summed. Note: :attr:`size_average` + and :attr:`reduce` are in the process of being deprecated, and in the meantime, + specifying either of those two args will override :attr:`reduction`. + 'batchwise_mean' computes mean loss per sample in batch. Default: 'mean' + normalized (bool): Compute normalized focal loss (https://arxiv.org/pdf/1909.07829.pdf). + reduced_threshold (float, optional): Compute reduced focal loss (https://arxiv.org/abs/1903.01347). + + References: + https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/loss/losses.py + """ + target = target.type(output.type()) + + logpt = F.binary_cross_entropy_with_logits(output, target, reduction="none") + pt = torch.exp(-logpt) + + # compute the loss + if reduced_threshold is None: + focal_term = (1.0 - pt).pow(gamma) + else: + focal_term = ((1.0 - pt) / reduced_threshold).pow(gamma) + focal_term[pt < reduced_threshold] = 1 + + loss = focal_term * logpt + + if alpha is not None: + loss *= alpha * target + (1 - alpha) * (1 - target) + + if normalized: + norm_factor = focal_term.sum().clamp_min(eps) + loss /= norm_factor + + if reduction == "mean": + loss = loss.mean() + if reduction == "sum": + loss = loss.sum() + if reduction == "batchwise_mean": + loss = loss.sum(0) + + return loss + + +def softmax_focal_loss_with_logits( + output: torch.Tensor, + target: torch.Tensor, + gamma: float = 2.0, + reduction="mean", + normalized=False, + reduced_threshold: Optional[float] = None, + eps: float = 1e-6, +) -> torch.Tensor: + """Softmax version of focal loss between target and output logits. + See :class:`~pytorch_toolbelt.losses.FocalLoss` for details. + + Args: + output: Tensor of shape [B, C, *] (Similar to nn.CrossEntropyLoss) + target: Tensor of shape [B, *] (Similar to nn.CrossEntropyLoss) + reduction (string, optional): Specifies the reduction to apply to the output: + 'none' | 'mean' | 'sum' | 'batchwise_mean'. 
'none': no reduction will be applied, + 'mean': the sum of the output will be divided by the number of + elements in the output, 'sum': the output will be summed. Note: :attr:`size_average` + and :attr:`reduce` are in the process of being deprecated, and in the meantime, + specifying either of those two args will override :attr:`reduction`. + 'batchwise_mean' computes mean loss per sample in batch. Default: 'mean' + normalized (bool): Compute normalized focal loss (https://arxiv.org/pdf/1909.07829.pdf). + reduced_threshold (float, optional): Compute reduced focal loss (https://arxiv.org/abs/1903.01347). + """ + log_softmax = F.log_softmax(output, dim=1) + + loss = F.nll_loss(log_softmax, target, reduction="none") + pt = torch.exp(-loss) + + # compute the loss + if reduced_threshold is None: + focal_term = (1.0 - pt).pow(gamma) + else: + focal_term = ((1.0 - pt) / reduced_threshold).pow(gamma) + focal_term[pt < reduced_threshold] = 1 + + loss = focal_term * loss + + if normalized: + norm_factor = focal_term.sum().clamp_min(eps) + loss = loss / norm_factor + + if reduction == "mean": + loss = loss.mean() + if reduction == "sum": + loss = loss.sum() + if reduction == "batchwise_mean": + loss = loss.sum(0) + + return loss + + +def soft_jaccard_score( + output: torch.Tensor, target: torch.Tensor, smooth: float = 0.0, eps: float = 1e-7, dims=None +) -> torch.Tensor: + assert output.size() == target.size() + if dims is not None: + intersection = torch.sum(output * target, dim=dims) + cardinality = torch.sum(output + target, dim=dims) + else: + intersection = torch.sum(output * target) + cardinality = torch.sum(output + target) + + union = cardinality - intersection + jaccard_score = (intersection + smooth) / (union + smooth).clamp_min(eps) + return jaccard_score + + +def soft_dice_score( + output: torch.Tensor, target: torch.Tensor, smooth: float = 0.0, eps: float = 1e-7, dims=None +) -> torch.Tensor: + assert output.size() == target.size() + if dims is not None: + intersection = torch.sum(output * target, dim=dims) + cardinality = torch.sum(output + target, dim=dims) + else: + intersection = torch.sum(output * target) + cardinality = torch.sum(output + target) + dice_score = (2.0 * intersection + smooth) / (cardinality + smooth).clamp_min(eps) + return dice_score + + +def soft_tversky_score(output: torch.Tensor, target: torch.Tensor, alpha: float, beta: float, + smooth: float = 0.0, eps: float = 1e-7, dims=None) -> torch.Tensor: + assert output.size() == target.size() + if dims is not None: + intersection = torch.sum(output * target, dim=dims) # TP + fp = torch.sum(output * (1. - target), dim=dims) + fn = torch.sum((1 - output) * target, dim=dims) + else: + intersection = torch.sum(output * target) # TP + fp = torch.sum(output * (1. 
- target)) + fn = torch.sum((1 - output) * target) + + tversky_score = (intersection + smooth) / (intersection + alpha * fp + beta * fn + smooth).clamp_min(eps) + return tversky_score + + +def wing_loss(output: torch.Tensor, target: torch.Tensor, width=5, curvature=0.5, reduction="mean"): + """ + https://arxiv.org/pdf/1711.06753.pdf + :param output: + :param target: + :param width: + :param curvature: + :param reduction: + :return: + """ + diff_abs = (target - output).abs() + loss = diff_abs.clone() + + idx_smaller = diff_abs < width + idx_bigger = diff_abs >= width + + loss[idx_smaller] = width * torch.log(1 + diff_abs[idx_smaller] / curvature) + + C = width - width * math.log(1 + width / curvature) + loss[idx_bigger] = loss[idx_bigger] - C + + if reduction == "sum": + loss = loss.sum() + + if reduction == "mean": + loss = loss.mean() + + return loss + + +def label_smoothed_nll_loss( + lprobs: torch.Tensor, target: torch.Tensor, epsilon: float, ignore_index=None, reduction="mean", dim=-1 +) -> torch.Tensor: + """ + Source: https://github.com/pytorch/fairseq/blob/master/fairseq/criterions/label_smoothed_cross_entropy.py + :param lprobs: Log-probabilities of predictions (e.g after log_softmax) + :param target: + :param epsilon: + :param ignore_index: + :param reduction: + :return: + """ + if target.dim() == lprobs.dim() - 1: + target = target.unsqueeze(dim) + + if ignore_index is not None: + pad_mask = target.eq(ignore_index) + target = target.masked_fill(pad_mask, 0) + nll_loss = -lprobs.gather(dim=dim, index=target) + smooth_loss = -lprobs.sum(dim=dim, keepdim=True) + + # nll_loss.masked_fill_(pad_mask, 0.0) + # smooth_loss.masked_fill_(pad_mask, 0.0) + nll_loss = nll_loss.masked_fill(pad_mask, 0.0) + smooth_loss = smooth_loss.masked_fill(pad_mask, 0.0) + else: + nll_loss = -lprobs.gather(dim=dim, index=target) + smooth_loss = -lprobs.sum(dim=dim, keepdim=True) + + nll_loss = nll_loss.squeeze(dim) + smooth_loss = smooth_loss.squeeze(dim) + + if reduction == "sum": + nll_loss = nll_loss.sum() + smooth_loss = smooth_loss.sum() + if reduction == "mean": + nll_loss = nll_loss.mean() + smooth_loss = smooth_loss.mean() + + eps_i = epsilon / lprobs.size(dim) + loss = (1.0 - epsilon) * nll_loss + eps_i * smooth_loss + return loss diff --git a/segmentation_models_pytorch_example/losses/constants.py b/segmentation_models_pytorch_example/losses/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..9640190a4a83044964fec261ca597bd0cc864288 --- /dev/null +++ b/segmentation_models_pytorch_example/losses/constants.py @@ -0,0 +1,18 @@ +#: Loss binary mode suppose you are solving binary segmentation task. +#: That mean yor have only one class which pixels are labled as **1**, +#: the rest pixels are background and labeled as **0**. +#: Target mask shape - (N, H, W), model output mask shape (N, 1, H, W). +BINARY_MODE: str = "binary" + +#: Loss multiclass mode suppose you are solving multi-**class** segmentation task. +#: That mean you have *C = 1..N* classes which have unique label values, +#: classes are mutually exclusive and all pixels are labeled with theese values. +#: Target mask shape - (N, H, W), model output mask shape (N, C, H, W). +MULTICLASS_MODE: str = "multiclass" + +#: Loss multilabel mode suppose you are solving multi-**label** segmentation task. 
+#: That mean you have *C = 1..N* classes which pixels are labeled as **1**, +#: classes are not mutually exclusive and each class have its own *channel*, +#: pixels in each channel which are not belong to class labeled as **0**. +#: Target mask shape - (N, C, H, W), model output mask shape (N, C, H, W). +MULTILABEL_MODE: str = "multilabel" diff --git a/segmentation_models_pytorch_example/losses/dice.py b/segmentation_models_pytorch_example/losses/dice.py new file mode 100644 index 0000000000000000000000000000000000000000..b09746e67092a05fde41a254a07290aafa3b7bb7 --- /dev/null +++ b/segmentation_models_pytorch_example/losses/dice.py @@ -0,0 +1,131 @@ +from typing import Optional, List + +import torch +import torch.nn.functional as F +from torch.nn.modules.loss import _Loss +from ._functional import soft_dice_score, to_tensor +from .constants import BINARY_MODE, MULTICLASS_MODE, MULTILABEL_MODE + +__all__ = ["DiceLoss"] + + +class DiceLoss(_Loss): + + def __init__( + self, + mode: str, + classes: Optional[List[int]] = None, + log_loss: bool = False, + from_logits: bool = True, + smooth: float = 0.0, + ignore_index: Optional[int] = None, + eps: float = 1e-7, + ): + """Implementation of Dice loss for image segmentation task. + It supports binary, multiclass and multilabel cases + + Args: + mode: Loss mode 'binary', 'multiclass' or 'multilabel' + classes: List of classes that contribute in loss computation. By default, all channels are included. + log_loss: If True, loss computed as `- log(dice_coeff)`, otherwise `1 - dice_coeff` + from_logits: If True, assumes input is raw logits + smooth: Smoothness constant for dice coefficient (a) + ignore_index: Label that indicates ignored pixels (does not contribute to loss) + eps: A small epsilon for numerical stability to avoid zero division error + (denominator will be always greater or equal to eps) + + Shape + - **y_pred** - torch.Tensor of shape (N, C, H, W) + - **y_true** - torch.Tensor of shape (N, H, W) or (N, C, H, W) + + Reference + https://github.com/BloodAxe/pytorch-toolbelt + """ + assert mode in {BINARY_MODE, MULTILABEL_MODE, MULTICLASS_MODE} + super(DiceLoss, self).__init__() + self.mode = mode + if classes is not None: + assert mode != BINARY_MODE, "Masking classes is not supported with mode=binary" + classes = to_tensor(classes, dtype=torch.long) + + self.classes = classes + self.from_logits = from_logits + self.smooth = smooth + self.eps = eps + self.log_loss = log_loss + self.ignore_index = ignore_index + + def forward(self, y_pred: torch.Tensor, y_true: torch.Tensor) -> torch.Tensor: + + assert y_true.size(0) == y_pred.size(0) + + if self.from_logits: + # Apply activations to get [0..1] class probabilities + # Using Log-Exp as this gives more numerically stable result and does not cause vanishing gradient on + # extreme values 0 and 1 + if self.mode == MULTICLASS_MODE: + y_pred = y_pred.log_softmax(dim=1).exp() + else: + y_pred = F.logsigmoid(y_pred).exp() + + bs = y_true.size(0) + num_classes = y_pred.size(1) + dims = (0, 2) + + if self.mode == BINARY_MODE: + y_true = y_true.view(bs, 1, -1) + y_pred = y_pred.view(bs, 1, -1) + + if self.ignore_index is not None: + mask = y_true != self.ignore_index + y_pred = y_pred * mask + y_true = y_true * mask + + if self.mode == MULTICLASS_MODE: + y_true = y_true.view(bs, -1) + y_pred = y_pred.view(bs, num_classes, -1) + + if self.ignore_index is not None: + mask = y_true != self.ignore_index + y_pred = y_pred * mask.unsqueeze(1) + + y_true = F.one_hot((y_true * mask).to(torch.long), 
num_classes) # N,H*W -> N,H*W, C + y_true = y_true.permute(0, 2, 1) * mask.unsqueeze(1) # H, C, H*W + else: + y_true = F.one_hot(y_true, num_classes) # N,H*W -> N,H*W, C + y_true = y_true.permute(0, 2, 1) # H, C, H*W + + if self.mode == MULTILABEL_MODE: + y_true = y_true.view(bs, num_classes, -1) + y_pred = y_pred.view(bs, num_classes, -1) + + if self.ignore_index is not None: + mask = y_true != self.ignore_index + y_pred = y_pred * mask + y_true = y_true * mask + + scores = self.compute_score(y_pred, y_true.type_as(y_pred), smooth=self.smooth, eps=self.eps, dims=dims) + + if self.log_loss: + loss = -torch.log(scores.clamp_min(self.eps)) + else: + loss = 1.0 - scores + + # Dice loss is undefined for non-empty classes + # So we zero contribution of channel that does not have true pixels + # NOTE: A better workaround would be to use loss term `mean(y_pred)` + # for this case, however it will be a modified jaccard loss + + mask = y_true.sum(dims) > 0 + loss *= mask.to(loss.dtype) + + if self.classes is not None: + loss = loss[self.classes] + + return self.aggregate_loss(loss) + + def aggregate_loss(self, loss): + return loss.mean() + + def compute_score(self, output, target, smooth=0.0, eps=1e-7, dims=None) -> torch.Tensor: + return soft_dice_score(output, target, smooth, eps, dims) diff --git a/segmentation_models_pytorch_example/losses/focal.py b/segmentation_models_pytorch_example/losses/focal.py new file mode 100644 index 0000000000000000000000000000000000000000..d5c9a670c4cb61fea1b0f4a3bab0e230ff905a45 --- /dev/null +++ b/segmentation_models_pytorch_example/losses/focal.py @@ -0,0 +1,90 @@ +from typing import Optional +from functools import partial + +import torch +from torch.nn.modules.loss import _Loss +from ._functional import focal_loss_with_logits +from .constants import BINARY_MODE, MULTICLASS_MODE, MULTILABEL_MODE + +__all__ = ["FocalLoss"] + + +class FocalLoss(_Loss): + + def __init__( + self, + mode: str, + alpha: Optional[float] = None, + gamma: Optional[float] = 2., + ignore_index: Optional[int] = None, + reduction: Optional[str] = "mean", + normalized: bool = False, + reduced_threshold: Optional[float] = None, + ): + """Compute Focal loss + + Args: + mode: Loss mode 'binary', 'multiclass' or 'multilabel' + alpha: Prior probability of having positive value in target. + gamma: Power factor for dampening weight (focal strength). + ignore_index: If not None, targets may contain values to be ignored. + Target values equal to ignore_index will be ignored from loss computation. + normalized: Compute normalized focal loss (https://arxiv.org/pdf/1909.07829.pdf). + reduced_threshold: Switch to reduced focal loss. Note, when using this mode you should use `reduction="sum"`. 
+ + Shape + - **y_pred** - torch.Tensor of shape (N, C, H, W) + - **y_true** - torch.Tensor of shape (N, H, W) or (N, C, H, W) + + Reference + https://github.com/BloodAxe/pytorch-toolbelt + + """ + assert mode in {BINARY_MODE, MULTILABEL_MODE, MULTICLASS_MODE} + super().__init__() + + self.mode = mode + self.ignore_index = ignore_index + self.focal_loss_fn = partial( + focal_loss_with_logits, + alpha=alpha, + gamma=gamma, + reduced_threshold=reduced_threshold, + reduction=reduction, + normalized=normalized, + ) + + def forward(self, y_pred: torch.Tensor, y_true: torch.Tensor) -> torch.Tensor: + + if self.mode in {BINARY_MODE, MULTILABEL_MODE}: + y_true = y_true.view(-1) + y_pred = y_pred.view(-1) + + if self.ignore_index is not None: + # Filter predictions with ignore label from loss computation + not_ignored = y_true != self.ignore_index + y_pred = y_pred[not_ignored] + y_true = y_true[not_ignored] + + loss = self.focal_loss_fn(y_pred, y_true) + + elif self.mode == MULTICLASS_MODE: + + num_classes = y_pred.size(1) + loss = 0 + + # Filter anchors with -1 label from loss computation + if self.ignore_index is not None: + not_ignored = y_true != self.ignore_index + + for cls in range(num_classes): + cls_y_true = (y_true == cls).long() + cls_y_pred = y_pred[:, cls, ...] + + if self.ignore_index is not None: + cls_y_true = cls_y_true[not_ignored] + cls_y_pred = cls_y_pred[not_ignored] + + loss += self.focal_loss_fn(cls_y_pred, cls_y_true) + + return loss diff --git a/segmentation_models_pytorch_example/losses/jaccard.py b/segmentation_models_pytorch_example/losses/jaccard.py new file mode 100644 index 0000000000000000000000000000000000000000..5e2c0b06926d40892bd041fab54e7c0bd0d1908a --- /dev/null +++ b/segmentation_models_pytorch_example/losses/jaccard.py @@ -0,0 +1,106 @@ +from typing import Optional, List + +import torch +import torch.nn.functional as F +from torch.nn.modules.loss import _Loss +from ._functional import soft_jaccard_score, to_tensor +from .constants import BINARY_MODE, MULTICLASS_MODE, MULTILABEL_MODE + +__all__ = ["JaccardLoss"] + + +class JaccardLoss(_Loss): + + def __init__( + self, + mode: str, + classes: Optional[List[int]] = None, + log_loss: bool = False, + from_logits: bool = True, + smooth: float = 0., + eps: float = 1e-7, + ): + """Implementation of Jaccard loss for image segmentation task. + It supports binary, multiclass and multilabel cases + + Args: + mode: Loss mode 'binary', 'multiclass' or 'multilabel' + classes: List of classes that contribute in loss computation. By default, all channels are included. 
+ log_loss: If True, loss computed as `- log(jaccard_coeff)`, otherwise `1 - jaccard_coeff` + from_logits: If True, assumes input is raw logits + smooth: Smoothness constant for dice coefficient + eps: A small epsilon for numerical stability to avoid zero division error + (denominator will be always greater or equal to eps) + + Shape + - **y_pred** - torch.Tensor of shape (N, C, H, W) + - **y_true** - torch.Tensor of shape (N, H, W) or (N, C, H, W) + + Reference + https://github.com/BloodAxe/pytorch-toolbelt + """ + assert mode in {BINARY_MODE, MULTILABEL_MODE, MULTICLASS_MODE} + super(JaccardLoss, self).__init__() + + self.mode = mode + if classes is not None: + assert mode != BINARY_MODE, "Masking classes is not supported with mode=binary" + classes = to_tensor(classes, dtype=torch.long) + + self.classes = classes + self.from_logits = from_logits + self.smooth = smooth + self.eps = eps + self.log_loss = log_loss + + def forward(self, y_pred: torch.Tensor, y_true: torch.Tensor) -> torch.Tensor: + + assert y_true.size(0) == y_pred.size(0) + + if self.from_logits: + # Apply activations to get [0..1] class probabilities + # Using Log-Exp as this gives more numerically stable result and does not cause vanishing gradient on + # extreme values 0 and 1 + if self.mode == MULTICLASS_MODE: + y_pred = y_pred.log_softmax(dim=1).exp() + else: + y_pred = F.logsigmoid(y_pred).exp() + + bs = y_true.size(0) + num_classes = y_pred.size(1) + dims = (0, 2) + + if self.mode == BINARY_MODE: + y_true = y_true.view(bs, 1, -1) + y_pred = y_pred.view(bs, 1, -1) + + if self.mode == MULTICLASS_MODE: + y_true = y_true.view(bs, -1) + y_pred = y_pred.view(bs, num_classes, -1) + + y_true = F.one_hot(y_true, num_classes) # N,H*W -> N,H*W, C + y_true = y_true.permute(0, 2, 1) # H, C, H*W + + if self.mode == MULTILABEL_MODE: + y_true = y_true.view(bs, num_classes, -1) + y_pred = y_pred.view(bs, num_classes, -1) + + scores = soft_jaccard_score(y_pred, y_true.type(y_pred.dtype), smooth=self.smooth, eps=self.eps, dims=dims) + + if self.log_loss: + loss = -torch.log(scores.clamp_min(self.eps)) + else: + loss = 1.0 - scores + + # IoU loss is defined for non-empty classes + # So we zero contribution of channel that does not have true pixels + # NOTE: A better workaround would be to use loss term `mean(y_pred)` + # for this case, however it will be a modified jaccard loss + + mask = y_true.sum(dims) > 0 + loss *= mask.float() + + if self.classes is not None: + loss = loss[self.classes] + + return loss.mean() diff --git a/segmentation_models_pytorch_example/losses/lovasz.py b/segmentation_models_pytorch_example/losses/lovasz.py new file mode 100644 index 0000000000000000000000000000000000000000..c7311c9431f4f311fbbaa3448719a578983ef7b4 --- /dev/null +++ b/segmentation_models_pytorch_example/losses/lovasz.py @@ -0,0 +1,227 @@ +""" +Lovasz-Softmax and Jaccard hinge loss in PyTorch +Maxim Berman 2018 ESAT-PSI KU Leuven (MIT License) +""" + +from __future__ import print_function, division +from typing import Optional + +import torch +import torch.nn.functional as F +from torch.nn.modules.loss import _Loss +from .constants import BINARY_MODE, MULTICLASS_MODE, MULTILABEL_MODE + +try: + from itertools import ifilterfalse +except ImportError: # py3k + from itertools import filterfalse as ifilterfalse + +__all__ = ["LovaszLoss"] + + +def _lovasz_grad(gt_sorted): + """Compute gradient of the Lovasz extension w.r.t sorted errors + See Alg. 
1 in paper + """ + p = len(gt_sorted) + gts = gt_sorted.sum() + intersection = gts - gt_sorted.float().cumsum(0) + union = gts + (1 - gt_sorted).float().cumsum(0) + jaccard = 1.0 - intersection / union + if p > 1: # cover 1-pixel case + jaccard[1:p] = jaccard[1:p] - jaccard[0:-1] + return jaccard + + +def _lovasz_hinge(logits, labels, per_image=True, ignore=None): + """ + Binary Lovasz hinge loss + logits: [B, H, W] Logits at each pixel (between -infinity and +infinity) + labels: [B, H, W] Tensor, binary ground truth masks (0 or 1) + per_image: compute the loss per image instead of per batch + ignore: void class id + """ + if per_image: + loss = mean( + _lovasz_hinge_flat(*_flatten_binary_scores(log.unsqueeze(0), lab.unsqueeze(0), ignore)) + for log, lab in zip(logits, labels) + ) + else: + loss = _lovasz_hinge_flat(*_flatten_binary_scores(logits, labels, ignore)) + return loss + + +def _lovasz_hinge_flat(logits, labels): + """Binary Lovasz hinge loss + Args: + logits: [P] Logits at each prediction (between -infinity and +infinity) + labels: [P] Tensor, binary ground truth labels (0 or 1) + ignore: label to ignore + """ + if len(labels) == 0: + # only void pixels, the gradients should be 0 + return logits.sum() * 0.0 + signs = 2.0 * labels.float() - 1.0 + errors = 1.0 - logits * signs + errors_sorted, perm = torch.sort(errors, dim=0, descending=True) + perm = perm.data + gt_sorted = labels[perm] + grad = _lovasz_grad(gt_sorted) + loss = torch.dot(F.relu(errors_sorted), grad) + return loss + + +def _flatten_binary_scores(scores, labels, ignore=None): + """Flattens predictions in the batch (binary case) + Remove labels equal to 'ignore' + """ + scores = scores.view(-1) + labels = labels.view(-1) + if ignore is None: + return scores, labels + valid = labels != ignore + vscores = scores[valid] + vlabels = labels[valid] + return vscores, vlabels + + +# --------------------------- MULTICLASS LOSSES --------------------------- + + +def _lovasz_softmax(probas, labels, classes="present", per_image=False, ignore=None): + """Multi-class Lovasz-Softmax loss + Args: + @param probas: [B, C, H, W] Class probabilities at each prediction (between 0 and 1). + Interpreted as binary (sigmoid) output with outputs of size [B, H, W]. + @param labels: [B, H, W] Tensor, ground truth labels (between 0 and C - 1) + @param classes: 'all' for all, 'present' for classes present in labels, or a list of classes to average. + @param per_image: compute the loss per image instead of per batch + @param ignore: void class labels + """ + if per_image: + loss = mean( + _lovasz_softmax_flat(*_flatten_probas(prob.unsqueeze(0), lab.unsqueeze(0), ignore), classes=classes) + for prob, lab in zip(probas, labels) + ) + else: + loss = _lovasz_softmax_flat(*_flatten_probas(probas, labels, ignore), classes=classes) + return loss + + +def _lovasz_softmax_flat(probas, labels, classes="present"): + """Multi-class Lovasz-Softmax loss + Args: + @param probas: [P, C] Class probabilities at each prediction (between 0 and 1) + @param labels: [P] Tensor, ground truth labels (between 0 and C - 1) + @param classes: 'all' for all, 'present' for classes present in labels, or a list of classes to average. 
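Alongside these Lovasz helpers, a quick sketch of how the JaccardLoss defined earlier is typically driven; the `classes` filter and the `log_loss` switch are the options that change the returned value the most. The import root is an assumption about how this package is installed:

import torch
from segmentation_models_pytorch_example.losses.constants import MULTICLASS_MODE
from segmentation_models_pytorch_example.losses.jaccard import JaccardLoss

logits = torch.randn(2, 3, 64, 64, requires_grad=True)   # 3-class logits
target = torch.randint(0, 3, (2, 64, 64))                # integer mask

loss_all = JaccardLoss(mode=MULTICLASS_MODE)(logits, target)   # averaged (1 - soft IoU) across classes

# Score only the foreground classes and switch to -log(jaccard)
criterion_fg = JaccardLoss(mode=MULTICLASS_MODE, classes=[1, 2], log_loss=True)
loss_fg = criterion_fg(logits, target)
loss_fg.backward()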
+ """ + if probas.numel() == 0: + # only void pixels, the gradients should be 0 + return probas * 0.0 + C = probas.size(1) + losses = [] + class_to_sum = list(range(C)) if classes in ["all", "present"] else classes + for c in class_to_sum: + fg = (labels == c).type_as(probas) # foreground for class c + if classes == "present" and fg.sum() == 0: + continue + if C == 1: + if len(classes) > 1: + raise ValueError("Sigmoid output possible only with 1 class") + class_pred = probas[:, 0] + else: + class_pred = probas[:, c] + errors = (fg - class_pred).abs() + errors_sorted, perm = torch.sort(errors, 0, descending=True) + perm = perm.data + fg_sorted = fg[perm] + losses.append(torch.dot(errors_sorted, _lovasz_grad(fg_sorted))) + return mean(losses) + + +def _flatten_probas(probas, labels, ignore=None): + """Flattens predictions in the batch + """ + if probas.dim() == 3: + # assumes output of a sigmoid layer + B, H, W = probas.size() + probas = probas.view(B, 1, H, W) + + C = probas.size(1) + probas = torch.movedim(probas, 1, -1) # [B, C, Di, Dj, ...] -> [B, Di, Dj, ..., C] + probas = probas.contiguous().view(-1, C) # [P, C] + + labels = labels.view(-1) + if ignore is None: + return probas, labels + valid = labels != ignore + vprobas = probas[valid] + vlabels = labels[valid] + return vprobas, vlabels + + +# --------------------------- HELPER FUNCTIONS --------------------------- +def isnan(x): + return x != x + + +def mean(values, ignore_nan=False, empty=0): + """Nanmean compatible with generators. + """ + values = iter(values) + if ignore_nan: + values = ifilterfalse(isnan, values) + try: + n = 1 + acc = next(values) + except StopIteration: + if empty == "raise": + raise ValueError("Empty mean") + return empty + for n, v in enumerate(values, 2): + acc += v + if n == 1: + return acc + return acc / n + + +class LovaszLoss(_Loss): + def __init__( + self, + mode: str, + per_image: bool = False, + ignore_index: Optional[int] = None, + from_logits: bool = True, + ): + """Implementation of Lovasz loss for image segmentation task. 
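As a usage sketch for this class (import paths assumed as in the other examples): the binary and multilabel paths pass raw logits straight to the hinge variant, while the multiclass path applies softmax internally and expects an integer mask.

import torch
from segmentation_models_pytorch_example.losses.constants import BINARY_MODE, MULTICLASS_MODE
from segmentation_models_pytorch_example.losses.lovasz import LovaszLoss

# Binary: logits of shape (N, H, W) against a {0, 1} mask
bin_logits = torch.randn(4, 64, 64, requires_grad=True)
bin_target = torch.randint(0, 2, (4, 64, 64)).float()
bin_loss = LovaszLoss(mode=BINARY_MODE, per_image=True)(bin_logits, bin_target)

# Multiclass: logits (N, C, H, W), targets are class indices
mc_logits = torch.randn(4, 3, 64, 64, requires_grad=True)
mc_target = torch.randint(0, 3, (4, 64, 64))
mc_loss = LovaszLoss(mode=MULTICLASS_MODE, ignore_index=255)(mc_logits, mc_target)
mc_loss.backward()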
+ It supports binary, multiclass and multilabel cases + + Args: + mode: Loss mode 'binary', 'multiclass' or 'multilabel' + ignore_index: Label that indicates ignored pixels (does not contribute to loss) + per_image: If True loss computed per each image and then averaged, else computed per whole batch + + Shape + - **y_pred** - torch.Tensor of shape (N, C, H, W) + - **y_true** - torch.Tensor of shape (N, H, W) or (N, C, H, W) + + Reference + https://github.com/BloodAxe/pytorch-toolbelt + """ + assert mode in {BINARY_MODE, MULTILABEL_MODE, MULTICLASS_MODE} + super().__init__() + + self.mode = mode + self.ignore_index = ignore_index + self.per_image = per_image + + def forward(self, y_pred, y_true): + + if self.mode in {BINARY_MODE, MULTILABEL_MODE}: + loss = _lovasz_hinge(y_pred, y_true, per_image=self.per_image, ignore=self.ignore_index) + elif self.mode == MULTICLASS_MODE: + y_pred = y_pred.softmax(dim=1) + loss = _lovasz_softmax(y_pred, y_true, per_image=self.per_image, ignore=self.ignore_index) + else: + raise ValueError("Wrong mode {}.".format(self.mode)) + return loss diff --git a/segmentation_models_pytorch_example/losses/soft_bce.py b/segmentation_models_pytorch_example/losses/soft_bce.py new file mode 100644 index 0000000000000000000000000000000000000000..b48d67608fd4f2cd565d382434968777fb263bd4 --- /dev/null +++ b/segmentation_models_pytorch_example/losses/soft_bce.py @@ -0,0 +1,72 @@ +from typing import Optional + +import torch +import torch.nn.functional as F +from torch import nn, Tensor + +__all__ = ["SoftBCEWithLogitsLoss"] + + +class SoftBCEWithLogitsLoss(nn.Module): + + __constants__ = ["weight", "pos_weight", "reduction", "ignore_index", "smooth_factor"] + + def __init__( + self, + weight: Optional[torch.Tensor] = None, + ignore_index: Optional[int] = -100, + reduction: str = "mean", + smooth_factor: Optional[float] = None, + pos_weight: Optional[torch.Tensor] = None, + ): + """Drop-in replacement for torch.nn.BCEWithLogitsLoss with few additions: ignore_index and label_smoothing + + Args: + ignore_index: Specifies a target value that is ignored and does not contribute to the input gradient. + smooth_factor: Factor to smooth target (e.g. 
if smooth_factor=0.1 then [1, 0, 1] -> [0.9, 0.1, 0.9]) + + Shape + - **y_pred** - torch.Tensor of shape NxCxHxW + - **y_true** - torch.Tensor of shape NxHxW or Nx1xHxW + + Reference + https://github.com/BloodAxe/pytorch-toolbelt + + """ + super().__init__() + self.ignore_index = ignore_index + self.reduction = reduction + self.smooth_factor = smooth_factor + self.register_buffer("weight", weight) + self.register_buffer("pos_weight", pos_weight) + + def forward(self, y_pred: torch.Tensor, y_true: torch.Tensor) -> torch.Tensor: + """ + Args: + y_pred: torch.Tensor of shape (N, C, H, W) + y_true: torch.Tensor of shape (N, H, W) or (N, 1, H, W) + + Returns: + loss: torch.Tensor + """ + + if self.smooth_factor is not None: + soft_targets = (1 - y_true) * self.smooth_factor + y_true * (1 - self.smooth_factor) + else: + soft_targets = y_true + + loss = F.binary_cross_entropy_with_logits( + y_pred, soft_targets, self.weight, pos_weight=self.pos_weight, reduction="none" + ) + + if self.ignore_index is not None: + not_ignored_mask = y_true != self.ignore_index + loss *= not_ignored_mask.type_as(loss) + + if self.reduction == "mean": + loss = loss.mean() + + if self.reduction == "sum": + loss = loss.sum() + + return loss diff --git a/segmentation_models_pytorch_example/losses/soft_ce.py b/segmentation_models_pytorch_example/losses/soft_ce.py new file mode 100644 index 0000000000000000000000000000000000000000..dd196104dbef2c2faa48118c1512a3e6c7cb805b --- /dev/null +++ b/segmentation_models_pytorch_example/losses/soft_ce.py @@ -0,0 +1,48 @@ +from typing import Optional +from torch import nn, Tensor +import torch +import torch.nn.functional as F +from ._functional import label_smoothed_nll_loss + +__all__ = ["SoftCrossEntropyLoss"] + + +class SoftCrossEntropyLoss(nn.Module): + + __constants__ = ["reduction", "ignore_index", "smooth_factor"] + + def __init__( + self, + reduction: str = "mean", + smooth_factor: Optional[float] = None, + ignore_index: Optional[int] = -100, + dim: int = 1, + ): + """Drop-in replacement for torch.nn.CrossEntropyLoss with label_smoothing + + Args: + smooth_factor: Factor to smooth target (e.g. if smooth_factor=0.1 then [1, 0, 0] -> [0.9, 0.05, 0.05]) + + Shape + - **y_pred** - torch.Tensor of shape (N, C, H, W) + - **y_true** - torch.Tensor of shape (N, H, W) + + Reference + https://github.com/BloodAxe/pytorch-toolbelt + """ + super().__init__() + self.smooth_factor = smooth_factor + self.ignore_index = ignore_index + self.reduction = reduction + self.dim = dim + + def forward(self, y_pred: torch.Tensor, y_true: torch.Tensor) -> torch.Tensor: + log_prob = F.log_softmax(y_pred, dim=self.dim) + return label_smoothed_nll_loss( + log_prob, + y_true, + epsilon=self.smooth_factor, + ignore_index=self.ignore_index, + reduction=self.reduction, + dim=self.dim, + ) diff --git a/segmentation_models_pytorch_example/losses/tversky.py b/segmentation_models_pytorch_example/losses/tversky.py new file mode 100644 index 0000000000000000000000000000000000000000..919d52b80dc1377e526121d79457bf7790057a47 --- /dev/null +++ b/segmentation_models_pytorch_example/losses/tversky.py @@ -0,0 +1,59 @@ +from typing import List, Optional + +import torch +from ._functional import soft_tversky_score +from .constants import BINARY_MODE, MULTICLASS_MODE, MULTILABEL_MODE +from .dice import DiceLoss + +__all__ = ["TverskyLoss", "TverskyLossFocal"] + + +class TverskyLoss(DiceLoss): + """Implementation of Tversky loss for image segmentation task. + Where TP and FP is weighted by alpha and beta params. 
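The two label-smoothing wrappers above differ only in their target layout: SoftBCEWithLogitsLoss smooths a hard {0, 1} mask with (1 - y) * s + y * (1 - s), while SoftCrossEntropyLoss applies label-smoothed NLL to integer masks. A short sketch with the same assumed import root:

import torch
from segmentation_models_pytorch_example.losses.soft_bce import SoftBCEWithLogitsLoss
from segmentation_models_pytorch_example.losses.soft_ce import SoftCrossEntropyLoss

y_pred = torch.randn(2, 1, 32, 32, requires_grad=True)   # binary logits
y_true = torch.randint(0, 2, (2, 1, 32, 32)).float()     # hard {0, 1} mask

# smooth_factor=0.1 turns targets [1, 0, 1] into [0.9, 0.1, 0.9] before BCE is evaluated
bce = SoftBCEWithLogitsLoss(smooth_factor=0.1)
bce(y_pred, y_true).backward()

# Multiclass counterpart: smoothing is applied inside the NLL term
ce = SoftCrossEntropyLoss(smooth_factor=0.1)
ce_loss = ce(torch.randn(2, 5, 32, 32, requires_grad=True), torch.randint(0, 5, (2, 32, 32)))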
+ With alpha == beta == 0.5, this loss becomes equal DiceLoss. + It supports binary, multiclass and multilabel cases + + Args: + mode: Metric mode {'binary', 'multiclass', 'multilabel'} + classes: Optional list of classes that contribute in loss computation; + By default, all channels are included. + log_loss: If True, loss computed as ``-log(tversky)`` otherwise ``1 - tversky`` + from_logits: If True assumes input is raw logits + smooth: + ignore_index: Label that indicates ignored pixels (does not contribute to loss) + eps: Small epsilon for numerical stability + alpha: Weight constant that penalize model for FPs (False Positives) + beta: Weight constant that penalize model for FNs (False Positives) + gamma: Constant that squares the error function. Defaults to ``1.0`` + + Return: + loss: torch.Tensor + + """ + + def __init__( + self, + mode: str, + classes: List[int] = None, + log_loss: bool = False, + from_logits: bool = True, + smooth: float = 0.0, + ignore_index: Optional[int] = None, + eps: float = 1e-7, + alpha: float = 0.5, + beta: float = 0.5, + gamma: float = 1.0, + ): + + assert mode in {BINARY_MODE, MULTILABEL_MODE, MULTICLASS_MODE} + super().__init__(mode, classes, log_loss, from_logits, smooth, ignore_index, eps) + self.alpha = alpha + self.beta = beta + self.gamma = gamma + + def aggregate_loss(self, loss): + return loss.mean() ** self.gamma + + def compute_score(self, output, target, smooth=0.0, eps=1e-7, dims=None) -> torch.Tensor: + return soft_tversky_score(output, target, self.alpha, self.beta, smooth, eps, dims) diff --git a/segmentation_models_pytorch_example/manet/__init__.py b/segmentation_models_pytorch_example/manet/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f3bdc788d300d6aa95b3894f2bba78214fd437e3 --- /dev/null +++ b/segmentation_models_pytorch_example/manet/__init__.py @@ -0,0 +1 @@ +from .model import MAnet diff --git a/segmentation_models_pytorch_example/manet/decoder.py b/segmentation_models_pytorch_example/manet/decoder.py new file mode 100644 index 0000000000000000000000000000000000000000..818220912def3d3b2df5a66f87ad3146eddf646e --- /dev/null +++ b/segmentation_models_pytorch_example/manet/decoder.py @@ -0,0 +1,189 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from ..base import modules as md + + +class PAB(nn.Module): + def __init__(self, in_channels, out_channels, pab_channels=64): + super(PAB, self).__init__() + # Series of 1x1 conv to generate attention feature maps + self.pab_channels = pab_channels + self.in_channels = in_channels + self.top_conv = nn.Conv2d(in_channels, pab_channels, kernel_size=1) + self.center_conv = nn.Conv2d(in_channels, pab_channels, kernel_size=1) + self.bottom_conv = nn.Conv2d(in_channels, in_channels, kernel_size=3, padding=1) + self.map_softmax = nn.Softmax(dim=1) + self.out_conv = nn.Conv2d(in_channels, in_channels, kernel_size=3, padding=1) + + def forward(self, x): + bsize = x.size()[0] + h = x.size()[2] + w = x.size()[3] + x_top = self.top_conv(x) + x_center = self.center_conv(x) + x_bottom = self.bottom_conv(x) + + x_top = x_top.flatten(2) + x_center = x_center.flatten(2).transpose(1, 2) + x_bottom = x_bottom.flatten(2).transpose(1, 2) + + sp_map = torch.matmul(x_center, x_top) + sp_map = self.map_softmax(sp_map.view(bsize, -1)).view(bsize, h*w, h*w) + sp_map = torch.matmul(sp_map, x_bottom) + sp_map = sp_map.reshape(bsize, self.in_channels, h, w) + x = x + sp_map + x = self.out_conv(x) + return x + + +class MFAB(nn.Module): + def __init__(self, 
in_channels, skip_channels, out_channels, use_batchnorm=True, reduction=16): + # MFAB is just a modified version of SE-blocks, one for skip, one for input + super(MFAB, self).__init__() + self.hl_conv = nn.Sequential( + md.Conv2dReLU( + in_channels, + in_channels, + kernel_size=3, + padding=1, + use_batchnorm=use_batchnorm, + ), + md.Conv2dReLU( + in_channels, + skip_channels, + kernel_size=1, + use_batchnorm=use_batchnorm, + ) + ) + reduced_channels = max(1, skip_channels // reduction) + self.SE_ll = nn.Sequential( + nn.AdaptiveAvgPool2d(1), + nn.Conv2d(skip_channels, reduced_channels, 1), + nn.ReLU(inplace=True), + nn.Conv2d(reduced_channels, skip_channels, 1), + nn.Sigmoid(), + ) + self.SE_hl = nn.Sequential( + nn.AdaptiveAvgPool2d(1), + nn.Conv2d(skip_channels, reduced_channels, 1), + nn.ReLU(inplace=True), + nn.Conv2d(reduced_channels, skip_channels, 1), + nn.Sigmoid(), + ) + self.conv1 = md.Conv2dReLU( + skip_channels + skip_channels, # we transform C-prime form high level to C from skip connection + out_channels, + kernel_size=3, + padding=1, + use_batchnorm=use_batchnorm, + ) + self.conv2 = md.Conv2dReLU( + out_channels, + out_channels, + kernel_size=3, + padding=1, + use_batchnorm=use_batchnorm, + ) + + def forward(self, x, skip=None): + x = self.hl_conv(x) + x = F.interpolate(x, scale_factor=2, mode="nearest") + attention_hl = self.SE_hl(x) + if skip is not None: + attention_ll = self.SE_ll(skip) + attention_hl = attention_hl + attention_ll + x = x * attention_hl + x = torch.cat([x, skip], dim=1) + x = self.conv1(x) + x = self.conv2(x) + return x + + +class DecoderBlock(nn.Module): + def __init__( + self, + in_channels, + skip_channels, + out_channels, + use_batchnorm=True + ): + super().__init__() + self.conv1 = md.Conv2dReLU( + in_channels + skip_channels, + out_channels, + kernel_size=3, + padding=1, + use_batchnorm=use_batchnorm, + ) + self.conv2 = md.Conv2dReLU( + out_channels, + out_channels, + kernel_size=3, + padding=1, + use_batchnorm=use_batchnorm, + ) + + def forward(self, x, skip=None): + x = F.interpolate(x, scale_factor=2, mode="nearest") + if skip is not None: + x = torch.cat([x, skip], dim=1) + x = self.conv1(x) + x = self.conv2(x) + return x + + +class MAnetDecoder(nn.Module): + def __init__( + self, + encoder_channels, + decoder_channels, + n_blocks=5, + reduction=16, + use_batchnorm=True, + pab_channels=64 + ): + super().__init__() + + if n_blocks != len(decoder_channels): + raise ValueError( + "Model depth is {}, but you provide `decoder_channels` for {} blocks.".format( + n_blocks, len(decoder_channels) + ) + ) + + encoder_channels = encoder_channels[1:] # remove first skip with same spatial resolution + encoder_channels = encoder_channels[::-1] # reverse channels to start from head of encoder + + # computing blocks input and output channels + head_channels = encoder_channels[0] + in_channels = [head_channels] + list(decoder_channels[:-1]) + skip_channels = list(encoder_channels[1:]) + [0] + out_channels = decoder_channels + + self.center = PAB(head_channels, head_channels, pab_channels=pab_channels) + + # combine decoder keyword arguments + kwargs = dict(use_batchnorm=use_batchnorm) # no attention type here + blocks = [ + MFAB(in_ch, skip_ch, out_ch, reduction=reduction, **kwargs) if skip_ch > 0 else + DecoderBlock(in_ch, skip_ch, out_ch, **kwargs) + for in_ch, skip_ch, out_ch in zip(in_channels, skip_channels, out_channels) + ] + # for the last we dont have skip connection -> use simple decoder block + self.blocks = nn.ModuleList(blocks) + + def 
forward(self, *features): + + features = features[1:] # remove first skip with same spatial resolution + features = features[::-1] # reverse channels to start from head of encoder + + head = features[0] + skips = features[1:] + + x = self.center(head) + for i, decoder_block in enumerate(self.blocks): + skip = skips[i] if i < len(skips) else None + x = decoder_block(x, skip) + + return x diff --git a/segmentation_models_pytorch_example/manet/model.py b/segmentation_models_pytorch_example/manet/model.py new file mode 100644 index 0000000000000000000000000000000000000000..f835d3b9513386f0e3de3bbe9ac7c5360af99c0b --- /dev/null +++ b/segmentation_models_pytorch_example/manet/model.py @@ -0,0 +1,96 @@ +from typing import Optional, Union, List +from .decoder import MAnetDecoder +from ..encoders import get_encoder +from ..base import SegmentationModel +from ..base import SegmentationHead, ClassificationHead + + +class MAnet(SegmentationModel): + """MAnet_ : Multi-scale Attention Net. The MA-Net can capture rich contextual dependencies based on the attention mechanism, + using two blocks: + - Position-wise Attention Block (PAB), which captures the spatial dependencies between pixels in a global view + - Multi-scale Fusion Attention Block (MFAB), which captures the channel dependencies between any feature map by + multi-scale semantic feature fusion + + Args: + encoder_name: Name of the classification model that will be used as an encoder (a.k.a backbone) + to extract features of different spatial resolution + encoder_depth: A number of stages used in encoder in range [3, 5]. Each stage generate features + two times smaller in spatial dimensions than previous one (e.g. for depth 0 we will have features + with shapes [(N, C, H, W),], for depth 1 - [(N, C, H, W), (N, C, H // 2, W // 2)] and so on). + Default is 5 + encoder_weights: One of **None** (random initialization), **"imagenet"** (pre-training on ImageNet) and + other pretrained weights (see table with available weights for each encoder_name) + decoder_channels: List of integers which specify **in_channels** parameter for convolutions used in decoder. + Length of the list should be the same as **encoder_depth** + decoder_use_batchnorm: If **True**, BatchNorm2d layer between Conv2D and Activation layers + is used. If **"inplace"** InplaceABN will be used, allows to decrease memory consumption. + Available options are **True, False, "inplace"** + decoder_pab_channels: A number of channels for PAB module in decoder. + Default is 64. + in_channels: A number of input channels for the model, default is 3 (RGB images) + classes: A number of classes for output mask (or you can think as a number of channels of output mask) + activation: An activation function to apply after the final convolution layer. + Available options are **"sigmoid"**, **"softmax"**, **"logsoftmax"**, **"tanh"**, **"identity"**, **callable** and **None**. + Default is **None** + aux_params: Dictionary with parameters of the auxiliary output (classification head). Auxiliary output is build + on top of encoder if **aux_params** is not **None** (default). Supported params: + - classes (int): A number of classes + - pooling (str): One of "max", "avg". Default is "avg" + - dropout (float): Dropout factor in [0, 1) + - activation (str): An activation function to apply "sigmoid"/"softmax" (could be **None** to return logits) + + Returns: + ``torch.nn.Module``: **MAnet** + + .. 
_MAnet: + https://ieeexplore.ieee.org/abstract/document/9201310 + + """ + + def __init__( + self, + encoder_name: str = "resnet34", + encoder_depth: int = 5, + encoder_weights: Optional[str] = "imagenet", + decoder_use_batchnorm: bool = True, + decoder_channels: List[int] = (256, 128, 64, 32, 16), + decoder_pab_channels: int = 64, + in_channels: int = 3, + classes: int = 1, + activation: Optional[Union[str, callable]] = None, + aux_params: Optional[dict] = None + ): + super().__init__() + + self.encoder = get_encoder( + encoder_name, + in_channels=in_channels, + depth=encoder_depth, + weights=encoder_weights, + ) + + self.decoder = MAnetDecoder( + encoder_channels=self.encoder.out_channels, + decoder_channels=decoder_channels, + n_blocks=encoder_depth, + use_batchnorm=decoder_use_batchnorm, + pab_channels=decoder_pab_channels + ) + + self.segmentation_head = SegmentationHead( + in_channels=decoder_channels[-1], + out_channels=classes, + activation=activation, + kernel_size=3, + ) + + if aux_params is not None: + self.classification_head = ClassificationHead( + in_channels=self.encoder.out_channels[-1], **aux_params + ) + else: + self.classification_head = None + + self.name = "manet-{}".format(encoder_name) + self.initialize() diff --git a/segmentation_models_pytorch_example/pan/__init__.py b/segmentation_models_pytorch_example/pan/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9917f8f14fdc1bbfe63846412d6f5db774617478 --- /dev/null +++ b/segmentation_models_pytorch_example/pan/__init__.py @@ -0,0 +1 @@ +from .model import PAN \ No newline at end of file diff --git a/segmentation_models_pytorch_example/pan/decoder.py b/segmentation_models_pytorch_example/pan/decoder.py new file mode 100644 index 0000000000000000000000000000000000000000..06493c78f864ca677c5e9bb484fd039ab3498b7b --- /dev/null +++ b/segmentation_models_pytorch_example/pan/decoder.py @@ -0,0 +1,166 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + + +class ConvBnRelu(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + kernel_size: int, + stride: int = 1, + padding: int = 0, + dilation: int = 1, + groups: int = 1, + bias: bool = True, + add_relu: bool = True, + interpolate: bool = False + ): + super(ConvBnRelu, self).__init__() + self.conv = nn.Conv2d( + in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, + stride=stride, padding=padding, dilation=dilation, bias=bias, groups=groups + ) + self.add_relu = add_relu + self.interpolate = interpolate + self.bn = nn.BatchNorm2d(out_channels) + self.activation = nn.ReLU(inplace=True) + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + if self.add_relu: + x = self.activation(x) + if self.interpolate: + x = F.interpolate(x, scale_factor=2, mode='bilinear', align_corners=True) + return x + + +class FPABlock(nn.Module): + def __init__( + self, + in_channels, + out_channels, + upscale_mode='bilinear' + ): + super(FPABlock, self).__init__() + + self.upscale_mode = upscale_mode + if self.upscale_mode == 'bilinear': + self.align_corners = True + else: + self.align_corners = False + + # global pooling branch + self.branch1 = nn.Sequential( + nn.AdaptiveAvgPool2d(1), + ConvBnRelu(in_channels=in_channels, out_channels=out_channels, kernel_size=1, stride=1, padding=0) + ) + + # midddle branch + self.mid = nn.Sequential( + ConvBnRelu(in_channels=in_channels, out_channels=out_channels, kernel_size=1, stride=1, padding=0) + ) + self.down1 = nn.Sequential( + 
nn.MaxPool2d(kernel_size=2, stride=2), + ConvBnRelu(in_channels=in_channels, out_channels=1, kernel_size=7, stride=1, padding=3) + ) + self.down2 = nn.Sequential( + nn.MaxPool2d(kernel_size=2, stride=2), + ConvBnRelu(in_channels=1, out_channels=1, kernel_size=5, stride=1, padding=2) + ) + self.down3 = nn.Sequential( + nn.MaxPool2d(kernel_size=2, stride=2), + ConvBnRelu(in_channels=1, out_channels=1, kernel_size=3, stride=1, padding=1), + ConvBnRelu(in_channels=1, out_channels=1, kernel_size=3, stride=1, padding=1), + ) + self.conv2 = ConvBnRelu(in_channels=1, out_channels=1, kernel_size=5, stride=1, padding=2) + self.conv1 = ConvBnRelu(in_channels=1, out_channels=1, kernel_size=7, stride=1, padding=3) + + def forward(self, x): + h, w = x.size(2), x.size(3) + b1 = self.branch1(x) + upscale_parameters = dict( + mode=self.upscale_mode, + align_corners=self.align_corners + ) + b1 = F.interpolate(b1, size=(h, w), **upscale_parameters) + + mid = self.mid(x) + x1 = self.down1(x) + x2 = self.down2(x1) + x3 = self.down3(x2) + x3 = F.interpolate(x3, size=(h // 4, w // 4), **upscale_parameters) + + x2 = self.conv2(x2) + x = x2 + x3 + x = F.interpolate(x, size=(h // 2, w // 2), **upscale_parameters) + + x1 = self.conv1(x1) + x = x + x1 + x = F.interpolate(x, size=(h, w), **upscale_parameters) + + x = torch.mul(x, mid) + x = x + b1 + return x + + +class GAUBlock(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + upscale_mode: str = 'bilinear' + ): + super(GAUBlock, self).__init__() + + self.upscale_mode = upscale_mode + self.align_corners = True if upscale_mode == 'bilinear' else None + + self.conv1 = nn.Sequential( + nn.AdaptiveAvgPool2d(1), + ConvBnRelu(in_channels=out_channels, out_channels=out_channels, kernel_size=1, add_relu=False), + nn.Sigmoid() + ) + self.conv2 = ConvBnRelu(in_channels=in_channels, out_channels=out_channels, kernel_size=3, padding=1) + + def forward(self, x, y): + """ + Args: + x: low level feature + y: high level feature + """ + h, w = x.size(2), x.size(3) + y_up = F.interpolate( + y, size=(h, w), mode=self.upscale_mode, align_corners=self.align_corners + ) + x = self.conv2(x) + y = self.conv1(y) + z = torch.mul(x, y) + return y_up + z + + +class PANDecoder(nn.Module): + + def __init__( + self, + encoder_channels, + decoder_channels, + upscale_mode: str = 'bilinear' + ): + super().__init__() + + self.fpa = FPABlock(in_channels=encoder_channels[-1], out_channels=decoder_channels) + self.gau3 = GAUBlock(in_channels=encoder_channels[-2], out_channels=decoder_channels, upscale_mode=upscale_mode) + self.gau2 = GAUBlock(in_channels=encoder_channels[-3], out_channels=decoder_channels, upscale_mode=upscale_mode) + self.gau1 = GAUBlock(in_channels=encoder_channels[-4], out_channels=decoder_channels, upscale_mode=upscale_mode) + + def forward(self, *features): + bottleneck = features[-1] + x5 = self.fpa(bottleneck) # 1/32 + x4 = self.gau3(features[-2], x5) # 1/16 + x3 = self.gau2(features[-3], x4) # 1/8 + x2 = self.gau1(features[-4], x3) # 1/4 + + return x2 diff --git a/segmentation_models_pytorch_example/pan/model.py b/segmentation_models_pytorch_example/pan/model.py new file mode 100644 index 0000000000000000000000000000000000000000..e6377195fff049e9c896cabe18d0bc7405367526 --- /dev/null +++ b/segmentation_models_pytorch_example/pan/model.py @@ -0,0 +1,90 @@ +from typing import Optional, Union +from .decoder import PANDecoder +from ..encoders import get_encoder +from ..base import SegmentationModel +from ..base import SegmentationHead, ClassificationHead + 
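To make the FPA and GAU data flow concrete, the sketch below pushes a dummy resnet34-style feature pyramid through the decoder defined above; the channel tuple and the import root are assumptions, and eval() is used so the BatchNorm layers behind the global-pooling branches do not need batch statistics.

import torch
from segmentation_models_pytorch_example.pan.decoder import PANDecoder

feats = [
    torch.randn(2, 3, 256, 256),    # stem / identity features (unused by this decoder)
    torch.randn(2, 64, 128, 128),   # 1/2
    torch.randn(2, 64, 64, 64),     # 1/4  -> gau1
    torch.randn(2, 128, 32, 32),    # 1/8  -> gau2
    torch.randn(2, 256, 16, 16),    # 1/16 -> gau3
    torch.randn(2, 512, 8, 8),      # 1/32 -> FPA bottleneck
]

decoder = PANDecoder(encoder_channels=(3, 64, 64, 128, 256, 512), decoder_channels=32)
decoder.eval()
with torch.no_grad():
    out = decoder(*feats)
print(out.shape)   # torch.Size([2, 32, 64, 64]), i.e. 1/4 of the input resolution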
+ +class PAN(SegmentationModel): + """ Implementation of PAN_ (Pyramid Attention Network). + + Note: + Currently works with shape of input tensor >= [B x C x 128 x 128] for pytorch <= 1.1.0 + and with shape of input tensor >= [B x C x 256 x 256] for pytorch == 1.3.1 + + Args: + encoder_name: Name of the classification model that will be used as an encoder (a.k.a backbone) + to extract features of different spatial resolution + encoder_weights: One of **None** (random initialization), **"imagenet"** (pre-training on ImageNet) and + other pretrained weights (see table with available weights for each encoder_name) + encoder_output_stride: 16 or 32, if 16 use dilation in encoder last layer. + Doesn't work with ***ception***, **vgg***, **densenet*`** backbones.Default is 16. + decoder_channels: A number of convolution layer filters in decoder blocks + in_channels: A number of input channels for the model, default is 3 (RGB images) + classes: A number of classes for output mask (or you can think as a number of channels of output mask) + activation: An activation function to apply after the final convolution layer. + Available options are **"sigmoid"**, **"softmax"**, **"logsoftmax"**, **"tanh"**, **"identity"**, **callable** and **None**. + Default is **None** + upsampling: Final upsampling factor. Default is 4 to preserve input-output spatial shape identity + aux_params: Dictionary with parameters of the auxiliary output (classification head). Auxiliary output is build + on top of encoder if **aux_params** is not **None** (default). Supported params: + - classes (int): A number of classes + - pooling (str): One of "max", "avg". Default is "avg" + - dropout (float): Dropout factor in [0, 1) + - activation (str): An activation function to apply "sigmoid"/"softmax" (could be **None** to return logits) + + Returns: + ``torch.nn.Module``: **PAN** + + .. 
_PAN: + https://arxiv.org/abs/1805.10180 + + """ + + def __init__( + self, + encoder_name: str = "resnet34", + encoder_weights: Optional[str] = "imagenet", + encoder_output_stride: int = 16, + decoder_channels: int = 32, + in_channels: int = 3, + classes: int = 1, + activation: Optional[Union[str, callable]] = None, + upsampling: int = 4, + aux_params: Optional[dict] = None + ): + super().__init__() + + if encoder_output_stride not in [16, 32]: + raise ValueError("PAN support output stride 16 or 32, got {}".format(encoder_output_stride)) + + self.encoder = get_encoder( + encoder_name, + in_channels=in_channels, + depth=5, + weights=encoder_weights, + output_stride=encoder_output_stride, + ) + + self.decoder = PANDecoder( + encoder_channels=self.encoder.out_channels, + decoder_channels=decoder_channels, + ) + + self.segmentation_head = SegmentationHead( + in_channels=decoder_channels, + out_channels=classes, + activation=activation, + kernel_size=3, + upsampling=upsampling + ) + + if aux_params is not None: + self.classification_head = ClassificationHead( + in_channels=self.encoder.out_channels[-1], **aux_params + ) + else: + self.classification_head = None + + self.name = "pan-{}".format(encoder_name) + self.initialize() diff --git a/segmentation_models_pytorch_example/pspnet/__init__.py b/segmentation_models_pytorch_example/pspnet/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4ac0664d91b5ea14c666032fb42696595aee4199 --- /dev/null +++ b/segmentation_models_pytorch_example/pspnet/__init__.py @@ -0,0 +1 @@ +from .model import PSPNet \ No newline at end of file diff --git a/segmentation_models_pytorch_example/pspnet/decoder.py b/segmentation_models_pytorch_example/pspnet/decoder.py new file mode 100644 index 0000000000000000000000000000000000000000..79feba851cac561426164e53dac351be22b59608 --- /dev/null +++ b/segmentation_models_pytorch_example/pspnet/decoder.py @@ -0,0 +1,72 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from ..base import modules + + +class PSPBlock(nn.Module): + + def __init__(self, in_channels, out_channels, pool_size, use_bathcnorm=True): + super().__init__() + if pool_size == 1: + use_bathcnorm = False # PyTorch does not support BatchNorm for 1x1 shape + self.pool = nn.Sequential( + nn.AdaptiveAvgPool2d(output_size=(pool_size, pool_size)), + modules.Conv2dReLU(in_channels, out_channels, (1, 1), use_batchnorm=use_bathcnorm) + ) + + def forward(self, x): + h, w = x.size(2), x.size(3) + x = self.pool(x) + x = F.interpolate(x, size=(h, w), mode='bilinear', align_corners=True) + return x + + +class PSPModule(nn.Module): + def __init__(self, in_channels, sizes=(1, 2, 3, 6), use_bathcnorm=True): + super().__init__() + + self.blocks = nn.ModuleList([ + PSPBlock(in_channels, in_channels // len(sizes), size, use_bathcnorm=use_bathcnorm) for size in sizes + ]) + + def forward(self, x): + xs = [block(x) for block in self.blocks] + [x] + x = torch.cat(xs, dim=1) + return x + + +class PSPDecoder(nn.Module): + + def __init__( + self, + encoder_channels, + use_batchnorm=True, + out_channels=512, + dropout=0.2, + ): + super().__init__() + + self.psp = PSPModule( + in_channels=encoder_channels[-1], + sizes=(1, 2, 3, 6), + use_bathcnorm=use_batchnorm, + ) + + self.conv = modules.Conv2dReLU( + in_channels=encoder_channels[-1] * 2, + out_channels=out_channels, + kernel_size=1, + use_batchnorm=use_batchnorm, + ) + + self.dropout = nn.Dropout2d(p=dropout) + + def forward(self, *features): + x = features[-1] + x = self.psp(x) + x 
= self.conv(x) + x = self.dropout(x) + + return x diff --git a/segmentation_models_pytorch_example/pspnet/model.py b/segmentation_models_pytorch_example/pspnet/model.py new file mode 100644 index 0000000000000000000000000000000000000000..0e176d24a5485b7c10302cafd75a5630454dc0e7 --- /dev/null +++ b/segmentation_models_pytorch_example/pspnet/model.py @@ -0,0 +1,96 @@ +from typing import Optional, Union + +from .decoder import PSPDecoder +from ..encoders import get_encoder + +from ..base import SegmentationModel +from ..base import SegmentationHead, ClassificationHead + + +class PSPNet(SegmentationModel): + """PSPNet_ is a fully convolution neural network for image semantic segmentation. Consist of + *encoder* and *Spatial Pyramid* (decoder). Spatial Pyramid build on top of encoder and does not + use "fine-features" (features of high spatial resolution). PSPNet can be used for multiclass segmentation + of high resolution images, however it is not good for detecting small objects and producing accurate, pixel-level mask. + + Args: + encoder_name: Name of the classification model that will be used as an encoder (a.k.a backbone) + to extract features of different spatial resolution + encoder_depth: A number of stages used in encoder in range [3, 5]. Each stage generate features + two times smaller in spatial dimensions than previous one (e.g. for depth 0 we will have features + with shapes [(N, C, H, W),], for depth 1 - [(N, C, H, W), (N, C, H // 2, W // 2)] and so on). + Default is 5 + encoder_weights: One of **None** (random initialization), **"imagenet"** (pre-training on ImageNet) and + other pretrained weights (see table with available weights for each encoder_name) + psp_out_channels: A number of filters in Spatial Pyramid + psp_use_batchnorm: If **True**, BatchNorm2d layer between Conv2D and Activation layers + is used. If **"inplace"** InplaceABN will be used, allows to decrease memory consumption. + Available options are **True, False, "inplace"** + psp_dropout: Spatial dropout rate in [0, 1) used in Spatial Pyramid + in_channels: A number of input channels for the model, default is 3 (RGB images) + classes: A number of classes for output mask (or you can think as a number of channels of output mask) + activation: An activation function to apply after the final convolution layer. + Available options are **"sigmoid"**, **"softmax"**, **"logsoftmax"**, **"tanh"**, **"identity"**, **callable** and **None**. + Default is **None** + upsampling: Final upsampling factor. Default is 8 to preserve input-output spatial shape identity + aux_params: Dictionary with parameters of the auxiliary output (classification head). Auxiliary output is build + on top of encoder if **aux_params** is not **None** (default). Supported params: + - classes (int): A number of classes + - pooling (str): One of "max", "avg". Default is "avg" + - dropout (float): Dropout factor in [0, 1) + - activation (str): An activation function to apply "sigmoid"/"softmax" (could be **None** to return logits) + + Returns: + ``torch.nn.Module``: **PSPNet** + + .. 
_PSPNet: + https://arxiv.org/abs/1612.01105 + """ + + def __init__( + self, + encoder_name: str = "resnet34", + encoder_weights: Optional[str] = "imagenet", + encoder_depth: int = 3, + psp_out_channels: int = 512, + psp_use_batchnorm: bool = True, + psp_dropout: float = 0.2, + in_channels: int = 3, + classes: int = 1, + activation: Optional[Union[str, callable]] = None, + upsampling: int = 8, + aux_params: Optional[dict] = None, + ): + super().__init__() + + self.encoder = get_encoder( + encoder_name, + in_channels=in_channels, + depth=encoder_depth, + weights=encoder_weights, + ) + + self.decoder = PSPDecoder( + encoder_channels=self.encoder.out_channels, + use_batchnorm=psp_use_batchnorm, + out_channels=psp_out_channels, + dropout=psp_dropout, + ) + + self.segmentation_head = SegmentationHead( + in_channels=psp_out_channels, + out_channels=classes, + kernel_size=3, + activation=activation, + upsampling=upsampling, + ) + + if aux_params: + self.classification_head = ClassificationHead( + in_channels=self.encoder.out_channels[-1], **aux_params + ) + else: + self.classification_head = None + + self.name = "psp-{}".format(encoder_name) + self.initialize() diff --git a/segmentation_models_pytorch_example/unet/__init__.py b/segmentation_models_pytorch_example/unet/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ba6878e0c9f397907149a4dc6f96b6020e2a7f22 --- /dev/null +++ b/segmentation_models_pytorch_example/unet/__init__.py @@ -0,0 +1 @@ +from .model import Unet \ No newline at end of file diff --git a/segmentation_models_pytorch_example/unet/decoder.py b/segmentation_models_pytorch_example/unet/decoder.py new file mode 100644 index 0000000000000000000000000000000000000000..8657b7ee7343309e86253a3ed2a6d038d01f538d --- /dev/null +++ b/segmentation_models_pytorch_example/unet/decoder.py @@ -0,0 +1,121 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from ..base import modules as md + + +class DecoderBlock(nn.Module): + def __init__( + self, + in_channels, + skip_channels, + out_channels, + use_batchnorm=True, + attention_type=None, + ): + super().__init__() + self.conv1 = md.Conv2dReLU( + in_channels + skip_channels, + out_channels, + kernel_size=3, + padding=1, + use_batchnorm=use_batchnorm, + ) + self.attention1 = md.Attention(attention_type, in_channels=in_channels + skip_channels) + self.conv2 = md.Conv2dReLU( + out_channels, + out_channels, + kernel_size=3, + padding=1, + use_batchnorm=use_batchnorm, + ) + self.attention2 = md.Attention(attention_type, in_channels=out_channels) + + def forward(self, x, skip=None): + x = F.interpolate(x, scale_factor=2, mode="nearest") + if skip is not None: + x = torch.cat([x, skip], dim=1) + x = self.attention1(x) + x = self.conv1(x) + x = self.conv2(x) + x = self.attention2(x) + return x + + +class CenterBlock(nn.Sequential): + def __init__(self, in_channels, out_channels, use_batchnorm=True): + conv1 = md.Conv2dReLU( + in_channels, + out_channels, + kernel_size=3, + padding=1, + use_batchnorm=use_batchnorm, + ) + conv2 = md.Conv2dReLU( + out_channels, + out_channels, + kernel_size=3, + padding=1, + use_batchnorm=use_batchnorm, + ) + super().__init__(conv1, conv2) + + +class UnetDecoder(nn.Module): + def __init__( + self, + encoder_channels, + decoder_channels, + n_blocks=5, + use_batchnorm=True, + attention_type=None, + center=False, + ): + super().__init__() + + if n_blocks != len(decoder_channels): + raise ValueError( + "Model depth is {}, but you provide `decoder_channels` for {} 
blocks.".format( + n_blocks, len(decoder_channels) + ) + ) + + encoder_channels = encoder_channels[1:] # remove first skip with same spatial resolution + encoder_channels = encoder_channels[::-1] # reverse channels to start from head of encoder + + # computing blocks input and output channels + head_channels = encoder_channels[0] + in_channels = [head_channels] + list(decoder_channels[:-1]) + skip_channels = list(encoder_channels[1:]) + [0] + out_channels = decoder_channels + + if center: + self.center = CenterBlock( + head_channels, head_channels, use_batchnorm=use_batchnorm + ) + else: + self.center = nn.Identity() + + # combine decoder keyword arguments + kwargs = dict(use_batchnorm=use_batchnorm, attention_type=attention_type) + blocks = [ + DecoderBlock(in_ch, skip_ch, out_ch, **kwargs) + for in_ch, skip_ch, out_ch in zip(in_channels, skip_channels, out_channels) + ] + self.blocks = nn.ModuleList(blocks) + + def forward(self, *features): + + features = features[1:] # remove first skip with same spatial resolution + features = features[::-1] # reverse channels to start from head of encoder + + head = features[0] + skips = features[1:] + + x = self.center(head) + for i, decoder_block in enumerate(self.blocks): + skip = skips[i] if i < len(skips) else None + x = decoder_block(x, skip) + + return x diff --git a/segmentation_models_pytorch_example/unet/model.py b/segmentation_models_pytorch_example/unet/model.py new file mode 100644 index 0000000000000000000000000000000000000000..3da1696c6e3b61ddb6e65e79fd30faea7176ab59 --- /dev/null +++ b/segmentation_models_pytorch_example/unet/model.py @@ -0,0 +1,96 @@ +from typing import Optional, Union, List +from .decoder import UnetDecoder +from ..encoders import get_encoder +from ..base import SegmentationModel +from ..base import SegmentationHead, ClassificationHead + + +class Unet(SegmentationModel): + """Unet_ is a fully convolution neural network for image semantic segmentation. Consist of *encoder* + and *decoder* parts connected with *skip connections*. Encoder extract features of different spatial + resolution (skip connections) which are used by decoder to define accurate segmentation mask. Use *concatenation* + for fusing decoder blocks with skip connections. + + Args: + encoder_name: Name of the classification model that will be used as an encoder (a.k.a backbone) + to extract features of different spatial resolution + encoder_depth: A number of stages used in encoder in range [3, 5]. Each stage generate features + two times smaller in spatial dimensions than previous one (e.g. for depth 0 we will have features + with shapes [(N, C, H, W),], for depth 1 - [(N, C, H, W), (N, C, H // 2, W // 2)] and so on). + Default is 5 + encoder_weights: One of **None** (random initialization), **"imagenet"** (pre-training on ImageNet) and + other pretrained weights (see table with available weights for each encoder_name) + decoder_channels: List of integers which specify **in_channels** parameter for convolutions used in decoder. + Length of the list should be the same as **encoder_depth** + decoder_use_batchnorm: If **True**, BatchNorm2d layer between Conv2D and Activation layers + is used. If **"inplace"** InplaceABN will be used, allows to decrease memory consumption. + Available options are **True, False, "inplace"** + decoder_attention_type: Attention module used in decoder of the model. Available options are **None** and **scse**. 
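A construction sketch for the model documented here: encoder_weights=None keeps the example offline, and the (mask, label) return value assumes the vendored base SegmentationModel behaves like upstream segmentation_models_pytorch when aux_params attaches a classification head.

import torch
from segmentation_models_pytorch_example.unet import Unet

model = Unet(
    encoder_name="resnet34",
    encoder_weights=None,          # skip the ImageNet download for a quick smoke test
    in_channels=3,
    classes=4,                     # 4-channel output mask
    decoder_attention_type="scse",
    aux_params=dict(classes=2, pooling="avg", dropout=0.2),
)

x = torch.randn(2, 3, 256, 256)
mask, label = model(x)             # assumption: the aux head makes forward return (mask, label)
print(mask.shape, label.shape)     # torch.Size([2, 4, 256, 256]) torch.Size([2, 2])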
+ SCSE paper - https://arxiv.org/abs/1808.08127 + in_channels: A number of input channels for the model, default is 3 (RGB images) + classes: A number of classes for output mask (or you can think as a number of channels of output mask) + activation: An activation function to apply after the final convolution layer. + Available options are **"sigmoid"**, **"softmax"**, **"logsoftmax"**, **"tanh"**, **"identity"**, **callable** and **None**. + Default is **None** + aux_params: Dictionary with parameters of the auxiliary output (classification head). Auxiliary output is build + on top of encoder if **aux_params** is not **None** (default). Supported params: + - classes (int): A number of classes + - pooling (str): One of "max", "avg". Default is "avg" + - dropout (float): Dropout factor in [0, 1) + - activation (str): An activation function to apply "sigmoid"/"softmax" (could be **None** to return logits) + + Returns: + ``torch.nn.Module``: Unet + + .. _Unet: + https://arxiv.org/abs/1505.04597 + + """ + + def __init__( + self, + encoder_name: str = "resnet34", + encoder_depth: int = 5, + encoder_weights: Optional[str] = "imagenet", + decoder_use_batchnorm: bool = True, + decoder_channels: List[int] = (256, 128, 64, 32, 16), + decoder_attention_type: Optional[str] = None, + in_channels: int = 3, + classes: int = 1, + activation: Optional[Union[str, callable]] = None, + aux_params: Optional[dict] = None, + ): + super().__init__() + + self.encoder = get_encoder( + encoder_name, + in_channels=in_channels, + depth=encoder_depth, + weights=encoder_weights, + ) + + self.decoder = UnetDecoder( + encoder_channels=self.encoder.out_channels, + decoder_channels=decoder_channels, + n_blocks=encoder_depth, + use_batchnorm=decoder_use_batchnorm, + center=True if encoder_name.startswith("vgg") else False, + attention_type=decoder_attention_type, + ) + + self.segmentation_head = SegmentationHead( + in_channels=decoder_channels[-1], + out_channels=classes, + activation=activation, + kernel_size=3, + ) + + if aux_params is not None: + self.classification_head = ClassificationHead( + in_channels=self.encoder.out_channels[-1], **aux_params + ) + else: + self.classification_head = None + + self.name = "u-{}".format(encoder_name) + self.initialize() diff --git a/segmentation_models_pytorch_example/unetplusplus/__init__.py b/segmentation_models_pytorch_example/unetplusplus/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bda62b70a30d92622616f7279d153bc4b68f6b54 --- /dev/null +++ b/segmentation_models_pytorch_example/unetplusplus/__init__.py @@ -0,0 +1 @@ +from .model import UnetPlusPlus diff --git a/segmentation_models_pytorch_example/unetplusplus/decoder.py b/segmentation_models_pytorch_example/unetplusplus/decoder.py new file mode 100644 index 0000000000000000000000000000000000000000..f82cd60140d980cd28fcc8ae016a16487056fb7a --- /dev/null +++ b/segmentation_models_pytorch_example/unetplusplus/decoder.py @@ -0,0 +1,136 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from ..base import modules as md + + +class DecoderBlock(nn.Module): + def __init__( + self, + in_channels, + skip_channels, + out_channels, + use_batchnorm=True, + attention_type=None, + ): + super().__init__() + self.conv1 = md.Conv2dReLU( + in_channels + skip_channels, + out_channels, + kernel_size=3, + padding=1, + use_batchnorm=use_batchnorm, + ) + self.attention1 = md.Attention(attention_type, in_channels=in_channels + skip_channels) + self.conv2 = md.Conv2dReLU( + out_channels, + 
out_channels, + kernel_size=3, + padding=1, + use_batchnorm=use_batchnorm, + ) + self.attention2 = md.Attention(attention_type, in_channels=out_channels) + + def forward(self, x, skip=None): + x = F.interpolate(x, scale_factor=2, mode="nearest") + if skip is not None: + x = torch.cat([x, skip], dim=1) + x = self.attention1(x) + x = self.conv1(x) + x = self.conv2(x) + x = self.attention2(x) + return x + + +class CenterBlock(nn.Sequential): + def __init__(self, in_channels, out_channels, use_batchnorm=True): + conv1 = md.Conv2dReLU( + in_channels, + out_channels, + kernel_size=3, + padding=1, + use_batchnorm=use_batchnorm, + ) + conv2 = md.Conv2dReLU( + out_channels, + out_channels, + kernel_size=3, + padding=1, + use_batchnorm=use_batchnorm, + ) + super().__init__(conv1, conv2) + + +class UnetPlusPlusDecoder(nn.Module): + def __init__( + self, + encoder_channels, + decoder_channels, + n_blocks=5, + use_batchnorm=True, + attention_type=None, + center=False, + ): + super().__init__() + + if n_blocks != len(decoder_channels): + raise ValueError( + "Model depth is {}, but you provide `decoder_channels` for {} blocks.".format( + n_blocks, len(decoder_channels) + ) + ) + + encoder_channels = encoder_channels[1:] # remove first skip with same spatial resolution + encoder_channels = encoder_channels[::-1] # reverse channels to start from head of encoder + # computing blocks input and output channels + head_channels = encoder_channels[0] + self.in_channels = [head_channels] + list(decoder_channels[:-1]) + self.skip_channels = list(encoder_channels[1:]) + [0] + self.out_channels = decoder_channels + if center: + self.center = CenterBlock( + head_channels, head_channels, use_batchnorm=use_batchnorm + ) + else: + self.center = nn.Identity() + + # combine decoder keyword arguments + kwargs = dict(use_batchnorm=use_batchnorm, attention_type=attention_type) + + blocks = {} + for layer_idx in range(len(self.in_channels) - 1): + for depth_idx in range(layer_idx+1): + if depth_idx == 0: + in_ch = self.in_channels[layer_idx] + skip_ch = self.skip_channels[layer_idx] * (layer_idx+1) + out_ch = self.out_channels[layer_idx] + else: + out_ch = self.skip_channels[layer_idx] + skip_ch = self.skip_channels[layer_idx] * (layer_idx+1-depth_idx) + in_ch = self.skip_channels[layer_idx - 1] + blocks[f'x_{depth_idx}_{layer_idx}'] = DecoderBlock(in_ch, skip_ch, out_ch, **kwargs) + blocks[f'x_{0}_{len(self.in_channels)-1}'] =\ + DecoderBlock(self.in_channels[-1], 0, self.out_channels[-1], **kwargs) + self.blocks = nn.ModuleDict(blocks) + self.depth = len(self.in_channels) - 1 + + def forward(self, *features): + + features = features[1:] # remove first skip with same spatial resolution + features = features[::-1] # reverse channels to start from head of encoder + # start building dense connections + dense_x = {} + for layer_idx in range(len(self.in_channels)-1): + for depth_idx in range(self.depth-layer_idx): + if layer_idx == 0: + output = self.blocks[f'x_{depth_idx}_{depth_idx}'](features[depth_idx], features[depth_idx+1]) + dense_x[f'x_{depth_idx}_{depth_idx}'] = output + else: + dense_l_i = depth_idx + layer_idx + cat_features = [dense_x[f'x_{idx}_{dense_l_i}'] for idx in range(depth_idx+1, dense_l_i+1)] + cat_features = torch.cat(cat_features + [features[dense_l_i+1]], dim=1) + dense_x[f'x_{depth_idx}_{dense_l_i}'] =\ + self.blocks[f'x_{depth_idx}_{dense_l_i}'](dense_x[f'x_{depth_idx}_{dense_l_i-1}'], cat_features) + dense_x[f'x_{0}_{self.depth}'] = self.blocks[f'x_{0}_{self.depth}'](dense_x[f'x_{0}_{self.depth-1}']) + 
return dense_x[f'x_{0}_{self.depth}'] diff --git a/segmentation_models_pytorch_example/unetplusplus/model.py b/segmentation_models_pytorch_example/unetplusplus/model.py new file mode 100644 index 0000000000000000000000000000000000000000..54fe4e8c485c16f1c29d6f380c5f6ecdcfd22e78 --- /dev/null +++ b/segmentation_models_pytorch_example/unetplusplus/model.py @@ -0,0 +1,96 @@ +from typing import Optional, Union, List +from .decoder import UnetPlusPlusDecoder +from ..encoders import get_encoder +from ..base import SegmentationModel +from ..base import SegmentationHead, ClassificationHead + + +class UnetPlusPlus(SegmentationModel): + """Unet++ is a fully convolution neural network for image semantic segmentation. Consist of *encoder* + and *decoder* parts connected with *skip connections*. Encoder extract features of different spatial + resolution (skip connections) which are used by decoder to define accurate segmentation mask. Decoder of + Unet++ is more complex than in usual Unet. + + Args: + encoder_name: Name of the classification model that will be used as an encoder (a.k.a backbone) + to extract features of different spatial resolution + encoder_depth: A number of stages used in encoder in range [3, 5]. Each stage generate features + two times smaller in spatial dimensions than previous one (e.g. for depth 0 we will have features + with shapes [(N, C, H, W),], for depth 1 - [(N, C, H, W), (N, C, H // 2, W // 2)] and so on). + Default is 5 + encoder_weights: One of **None** (random initialization), **"imagenet"** (pre-training on ImageNet) and + other pretrained weights (see table with available weights for each encoder_name) + decoder_channels: List of integers which specify **in_channels** parameter for convolutions used in decoder. + Length of the list should be the same as **encoder_depth** + decoder_use_batchnorm: If **True**, BatchNorm2d layer between Conv2D and Activation layers + is used. If **"inplace"** InplaceABN will be used, allows to decrease memory consumption. + Available options are **True, False, "inplace"** + decoder_attention_type: Attention module used in decoder of the model. Available options are **None** and **scse**. + SCSE paper - https://arxiv.org/abs/1808.08127 + in_channels: A number of input channels for the model, default is 3 (RGB images) + classes: A number of classes for output mask (or you can think as a number of channels of output mask) + activation: An activation function to apply after the final convolution layer. + Available options are **"sigmoid"**, **"softmax"**, **"logsoftmax"**, **"tanh"**, **"identity"**, **callable** and **None**. + Default is **None** + aux_params: Dictionary with parameters of the auxiliary output (classification head). Auxiliary output is build + on top of encoder if **aux_params** is not **None** (default). Supported params: + - classes (int): A number of classes + - pooling (str): One of "max", "avg". 
Default is "avg" + - dropout (float): Dropout factor in [0, 1) + - activation (str): An activation function to apply "sigmoid"/"softmax" (could be **None** to return logits) + + Returns: + ``torch.nn.Module``: **Unet++** + + Reference: + https://arxiv.org/abs/1807.10165 + + """ + + def __init__( + self, + encoder_name: str = "resnet34", + encoder_depth: int = 5, + encoder_weights: Optional[str] = "imagenet", + decoder_use_batchnorm: bool = True, + decoder_channels: List[int] = (256, 128, 64, 32, 16), + decoder_attention_type: Optional[str] = None, + in_channels: int = 3, + classes: int = 1, + activation: Optional[Union[str, callable]] = None, + aux_params: Optional[dict] = None, + ): + super().__init__() + + self.encoder = get_encoder( + encoder_name, + in_channels=in_channels, + depth=encoder_depth, + weights=encoder_weights, + ) + + self.decoder = UnetPlusPlusDecoder( + encoder_channels=self.encoder.out_channels, + decoder_channels=decoder_channels, + n_blocks=encoder_depth, + use_batchnorm=decoder_use_batchnorm, + center=True if encoder_name.startswith("vgg") else False, + attention_type=decoder_attention_type, + ) + + self.segmentation_head = SegmentationHead( + in_channels=decoder_channels[-1], + out_channels=classes, + activation=activation, + kernel_size=3, + ) + + if aux_params is not None: + self.classification_head = ClassificationHead( + in_channels=self.encoder.out_channels[-1], **aux_params + ) + else: + self.classification_head = None + + self.name = "unetplusplus-{}".format(encoder_name) + self.initialize() diff --git a/segmentation_models_pytorch_example/utils/__init__.py b/segmentation_models_pytorch_example/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5d7481dba7951e5572abccc2e7e1266118adf932 --- /dev/null +++ b/segmentation_models_pytorch_example/utils/__init__.py @@ -0,0 +1,3 @@ +from . import train +from . import losses +from . 
import metrics \ No newline at end of file diff --git a/segmentation_models_pytorch_example/utils/base.py b/segmentation_models_pytorch_example/utils/base.py new file mode 100644 index 0000000000000000000000000000000000000000..0e1f3772adecd488cd0045a0002c230d6c1be71b --- /dev/null +++ b/segmentation_models_pytorch_example/utils/base.py @@ -0,0 +1,71 @@ +import re +import torch.nn as nn + +class BaseObject(nn.Module): + + def __init__(self, name=None): + super().__init__() + self._name = name + + @property + def __name__(self): + if self._name is None: + name = self.__class__.__name__ + s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name) + return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() + else: + return self._name + + +class Metric(BaseObject): + pass + + +class Loss(BaseObject): + + def __add__(self, other): + if isinstance(other, Loss): + return SumOfLosses(self, other) + else: + raise ValueError('Loss should be inherited from `Loss` class') + + def __radd__(self, other): + return self.__add__(other) + + def __mul__(self, value): + if isinstance(value, (int, float)): + return MultipliedLoss(self, value) + else: + raise ValueError('Loss should be inherited from `BaseLoss` class') + + def __rmul__(self, other): + return self.__mul__(other) + + +class SumOfLosses(Loss): + + def __init__(self, l1, l2): + name = '{} + {}'.format(l1.__name__, l2.__name__) + super().__init__(name=name) + self.l1 = l1 + self.l2 = l2 + + def __call__(self, *inputs): + return self.l1.forward(*inputs) + self.l2.forward(*inputs) + + +class MultipliedLoss(Loss): + + def __init__(self, loss, multiplier): + + # resolve name + if len(loss.__name__.split('+')) > 1: + name = '{} * ({})'.format(multiplier, loss.__name__) + else: + name = '{} * {}'.format(multiplier, loss.__name__) + super().__init__(name=name) + self.loss = loss + self.multiplier = multiplier + + def __call__(self, *inputs): + return self.multiplier * self.loss.forward(*inputs) diff --git a/segmentation_models_pytorch_example/utils/functional.py b/segmentation_models_pytorch_example/utils/functional.py new file mode 100644 index 0000000000000000000000000000000000000000..a06e2c12cf054191a52246abcc090ca5e1d99d31 --- /dev/null +++ b/segmentation_models_pytorch_example/utils/functional.py @@ -0,0 +1,126 @@ +import torch + + +def _take_channels(*xs, ignore_channels=None): + if ignore_channels is None: + return xs + else: + channels = [channel for channel in range(xs[0].shape[1]) if channel not in ignore_channels] + xs = [torch.index_select(x, dim=1, index=torch.tensor(channels).to(x.device)) for x in xs] + return xs + + +def _threshold(x, threshold=None): + if threshold is not None: + return (x > threshold).type(x.dtype) + else: + return x + + +def iou(pr, gt, eps=1e-7, threshold=None, ignore_channels=None): + """Calculate Intersection over Union between ground truth and prediction + Args: + pr (torch.Tensor): predicted tensor + gt (torch.Tensor): ground truth tensor + eps (float): epsilon to avoid zero division + threshold: threshold for outputs binarization + Returns: + float: IoU (Jaccard) score + """ + + pr = _threshold(pr, threshold=threshold) + pr, gt = _take_channels(pr, gt, ignore_channels=ignore_channels) + + intersection = torch.sum(gt * pr) + union = torch.sum(gt) + torch.sum(pr) - intersection + eps + return (intersection + eps) / union + + +jaccard = iou + + +def f_score(pr, gt, beta=1, eps=1e-7, threshold=None, ignore_channels=None): + """Calculate F-score between ground truth and prediction + Args: + pr (torch.Tensor): predicted tensor + gt 
(torch.Tensor): ground truth tensor + beta (float): positive constant + eps (float): epsilon to avoid zero division + threshold: threshold for outputs binarization + Returns: + float: F score + """ + + pr = _threshold(pr, threshold=threshold) + pr, gt = _take_channels(pr, gt, ignore_channels=ignore_channels) + + tp = torch.sum(gt * pr) + fp = torch.sum(pr) - tp + fn = torch.sum(gt) - tp + + score = ((1 + beta ** 2) * tp + eps) \ + / ((1 + beta ** 2) * tp + beta ** 2 * fn + fp + eps) + + return score + + +def accuracy(pr, gt, threshold=0.5, ignore_channels=None): + """Calculate accuracy score between ground truth and prediction + Args: + pr (torch.Tensor): predicted tensor + gt (torch.Tensor): ground truth tensor + eps (float): epsilon to avoid zero division + threshold: threshold for outputs binarization + Returns: + float: precision score + """ + pr = _threshold(pr, threshold=threshold) + pr, gt = _take_channels(pr, gt, ignore_channels=ignore_channels) + + tp = torch.sum(gt == pr, dtype=pr.dtype) + score = tp / gt.view(-1).shape[0] + return score + + +def precision(pr, gt, eps=1e-7, threshold=None, ignore_channels=None): + """Calculate precision score between ground truth and prediction + Args: + pr (torch.Tensor): predicted tensor + gt (torch.Tensor): ground truth tensor + eps (float): epsilon to avoid zero division + threshold: threshold for outputs binarization + Returns: + float: precision score + """ + + pr = _threshold(pr, threshold=threshold) + pr, gt = _take_channels(pr, gt, ignore_channels=ignore_channels) + + tp = torch.sum(gt * pr) + fp = torch.sum(pr) - tp + + score = (tp + eps) / (tp + fp + eps) + + return score + + +def recall(pr, gt, eps=1e-7, threshold=None, ignore_channels=None): + """Calculate Recall between ground truth and prediction + Args: + pr (torch.Tensor): A list of predicted elements + gt (torch.Tensor): A list of elements that are to be predicted + eps (float): epsilon to avoid zero division + threshold: threshold for outputs binarization + Returns: + float: recall score + """ + + pr = _threshold(pr, threshold=threshold) + pr, gt = _take_channels(pr, gt, ignore_channels=ignore_channels) + + tp = torch.sum(gt * pr) + fn = torch.sum(gt) - tp + + score = (tp + eps) / (tp + fn + eps) + + return score diff --git a/segmentation_models_pytorch_example/utils/losses.py b/segmentation_models_pytorch_example/utils/losses.py new file mode 100644 index 0000000000000000000000000000000000000000..8a90cee9f56f06efc47c5c9bd48002c1d298cbaa --- /dev/null +++ b/segmentation_models_pytorch_example/utils/losses.py @@ -0,0 +1,67 @@ +import torch.nn as nn + +from . import base +from . 
import functional as F +from ..base.modules import Activation + + +class JaccardLoss(base.Loss): + + def __init__(self, eps=1., activation=None, ignore_channels=None, **kwargs): + super().__init__(**kwargs) + self.eps = eps + self.activation = Activation(activation) + self.ignore_channels = ignore_channels + + def forward(self, y_pr, y_gt): + y_pr = self.activation(y_pr) + return 1 - F.jaccard( + y_pr, y_gt, + eps=self.eps, + threshold=None, + ignore_channels=self.ignore_channels, + ) + + +class DiceLoss(base.Loss): + + def __init__(self, eps=1., beta=1., activation=None, ignore_channels=None, **kwargs): + super().__init__(**kwargs) + self.eps = eps + self.beta = beta + self.activation = Activation(activation) + self.ignore_channels = ignore_channels + + def forward(self, y_pr, y_gt): + y_pr = self.activation(y_pr) + return 1 - F.f_score( + y_pr, y_gt, + beta=self.beta, + eps=self.eps, + threshold=None, + ignore_channels=self.ignore_channels, + ) + + +class L1Loss(nn.L1Loss, base.Loss): + pass + + +class MSELoss(nn.MSELoss, base.Loss): + pass + + +class CrossEntropyLoss(nn.CrossEntropyLoss, base.Loss): + pass + + +class NLLLoss(nn.NLLLoss, base.Loss): + pass + + +class BCELoss(nn.BCELoss, base.Loss): + pass + + +class BCEWithLogitsLoss(nn.BCEWithLogitsLoss, base.Loss): + pass diff --git a/segmentation_models_pytorch_example/utils/meter.py b/segmentation_models_pytorch_example/utils/meter.py new file mode 100644 index 0000000000000000000000000000000000000000..c6e7fd3078189160dfcbbbcc188d8a0bb0c293e0 --- /dev/null +++ b/segmentation_models_pytorch_example/utils/meter.py @@ -0,0 +1,61 @@ +import numpy as np + + +class Meter(object): + '''Meters provide a way to keep track of important statistics in an online manner. + This class is abstract, but provides a standard interface for all meters to follow. + ''' + + def reset(self): + '''Resets the meter to default settings.''' + pass + + def add(self, value): + '''Log a new value to the meter + Args: + value: Next result to include. + ''' + pass + + def value(self): + '''Get the value of the meter in the current state.''' + pass + + +class AverageValueMeter(Meter): + def __init__(self): + super(AverageValueMeter, self).__init__() + self.reset() + self.val = 0 + + def add(self, value, n=1): + self.val = value + self.sum += value + self.var += value * value + self.n += n + + if self.n == 0: + self.mean, self.std = np.nan, np.nan + elif self.n == 1: + self.mean = 0.0 + self.sum # This is to force a copy in torch/numpy + self.std = np.inf + self.mean_old = self.mean + self.m_s = 0.0 + else: + self.mean = self.mean_old + (value - n * self.mean_old) / float(self.n) + self.m_s += (value - self.mean_old) * (value - self.mean) + self.mean_old = self.mean + self.std = np.sqrt(self.m_s / (self.n - 1.0)) + + def value(self): + return self.mean, self.std + + def reset(self): + self.n = 0 + self.sum = 0.0 + self.var = 0.0 + self.val = 0.0 + self.mean = np.nan + self.mean_old = 0.0 + self.m_s = 0.0 + self.std = np.nan diff --git a/segmentation_models_pytorch_example/utils/metrics.py b/segmentation_models_pytorch_example/utils/metrics.py new file mode 100644 index 0000000000000000000000000000000000000000..256f21f7c69bb94db484940fad9899189358639a --- /dev/null +++ b/segmentation_models_pytorch_example/utils/metrics.py @@ -0,0 +1,99 @@ +from . import base +from . 
import functional as F +from ..base.modules import Activation + + +class IoU(base.Metric): + __name__ = 'iou_score' + + def __init__(self, eps=1e-7, threshold=0.5, activation=None, ignore_channels=None, **kwargs): + super().__init__(**kwargs) + self.eps = eps + self.threshold = threshold + self.activation = Activation(activation) + self.ignore_channels = ignore_channels + + def forward(self, y_pr, y_gt): + y_pr = self.activation(y_pr) + return F.iou( + y_pr, y_gt, + eps=self.eps, + threshold=self.threshold, + ignore_channels=self.ignore_channels, + ) + + +class Fscore(base.Metric): + + def __init__(self, beta=1, eps=1e-7, threshold=0.5, activation=None, ignore_channels=None, **kwargs): + super().__init__(**kwargs) + self.eps = eps + self.beta = beta + self.threshold = threshold + self.activation = Activation(activation) + self.ignore_channels = ignore_channels + + def forward(self, y_pr, y_gt): + y_pr = self.activation(y_pr) + return F.f_score( + y_pr, y_gt, + eps=self.eps, + beta=self.beta, + threshold=self.threshold, + ignore_channels=self.ignore_channels, + ) + + +class Accuracy(base.Metric): + + def __init__(self, threshold=0.5, activation=None, ignore_channels=None, **kwargs): + super().__init__(**kwargs) + self.threshold = threshold + self.activation = Activation(activation) + self.ignore_channels = ignore_channels + + def forward(self, y_pr, y_gt): + y_pr = self.activation(y_pr) + return F.accuracy( + y_pr, y_gt, + threshold=self.threshold, + ignore_channels=self.ignore_channels, + ) + + +class Recall(base.Metric): + + def __init__(self, eps=1e-7, threshold=0.5, activation=None, ignore_channels=None, **kwargs): + super().__init__(**kwargs) + self.eps = eps + self.threshold = threshold + self.activation = Activation(activation) + self.ignore_channels = ignore_channels + + def forward(self, y_pr, y_gt): + y_pr = self.activation(y_pr) + return F.recall( + y_pr, y_gt, + eps=self.eps, + threshold=self.threshold, + ignore_channels=self.ignore_channels, + ) + + +class Precision(base.Metric): + + def __init__(self, eps=1e-7, threshold=0.5, activation=None, ignore_channels=None, **kwargs): + super().__init__(**kwargs) + self.eps = eps + self.threshold = threshold + self.activation = Activation(activation) + self.ignore_channels = ignore_channels + + def forward(self, y_pr, y_gt): + y_pr = self.activation(y_pr) + return F.precision( + y_pr, y_gt, + eps=self.eps, + threshold=self.threshold, + ignore_channels=self.ignore_channels, + ) diff --git a/segmentation_models_pytorch_example/utils/train.py b/segmentation_models_pytorch_example/utils/train.py new file mode 100644 index 0000000000000000000000000000000000000000..f60b195e0a118b5c934b57f90bc7c44fa7741b83 --- /dev/null +++ b/segmentation_models_pytorch_example/utils/train.py @@ -0,0 +1,113 @@ +import sys +import torch +from tqdm import tqdm as tqdm +from .meter import AverageValueMeter + + +class Epoch: + + def __init__(self, model, loss, metrics, stage_name, device='cpu', verbose=True): + self.model = model + self.loss = loss + self.metrics = metrics + self.stage_name = stage_name + self.verbose = verbose + self.device = device + + self._to_device() + + def _to_device(self): + self.model.to(self.device) + self.loss.to(self.device) + for metric in self.metrics: + metric.to(self.device) + + def _format_logs(self, logs): + str_logs = ['{} - {:.4}'.format(k, v) for k, v in logs.items()] + s = ', '.join(str_logs) + return s + + def batch_update(self, x, y): + raise NotImplementedError + + def on_epoch_start(self): + pass + + def run(self, 
dataloader): + + self.on_epoch_start() + + logs = {} + loss_meter = AverageValueMeter() + metrics_meters = {metric.__name__: AverageValueMeter() for metric in self.metrics} + + with tqdm(dataloader, desc=self.stage_name, file=sys.stdout, disable=not (self.verbose)) as iterator: + for x, y in iterator: + x, y = x.to(self.device), y.to(self.device) + loss, y_pred = self.batch_update(x, y) + + # update loss logs + loss_value = loss.cpu().detach().numpy() + loss_meter.add(loss_value) + loss_logs = {self.loss.__name__: loss_meter.mean} + logs.update(loss_logs) + + # update metrics logs + for metric_fn in self.metrics: + metric_value = metric_fn(y_pred, y).cpu().detach().numpy() + metrics_meters[metric_fn.__name__].add(metric_value) + metrics_logs = {k: v.mean for k, v in metrics_meters.items()} + logs.update(metrics_logs) + + if self.verbose: + s = self._format_logs(logs) + iterator.set_postfix_str(s) + + return logs + + +class TrainEpoch(Epoch): + + def __init__(self, model, loss, metrics, optimizer, device='cpu', verbose=True): + super().__init__( + model=model, + loss=loss, + metrics=metrics, + stage_name='train', + device=device, + verbose=verbose, + ) + self.optimizer = optimizer + + def on_epoch_start(self): + self.model.train() + + def batch_update(self, x, y): + self.optimizer.zero_grad() + prediction = self.model.forward(x) + loss = self.loss(prediction, y) + loss.backward() + self.optimizer.step() + return loss, prediction + + +class ValidEpoch(Epoch): + + def __init__(self, model, loss, metrics, device='cpu', verbose=True): + super().__init__( + model=model, + loss=loss, + metrics=metrics, + stage_name='valid', + device=device, + verbose=verbose, + ) + + def on_epoch_start(self): + self.model.eval() + + def batch_update(self, x, y): + with torch.no_grad(): + prediction = self.model.forward(x) + loss = self.loss(prediction, y) + return loss, prediction diff --git a/train_classification.py b/train_classification.py new file mode 100644 index 0000000000000000000000000000000000000000..2c603328b7edbb94c2ed19e562999057bcabf40c --- /dev/null +++ b/train_classification.py @@ -0,0 +1,16 @@ +from classification_R50.train_R50_classification import train_R50 + +from utils.func import ( + parse_config, + load_config +) +if __name__=="__main__": + yml_args = parse_config() + cfg = load_config(yml_args.config) + + assert cfg.base.is_R50 + cfg.base.is_SAMVIT == 1 + + if cfg.base.is_R50: + train_R50(yml_args, cfg) + else: + print("Wrong") \ No newline at end of file diff --git a/train_segmentation.py b/train_segmentation.py new file mode 100644 index 0000000000000000000000000000000000000000..f5a12a0ac1c7349f9cd159131e7522a26ea9788f --- /dev/null +++ b/train_segmentation.py @@ -0,0 +1,22 @@ +from segmentation_2d.train_R50_seg_adam_optimizer_2d import train_2d_R50 +from segmentation_3d.train_R50_seg_adam_optimizer_3d import train_3d_R50 +from utils.func import ( + parse_config, + load_config +) +if __name__=="__main__": + yml_args = parse_config() + cfg = load_config(yml_args.config) + + assert cfg.base.is_2D + cfg.base.is_3D == 1 + if cfg.base.is_2D: + if cfg.base.is_R50: + train_2d_R50(yml_args, cfg) + if cfg.base.is_SAMVIT: + train_2d_SAMVIT(yml_args, cfg) + + if cfg.base.is_3D: + if cfg.base.is_R50: + train_3d_R50(yml_args, cfg) + if cfg.base.is_SAMVIT: + train_3d_SAMVIT(yml_args, cfg) \ No newline at end of file diff --git a/utils/SurfaceDice.py b/utils/SurfaceDice.py new file mode 100644 index 0000000000000000000000000000000000000000..a4f7fb2ab76fd1bd3f44a67479e447d4cb09934e --- /dev/null +++ 
b/utils/SurfaceDice.py @@ -0,0 +1,48 @@ +import torch +import numpy as np +import scipy.ndimage + +def compute_dice_coefficient(mask_gt, mask_pred): + """Compute soerensen-dice coefficient. + + compute the soerensen-dice coefficient between the ground truth mask `mask_gt` + and the predicted mask `mask_pred`. + + Args: + mask_gt: 3-dim Numpy array of type bool. The ground truth mask. + mask_pred: 3-dim Numpy array of type bool. The predicted mask. + + Returns: + the dice coeffcient as float. If both masks are empty, the result is NaN + """ + + if torch.all(mask_gt == 0): # If the present mask is empty (slice contains no information) + if np.all(mask_pred == 0): # If model segments nothing + return torch.tensor(1) # Then dice score is 1 + return torch.tensor(0) # Else dice score is 0 + volume_sum = mask_gt.sum() + mask_pred.sum() + if volume_sum == 0: + return np.NaN + volume_intersect = (mask_gt & mask_pred).sum() + return 2*volume_intersect / volume_sum + +def iou_2d(outputs: torch.Tensor, labels: torch.Tensor, reduce_batch_first: bool =False, epsilon=1e-6): + if outputs.dim() == 2 or reduce_batch_first: + inter = torch.dot(outputs.reshape(-1), labels.reshape(-1)) + union = outputs.sum() + labels.sum() - inter + return (inter + epsilon)/ (union + epsilon) + else: + iou = 0 + for idx in range(outputs.size(0)): + iou += iou_2d(outputs[idx], labels[idx]) + return iou/outputs.size(0) + + +def multiclass_iou(outputs: torch.Tensor, labels: torch.Tensor, reduce_batch_first: bool =False): + assert outputs.size() == labels.size() + if outputs.dim() == 3: + return iou_2d(outputs, labels, reduce_batch_first) + iou = 0 + for cidx in range(outputs.shape[0]): + iou += iou_2d(outputs[cidx], labels[cidx], reduce_batch_first) + return iou/outputs.size(0) \ No newline at end of file diff --git a/utils/__init__.py b/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a44f1d72015eaf34be0c7557d8380347f7b2cbbd --- /dev/null +++ b/utils/__init__.py @@ -0,0 +1,3 @@ +from .SurfaceDice import ( + compute_dice_coefficient +) diff --git a/utils/dataset_prepare.py b/utils/dataset_prepare.py new file mode 100644 index 0000000000000000000000000000000000000000..8b0081872dc20240bae468e38c33119d0b711b04 --- /dev/null +++ b/utils/dataset_prepare.py @@ -0,0 +1,137 @@ +import numpy as np +import torch +import shutil +import os +import matplotlib.pyplot as plt +import cv2 +import json +from PIL import Image +import pickle +from skimage.transform import resize + +def split_data(datasetname): + dir_image = "./dataset_demo/datasetname/images" + dir_label = "./dataset_demo/datasetname/labels" + + dir_image = dir_image.replace("datasetname", datasetname) + dir_label = dir_label.replace("datasetname", datasetname) + + path = "./dataset_demo" + filesplit = "./files_split" + + file_data = os.path.join(filesplit, datasetname+".json") + path_data = os.path.join(path, datasetname) + + + with open(file_data, 'r') as openfile: + json_object = json.load(openfile) + + for typ in json_object.keys(): + print(typ) + file_type = json_object[typ] + path_type = os.path.join(path_data, typ) + if os.path.exists(path_type): + shutil.rmtree(path_type) + os.mkdir(path_type) + + for img in file_type: + if not "labels" in typ: + shutil.copy(os.path.join(dir_image, img), os.path.join(path_type, img.replace(" ", ""))) + else: + shutil.copy(os.path.join(dir_label, img), os.path.join(path_type, img.replace(" ", ""))) + +def save_fileLabel(datasetname): + file_image = "./dataset_demo/datasetname/type" + file_label = 
"./dataset_demo/datasetname/type_labels" + path = "./dataset_demo/datasetname" +# if datasetname == "BUID": +# B = ['train', 'valid'] +# else: + B = ['train'] + file_image = file_image.replace("datasetname", datasetname) + file_label = file_label.replace("datasetname", datasetname) + path = path.replace("datasetname", datasetname) + with open(os.path.join(path, 'have_label.txt'), 'w') as F: + for j in B: + dir_label = file_label.replace("type", j) + dir_train = file_image.replace("type", j) + for i in os.listdir(dir_label): + file = os.path.join(dir_label, i) + img = np.array(Image.open(file)) + img[img < 50] = 0 + img[img > 200] = 1 + values, counts = np.unique(img, return_counts = True) + if (len(values) == 2): + a = i.replace("_segmentation.png", ".jpg") + F.write(os.path.join(dir_train, a)) + F.write(" ") + F.write(file) + F.write(" ") + F.write("[0, 1]") + F.write("\n") + + with open(os.path.join(path, 'non_label.txt'), 'w') as F: + for j in B: + dir_label = file_label.replace("type", j) + dir_train = file_image.replace("type", j) + for i in os.listdir(dir_label): + file = os.path.join(dir_label, i) + img = np.array(Image.open(file)) + img[img < 50] = 0 + img[img > 200] = 1 + values, counts = np.unique(img, return_counts = True) + + if (len(values) == 1): + a = i.replace("_segmentation.png", ".jpg") + F.write(os.path.join(dir_train, a)) + F.write(" ") + F.write(file) + F.write(" ") + F.write("[0]") + F.write("\n") + +def save_fileLabel_3D(datasetname): + file_image = "./dataset_demo/datasetname/type" + file_label = "./dataset_demo/datasetname/type_labels" + path = "./dataset_demo/datasetname" +# if datasetname == "BUID": +# B = ['train', 'valid'] +# else: + B = ['train'] + file_image = file_image.replace("datasetname", datasetname) + file_label = file_label.replace("datasetname", datasetname) + path = path.replace("datasetname", datasetname) + with open(os.path.join(path, 'have_label.txt'), 'w') as F: + for j in B: + dir_label = file_label.replace("type", j) + dir_train = file_image.replace("type", j) + for i in os.listdir(dir_label): + file = os.path.join(dir_label, i) + img = np.array(Image.open(file)) + values, counts = np.unique(img, return_counts = True) + if 60 in values: + a = i.replace("label", "image") + F.write(os.path.join(dir_train, a)) + F.write(" ") + F.write(file) + F.write(" ") + F.write("[0, 2]") + F.write("\n") + + with open(os.path.join(path, 'non_label.txt'), 'w') as F: + for j in B: + dir_label = file_label.replace("type", j) + dir_train = file_image.replace("type", j) + for i in os.listdir(dir_label): + file = os.path.join(dir_label, i) + img = np.array(Image.open(file)) + values, counts = np.unique(img, return_counts = True) + + if not 60 in values: + a = i.replace("label", "image") + F.write(os.path.join(dir_train, a)) + F.write(" ") + F.write(file) + F.write(" ") + F.write("[0]") + F.write("\n") \ No newline at end of file diff --git a/utils/endtoend.py b/utils/endtoend.py new file mode 100644 index 0000000000000000000000000000000000000000..4acf3b619189b0c9e42949001ef2aab412074935 --- /dev/null +++ b/utils/endtoend.py @@ -0,0 +1,66 @@ +import torch +from torch import Tensor +import numpy as np +import glob +import pandas as pd + +def dice_coeff(input: Tensor, target: Tensor, reduce_batch_first: bool = False, epsilon=1e-6): + # Average of Dice coefficient for all batches, or for a single mask + assert input.size() == target.size() + if input.dim() == 2 and reduce_batch_first: + raise ValueError(f'Dice: asked to reduce batch but got tensor without batch 
dimension (shape {input.shape})') + + if input.dim() == 2 or reduce_batch_first: + inter = torch.dot(input.reshape(-1), target.reshape(-1)) + sets_sum = torch.sum(input) + torch.sum(target) + if sets_sum.item() == 0: + sets_sum = 2 * inter + + return (2 * inter + epsilon) / (sets_sum + epsilon) + else: + # compute and average metric for each batch element + dice = 0 + for i in range(input.shape[0]): + dice += dice_coeff(input[i, ...], target[i, ...]) + return dice / input.shape[0] + + +def multiclass_dice_coeff(input: Tensor, target: Tensor, reduce_batch_first: bool = False, epsilon=1e-6): + # Average of Dice coefficient for all classes + assert input.size() == target.size() + if input.dim() == 3: + return dice_coeff(input, target, reduce_batch_first, epsilon) + dice = 0 + for channel in range(input.shape[1]): + dice += dice_coeff(input[:, channel, ...], target[:, channel, ...], reduce_batch_first, epsilon) + + return dice / input.shape[1] + + +def iou_2d(outputs: torch.Tensor, labels: torch.Tensor, reduce_batch_first: bool =False, epsilon=1e-6): + if outputs.dim() == 2 or reduce_batch_first: + inter = torch.dot(outputs.reshape(-1), labels.reshape(-1)) + union = outputs.sum() + labels.sum() - inter + return (inter + epsilon)/ (union + epsilon) + else: + iou = 0 + for idx in range(outputs.size(0)): + iou += iou_2d(outputs[idx], labels[idx]) + return iou/outputs.size(0) + +def multiclass_iou(outputs: torch.Tensor, labels: torch.Tensor, reduce_batch_first: bool =False): + assert outputs.size() == labels.size() + if outputs.dim() == 3: + return iou_2d(outputs, labels, reduce_batch_first) + iou = 0 + for cidx in range(outputs.size(1)): + iou += iou_2d(outputs[:,cidx,...], labels[:, cidx, ...], reduce_batch_first) + return iou/outputs.size(1) + +def dice_loss(input: Tensor, target: Tensor, multiclass: bool = False): + # Dice loss (objective to minimize) between 0 and 1 + assert input.size() == target.size() + fn = multiclass_dice_coeff if multiclass else dice_coeff + return 1 - fn(input, target, reduce_batch_first=True) + + diff --git a/utils/func.py b/utils/func.py new file mode 100644 index 0000000000000000000000000000000000000000..05a6c0dd034d100aa906e20717d06a30a142a261 --- /dev/null +++ b/utils/func.py @@ -0,0 +1,43 @@ +import yaml +from munch import munchify +import argparse + +def load_config(path): + with open(path, 'r') as file: + cfg = yaml.load(file, Loader=yaml.FullLoader) + return munchify(cfg) + +def parse_config(): + parser = argparse.ArgumentParser(allow_abbrev=True) + parser.add_argument( + '-config', + type=str, + default='./configs/default.yaml', + help='Path to the config file.' + ) + parser.add_argument( + '-overwrite', + action='store_true', + default=False, + help='Overwrite file in the save path.' + ) + parser.add_argument( + '-lvm_encoder', + '--lvm_med_encoder_path', + type=str, + default='', + help='Path to LVM Med encoder arch' + ) + parser.add_argument( + '-print_config', + action='store_true', + default=False, + help='Print details of configs.' 
+ ) + parser.add_argument( + '-test', + '--use_test_mode', + action='store_true', + help='Enable test mode.') + args = parser.parse_args() + return args \ No newline at end of file diff --git a/working_dir/__init__.py b/working_dir/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc --- /dev/null +++ b/working_dir/__init__.py @@ -0,0 +1 @@ + diff --git a/working_dir/checkpoint/__init__.py b/working_dir/checkpoint/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/zero_shot_segmentation.py b/zero_shot_segmentation.py new file mode 100644 index 0000000000000000000000000000000000000000..51f5a0b74084d5e7f6d6cc79d6ed6c7fbcba9023 --- /dev/null +++ b/zero_shot_segmentation.py @@ -0,0 +1,21 @@ +from segmentation_2d.zero_shot_SAM_2d import zero_shot_sam_2d +from segmentation_2d.zero_shot_LVMMed_SAM_2d import zero_shot_lvmmed_sam_2d + +from utils.func import ( + parse_config, + load_config +) + +if __name__=="__main__": + yml_args = parse_config() + cfg = load_config(yml_args.config) + + assert cfg.base.is_2D + cfg.base.is_3D == 1 + + if cfg.base.is_3D: + raise NotImplementedError("[Error] We have not yet implemented this function for 3D datasets. You could implement it similarly, based on our 3D implementation for MedSAM.") + + if yml_args.lvm_med_encoder_path != '': + zero_shot_lvmmed_sam_2d(yml_args, cfg) + else: + zero_shot_sam_2d(yml_args, cfg) \ No newline at end of file
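For reference, here is a tiny worked example of the metric helpers defined in `segmentation_models_pytorch_example/utils/functional.py` above. The tensor values are made up purely for illustration, and the snippet assumes the `segmentation_models_pytorch_example` package from this diff is on `PYTHONPATH`.

```python
import torch
from segmentation_models_pytorch_example.utils import functional as F

# One sample, one channel, four "pixels": ground truth vs. raw scores.
gt = torch.tensor([[[1., 1., 0., 0.]]])
pr = torch.tensor([[[0.9, 0.4, 0.8, 0.1]]])

# With threshold=0.5 the prediction binarizes to [1, 0, 1, 0],
# giving tp=1, fp=1, fn=1.
print(F.iou(pr, gt, threshold=0.5))        # ~0.3333 -> tp / (tp + fp + fn)
print(F.f_score(pr, gt, threshold=0.5))    # ~0.5    -> 2*tp / (2*tp + fp + fn)
print(F.precision(pr, gt, threshold=0.5))  # ~0.5
print(F.recall(pr, gt, threshold=0.5))     # ~0.5
```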
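`AverageValueMeter` in `utils/meter.py` keeps a running mean and sample standard deviation using a Welford-style update. A quick sanity check with made-up values:

```python
from segmentation_models_pytorch_example.utils.meter import AverageValueMeter

meter = AverageValueMeter()
for v in [1.0, 2.0, 3.0]:
    meter.add(v)

mean, std = meter.value()
print(mean, std)  # 2.0 1.0 -- running mean and sample std of the logged values
```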
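The `Loss` classes above compose with `+` and `*` (via `SumOfLosses` and `MultipliedLoss`), and `TrainEpoch`/`ValidEpoch` drive the loop. Below is a minimal, self-contained sketch of how these pieces fit together; the `ToyNet` model and the random tensors are placeholders invented for illustration (LVM-Med's own scripts plug in the real encoders and dataloaders), and the import path again assumes the `segmentation_models_pytorch_example` package from this diff is on `PYTHONPATH`.

```python
import torch
import torch.nn as nn
from torch.utils.data import DataLoader, TensorDataset

from segmentation_models_pytorch_example.utils import losses, metrics, train

class ToyNet(nn.Module):
    """Stand-in for Unet++ so the sketch runs without pretrained encoder weights."""
    def __init__(self):
        super().__init__()
        self.conv = nn.Conv2d(3, 1, kernel_size=3, padding=1)

    def forward(self, x):
        return torch.sigmoid(self.conv(x))  # probabilities, as DiceLoss/BCELoss expect here

# Random "images" and binary "masks" just to exercise the loop.
images = torch.rand(8, 3, 64, 64)
masks = (torch.rand(8, 1, 64, 64) > 0.5).float()
loader = DataLoader(TensorDataset(images, masks), batch_size=4)

model = ToyNet()
loss = losses.DiceLoss() + 0.5 * losses.BCELoss()           # SumOfLosses / MultipliedLoss
metric_list = [metrics.IoU(threshold=0.5), metrics.Fscore(threshold=0.5)]
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)

train_epoch = train.TrainEpoch(model, loss=loss, metrics=metric_list,
                               optimizer=optimizer, device='cpu')
valid_epoch = train.ValidEpoch(model, loss=loss, metrics=metric_list, device='cpu')

for _ in range(2):
    print(train_epoch.run(loader))  # {'dice_loss + 0.5 * bce_loss': ..., 'iou_score': ..., 'fscore': ...}
    print(valid_epoch.run(loader))
```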
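Note that `compute_dice_coefficient` in `utils/SurfaceDice.py` is written against torch tensors (it calls `torch.all`), even though its docstring mentions NumPy arrays; passing boolean tensors works as shown below. The shapes and values are made up, and the snippet assumes the repository root is on `PYTHONPATH` with `scipy` installed (since `SurfaceDice.py` imports `scipy.ndimage`).

```python
import torch
from utils.SurfaceDice import compute_dice_coefficient

gt = torch.zeros(4, 4, 4, dtype=torch.bool)
pred = torch.zeros(4, 4, 4, dtype=torch.bool)
gt[1:3, 1:3, 1:3] = True    # 8 voxels
pred[1:3, 1:3, 2:4] = True  # 8 voxels, 4 of them overlapping with gt

# Dice = 2 * |gt & pred| / (|gt| + |pred|) = 2 * 4 / (8 + 8) = 0.5
print(compute_dice_coefficient(gt, pred))  # tensor(0.5000)
```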
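Finally, the entry scripts (`train_segmentation.py`, `train_classification.py`, `zero_shot_segmentation.py`) all read a YAML config through `utils/func.py` and access it attribute-style via `munch`. The keys below are only the flags those scripts actually check; the real config files ship with the repository and contain more fields. A minimal sketch, loading from a string instead of a file for self-containment:

```python
import yaml
from munch import munchify

cfg_text = """
base:
  is_2D: 1
  is_3D: 0
  is_R50: 1
  is_SAMVIT: 0
"""

cfg = munchify(yaml.load(cfg_text, Loader=yaml.FullLoader))

# Exactly one of 2D/3D must be set, mirroring the assert in train_segmentation.py.
assert cfg.base.is_2D + cfg.base.is_3D == 1
print(cfg.base.is_R50)  # attribute-style access provided by munchify
```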