diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..c957848293c654a36ed7309c83d1f5d1a02b9997 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +*.jpg filter=lfs diff=lfs merge=lfs -text diff --git a/DIC.py b/DIC.py new file mode 100644 index 0000000000000000000000000000000000000000..9bd67353e13c054ea320882506624ac0e2050a91 --- /dev/null +++ b/DIC.py @@ -0,0 +1,17 @@ +import torch +from pathlib import Path + + +dir=Path.home() / f"tmp/resnet50/CUB2011/123456/" +dic=torch.load(dir/ f"SlDD_Selection_50.pt") + +print (dic) + +#if 'linear.selection' in dic.keys(): + #print("key 'linear.selection' exist") +#else: + #print("no such key") + + + + diff --git a/FeatureDiversityLoss.py b/FeatureDiversityLoss.py new file mode 100644 index 0000000000000000000000000000000000000000..be5745ae71dbe298244271c3a942c80c2b3e9867 --- /dev/null +++ b/FeatureDiversityLoss.py @@ -0,0 +1,59 @@ +import torch +from torch import nn + +""" +Feature Diversity Loss: +Usage to replicate paper: +Call +loss_function = FeatureDiversityLoss(0.196, linear) +to inititalize loss with linear layer of model. +At each mini batch get feature maps (Output of final convolutional layer) and add to Loss: +loss += loss_function(feature_maps, outputs) +""" + + +class FeatureDiversityLoss(nn.Module): + def __init__(self, scaling_factor, linear): + super().__init__() + self.scaling_factor = scaling_factor #* 0 + print("Scaling Factor: ", self.scaling_factor) + self.linearLayer = linear + + def initialize(self, linearLayer): + self.linearLayer = linearLayer + + def get_weights(self, outputs): + weight_matrix = self.linearLayer.weight + weight_matrix = torch.abs(weight_matrix) + top_classes = torch.argmax(outputs, dim=1) + relevant_weights = weight_matrix[top_classes] + return relevant_weights + + def forward(self, feature_maps, outputs): + relevant_weights = self.get_weights(outputs) + relevant_weights = norm_vector(relevant_weights) + feature_maps = preserve_avg_func(feature_maps) + flattened_feature_maps = feature_maps.flatten(2) + batch, features, map_size = flattened_feature_maps.size() + relevant_feature_maps = flattened_feature_maps * relevant_weights[..., None] + diversity_loss = torch.sum( + torch.amax(relevant_feature_maps, dim=1)) + return -diversity_loss / batch * self.scaling_factor + + +def norm_vector(x): + return x / (torch.norm(x, dim=1) + 1e-5)[:, None] + + +def preserve_avg_func(x): + avgs = torch.mean(x, dim=[2, 3]) + max_avgs = torch.max(avgs, dim=1)[0] + scaling_factor = avgs / torch.clamp(max_avgs[..., None], min=1e-6) + softmaxed_maps = softmax_feature_maps(x) + scaled_maps = softmaxed_maps * scaling_factor[..., None, None] + return scaled_maps + + +def softmax_feature_maps(x): + return torch.softmax(x.reshape(x.size(0), x.size(1), -1), 2).view_as(x) + diff --git a/ReadME.md b/ReadME.md new file mode 100644 index 0000000000000000000000000000000000000000..ea7057e74321e137281dfc1e7b4890ca2c193ef0 --- /dev/null +++ b/ReadME.md @@ -0,0 +1,138 @@ +# Q-SENN - Quantized Self-Explaining Neural Networks + +This repository contains the code for the AAAI 2024 paper +[*Q-SENN: Quantized Self-Explaining Neural Network*](https://ojs.aaai.org/index.php/AAAI/article/view/30145) by Thomas +Norrenbrock , +Marco Rudolph, +and Bodo Rosenhahn. 
+Additionally, the SLDD-model from [*Take 5: +Interpretable Image Classification with a Handful of Features*](https://arxiv.org/pdf/2303.13166) (NeurIPS +Workshop) by the same authors is included. + + +

+ +

+ +--- +Abstract: +>Explanations in Computer Vision are often desired, but most Deep Neural Networks can only provide saliency maps with questionable faithfulness. Self-Explaining Neural Networks (SENN) extract interpretable concepts with fidelity, diversity, and grounding to combine them linearly for decision-making. While they can explain what was recognized, initial realizations lack accuracy and general applicability. We propose the Quantized-Self-Explaining Neural Network Q-SENN. Q-SENN satisfies or exceeds the desiderata of SENN while being applicable to more complex datasets and maintaining most or all of the accuracy of an uninterpretable baseline model, out-performing previous work in all considered metrics. Q-SENN describes the relationship between every class and feature as either positive, negative or neutral instead of an arbitrary number of possible relations, enforcing more binary human-friendly features. Since every class is assigned just 5 interpretable features on average, Q-SENN shows convincing local and global interpretability. Additionally, we propose a feature alignment method, capable of aligning learned features with human language-based concepts without additional supervision. Thus, what is learned can be more easily verbalized. + + + + +--- + +## Installation +You will need the usual libraries for deep learning, e.g. pytorch, +torchvision, numpy, etc. Additionally, we use +[GLM-Saga](https://github.com/MadryLab/glm_saga), which can be installed via pip. +In case you are lazy (or like to spend your time otherwise), a suitable +environment can be created using [Anaconda](https://www.anaconda.com/) and the +provided environment.yml file: +```shell +conda env create -f environment.yml +``` + +## Data +Supported datasets are: +- [Cub2011](https://www.vision.caltech.edu/datasets/cub_200_2011/) +- [StanfordCars](https://ai.stanford.edu/~jkrause/cars/car_dataset.html) +- [TravelingBirds](https://worksheets.codalab.org/bundles/0x518829de2aa440c79cd9d75ef6669f27) +- [ImageNet](https://www.image-net.org/) + +To use the data for training, the datasets have to be downloaded and put into the +respective folder under ~/tmp/datasets such that the final structure looks like + +```shell +~/tmp/datasets +├── CUB200 +│   └── CUB_200_2011 +│   ├── ... +├── StanfordCars +│   ├── stanford_cars +│      ├── ... +├── TravelingBirds +│   ├── CUB_fixed +│      ├── ... +├── imagenet +│   ├── ... +``` + +The default paths can be changed in the dataset_classes or, for ImageNet, in +get_data.py. + +Note: +If cropped images, as used for PIP-Net, ProtoPool, etc., are desired, then the +crop_root should be set to a folder containing the cropped images in the +expected structure, obtained by following ProtoTree's instructions: +https://github.com/M-Nauta/ProtoTree/blob/main/README.md#preprocessing-cub. +The default path is PPCUB200 instead of CUB200 for ProtoPool. Using these images +is enabled with the additional flag `--cropGT` introduced later. + + + +## Usage +The code to create a Q-SENN model can be started from the file main.py. +Available parameters are: +- `--dataset`: The dataset to use. Default: Cub2011 +- `--arch`: The backbone to use. Default: resnet50 +- `--model_type`: The model type to use. Default: qsenn +- `--seed`: The seed to use. Default: None +- `--do_dense`: Whether to train the dense model. Default: True +- `--cropGT`: Whether to crop CUB/TravelingBirds based on GT Boundaries. Default: False +- `--n_features`: How many features to select.
 Default: 50 +- `--n_per_class`: How many features to assign to each class. Default: 5 +- `--img_size`: Image size. Default: 448 +- `--reduced_strides`: Whether to use reduced strides for resnets. Default: False + + +For example, the next command will start the creation of Q-SENN with resnet50 on +StanfordCars using the default arguments in the paper. +```shell +python main.py --dataset StanfordCars +``` + +**Note:** +All experiments on ImageNet in the paper skipped the dense training from +scratch on ImageNet. The pretrained models are used directly. +This can be replicated with the argument `--do_dense False`. + +## Citations +Please cite this work as:\ +Q-SENN +```bibtex +@inproceedings{norrenbrock2024q, + title={Q-senn: Quantized self-explaining neural networks}, + author={Norrenbrock, Thomas and Rudolph, Marco and Rosenhahn, Bodo}, + booktitle={Proceedings of the AAAI Conference on Artificial Intelligence}, + volume={38}, + number={19}, + pages={21482--21491}, + year={2024} +} +``` +SLDD-Model +```bibtex +@inproceedings{norrenbrocktake, + title={Take 5: Interpretable Image Classification with a Handful of Features}, + author={Norrenbrock, Thomas and Rudolph, Marco and Rosenhahn, Bodo}, + year={2022}, + booktitle={Progress and Challenges in Building Trustworthy Embodied AI} +} +``` +## Pretrained Model +One pretrained model for Q-SENN on CUB can be obtained via this link: https://drive.google.com/drive/folders/1agWqKhcWOVWueV4Fzaowr80lQroCJFYn?usp=drive_link +A minimal, untested loading sketch is included at the end of this README. + +## Acknowledgement +This work was supported by the Federal Ministry of Education and Research (BMBF), Germany under the AI service center KISSKI (grant no. 01IS22093C) and the Deutsche Forschungsgemeinschaft (DFG) under Germany’s Excellence Strategy within the Cluster of Excellence PhoenixD (EXC 2122). +This work was partially supported by Intel Corporation and by the German Federal Ministry +of the Environment, Nature Conservation, Nuclear Safety +and Consumer Protection (GreenAutoML4FAS project no. +67KI32007A). + +The work was done at the Leibniz University Hannover and published at AAAI 2024. + +
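The following is a minimal, untested sketch of how the pretrained CUB checkpoint linked above could be restored with the code in this repository. The contents of the Google Drive folder are not documented here, so the file name and the assumption that it is a plain PyTorch state dict are placeholders, not the authors' documented procedure.

```python
# Sketch only: assumes the download is a plain state dict for the resnet50
# defined in architectures/resnet.py; "qsenn_cub_checkpoint.pth" is a placeholder name.
import torch

from architectures.model_mapping import get_model

model = get_model("resnet50", num_classes=200)  # CUB-200-2011 has 200 classes
state_dict = torch.load("qsenn_cub_checkpoint.pth", map_location="cpu")  # placeholder file name
model.load_state_dict(state_dict, strict=False)  # strict=False tolerates extra keys, e.g. a stored feature selection
model.eval()
```

If the checkpoint stores a feature selection (as the `SlDD_Selection_50.pt` file loaded in DIC.py suggests), `FinalLayer.set_model_sldd` might additionally need to be called with the stored selection, weights, mean, and std so that the sparse linear head matches the saved keys.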

+ + + +
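As a usage note for the FeatureDiversityLoss.py module added earlier in this diff: its docstring describes constructing the loss with the model's linear layer and adding it to the classification loss at every mini batch. Below is a minimal sketch of such a loop; it is illustrative only and not the repository's actual main.py, the data loader is assumed to exist, and the optimizer settings merely mirror the dense values in configs/optim_params.py.

```python
# Minimal training-loop sketch for FeatureDiversityLoss (illustrative; not the repo's main.py).
import torch

from FeatureDiversityLoss import FeatureDiversityLoss
from architectures.model_mapping import get_model


def train_one_epoch(model, loss_function, train_loader, optimizer):
    criterion = torch.nn.CrossEntropyLoss()
    for images, targets in train_loader:
        optimizer.zero_grad()
        # ResNet.forward returns [logits, feature_maps] when with_feature_maps=True
        outputs, feature_maps = model(images, with_feature_maps=True)
        loss = criterion(outputs, targets)
        loss += loss_function(feature_maps, outputs)  # the loss already carries its sign and scaling factor
        loss.backward()
        optimizer.step()


model = get_model("resnet50", num_classes=200)
fdl = FeatureDiversityLoss(0.196, model.linear)  # 0.196 is the resnet50 beta from configs/architecture_params.py
optimizer = torch.optim.SGD(model.parameters(), lr=0.005, weight_decay=0.0005)  # dense values from configs/optim_params.py
# train_one_epoch(model, fdl, some_dataloader, optimizer)  # some_dataloader: any DataLoader yielding (image, label) batches
```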

diff --git a/__pycache__/get_data.cpython-310.pyc b/__pycache__/get_data.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..db7e1ece40eba3119dbfd6595168c1599a3847bb Binary files /dev/null and b/__pycache__/get_data.cpython-310.pyc differ diff --git a/__pycache__/load_model.cpython-310.pyc b/__pycache__/load_model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bb7d6c778978476de6f9d6d7937a559b3ddfeb6d Binary files /dev/null and b/__pycache__/load_model.cpython-310.pyc differ diff --git a/__pycache__/visualization.cpython-310.pyc b/__pycache__/visualization.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..06e9e4eb229bbc2e8e205d21fd20bcdbbd2dafd1 Binary files /dev/null and b/__pycache__/visualization.cpython-310.pyc differ diff --git a/__pycache__/visualization_gary.cpython-310.pyc b/__pycache__/visualization_gary.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bf554ca959630452170764c3a93b0a0c49594f59 Binary files /dev/null and b/__pycache__/visualization_gary.cpython-310.pyc differ diff --git a/app.py b/app.py new file mode 100644 index 0000000000000000000000000000000000000000..f0da29b53801c9dcb729e7ad364f6617dea0e479 --- /dev/null +++ b/app.py @@ -0,0 +1,87 @@ +import gradio as gr +from visualization_gary import* + +# 定义模式选择操作,加载不同的界面 +def choose_mode(selected_mode): + if selected_mode == "get interpretable Result ": + return gr.update(visible=True), gr.update(visible=False), gr.update(visible=False), "in Mode 1" + elif selected_mode == "To give Feedbacks": + return gr.update(visible=False), gr.update(visible=True), gr.update(visible=False), "in Mode 2" + else: + return gr.update(visible=False), gr.update(visible=False), gr.update(visible=False), "please choose a mode" + +# clear if reupload +def reset_state_mod1(): + return None, None, "State has been reset." 
+ +def reset_state_mod2(): + return None, None,None,gr.update(interactive=True),gr.update(interactive=True),gr.update(interactive=True),gr.update(interactive=True),gr.update(interactive=True) + +# 主界面,包含模式选择和每种模式的界面 +with gr.Blocks() as demo: + gr.Markdown("# please choose a mode") + + # 模式选择器 + mode_selector = gr.Radio(["get interpretable Result ", "To give Feedbacks"], label="Mode Selection") + + # 模式1界面 + with gr.Column(visible=False) as mode1_ui: + gr.Markdown("### Please keep the object in the center of your image, click the Button to get interpretebale Result") + mode1_button = gr.Button("get interpretable Classification") + mode1_input_img=gr.Image() + mode1_output_img= gr.Image() + mode1_output_txt=gr.Markdown() + + #clear if reupload + mode1_state = gr.State() + mode1_input_img.upload(fn=reset_state_mod1, outputs=[mode1_output_img, mode1_state, mode1_output_txt]) + #clear if reupload + + + mode1_button.click(fn=direct_inference,inputs=mode1_input_img, outputs=[mode1_output_img,mode1_output_txt]) + + # 模式2界面 + with gr.Column(visible=False) as mode2_ui: + gr.Markdown("### Please keep the object in the center of your image, click the Button 'Get some interpretable Features' to get options") + image_input=gr.Image() + gallery_output = gr.Gallery(label="Initial Label") + text_output=gr.Markdown() + but_generate=gr.Button("Get some interpretable Features") + but_feedback_A=gr.Button("A") + but_feedback_B=gr.Button("B") + but_feedback_C=gr.Button("C") + but_feedback_D=gr.Button("D") + + key_op = gr.State() + A= gr.State("A") + B= gr.State("B") + C= gr.State("C") + D= gr.State("D") + + #clear if reupload + image_input.upload( + fn=reset_state_mod2, + outputs=[gallery_output, key_op, text_output,but_generate,but_feedback_A,but_feedback_B,but_feedback_C,but_feedback_D] + ) + + #clear if reupload + + + + but_generate.click(fn=get_features_on_interface, inputs=image_input, outputs=[gallery_output,key_op,text_output,but_generate]) + + but_feedback_A.click(fn=post_next_image, inputs=[A,key_op], outputs=[text_output,but_feedback_A,but_feedback_B,but_feedback_C,but_feedback_D]) + but_feedback_B.click(fn=post_next_image, inputs=[B,key_op], outputs=[text_output,but_feedback_A,but_feedback_B,but_feedback_C,but_feedback_D]) + but_feedback_C.click(fn=post_next_image, inputs=[C,key_op], outputs=[text_output,but_feedback_A,but_feedback_B,but_feedback_C,but_feedback_D]) + but_feedback_D.click(fn=post_next_image, inputs=[D,key_op], outputs=[text_output,but_feedback_A,but_feedback_B,but_feedback_C,but_feedback_D]) + # but_feedback_B.click(fn=post_next_image, inputs=image_list, outputs=[image_list,image_output,text_output]) + + + + # 状态输出 + status_output = gr.Textbox(label="Status") + + # 选择器点击事件绑定 + mode_selector.change(choose_mode, inputs=mode_selector, outputs=[mode1_ui, mode2_ui, status_output]) + +demo.launch() diff --git a/architectures/FinalLayer.py b/architectures/FinalLayer.py new file mode 100644 index 0000000000000000000000000000000000000000..af1a55a667c462ec8f256f9d28aefdc5e77d6cae --- /dev/null +++ b/architectures/FinalLayer.py @@ -0,0 +1,36 @@ +import torch +from torch import nn + +from architectures.SLDDLevel import SLDDLevel + + +class FinalLayer(): + def __init__(self, num_classes, n_features): + super().__init__() + self.avgpool = torch.nn.AdaptiveAvgPool2d((1, 1)) + self.linear = nn.Linear(n_features, num_classes) + self.featureDropout = torch.nn.Dropout(0.2) + self.selection = None + + def transform_output(self, feature_maps, with_feature_maps, + with_final_features): + if self.selection 
is not None: + feature_maps = feature_maps[:, self.selection] + x = self.avgpool(feature_maps) + pre_out = torch.flatten(x, 1) + final_features = self.featureDropout(pre_out) + final = self.linear(final_features) + final = [final] + if with_feature_maps: + final.append(feature_maps) + if with_final_features: + final.append(final_features) + if len(final) == 1: + final = final[0] + return final + + + def set_model_sldd(self, selection, weight, mean, std, bias = None): + self.selection = selection + self.linear = SLDDLevel(selection, weight, mean, std, bias) + self.featureDropout = torch.nn.Dropout(0.1) \ No newline at end of file diff --git a/architectures/SLDDLevel.py b/architectures/SLDDLevel.py new file mode 100644 index 0000000000000000000000000000000000000000..bc214c88f384690d29bda97d7ed82a8c01e866da --- /dev/null +++ b/architectures/SLDDLevel.py @@ -0,0 +1,37 @@ +import torch.nn + + +class SLDDLevel(torch.nn.Module): + def __init__(self, selection, weight_at_selection,mean, std, bias=None): + super().__init__() + self.register_buffer('selection', torch.tensor(selection, dtype=torch.long)) + num_classes, n_features = weight_at_selection.shape + selected_mean = mean + selected_std = std + if len(selected_mean) != len(selection): + selected_mean = selected_mean[selection] + selected_std = selected_std[selection] + self.mean = torch.nn.Parameter(selected_mean) + self.std = torch.nn.Parameter(selected_std) + if bias is not None: + self.layer = torch.nn.Linear(n_features, num_classes) + self.layer.bias = torch.nn.Parameter(bias, requires_grad=False) + else: + self.layer = torch.nn.Linear(n_features, num_classes, bias=False) + self.layer.weight = torch.nn.Parameter(weight_at_selection, requires_grad=False) + + @property + def weight(self): + return self.layer.weight + + @property + def bias(self): + if self.layer.bias is None: + return torch.zeros(self.layer.out_features) + else: + return self.layer.bias + + + def forward(self, input): + input = (input - self.mean) / torch.clamp(self.std, min=1e-6) + return self.layer(input) diff --git a/architectures/__pycache__/FinalLayer.cpython-310.pyc b/architectures/__pycache__/FinalLayer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9a61e00b4a63ecca65185c2f0157f001dbead798 Binary files /dev/null and b/architectures/__pycache__/FinalLayer.cpython-310.pyc differ diff --git a/architectures/__pycache__/SLDDLevel.cpython-310.pyc b/architectures/__pycache__/SLDDLevel.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6a5d50a5be90c92f0840009d5a78ce7a4a4821df Binary files /dev/null and b/architectures/__pycache__/SLDDLevel.cpython-310.pyc differ diff --git a/architectures/__pycache__/model_mapping.cpython-310.pyc b/architectures/__pycache__/model_mapping.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..515201d4169359545fb87981950d8c22a4181b40 Binary files /dev/null and b/architectures/__pycache__/model_mapping.cpython-310.pyc differ diff --git a/architectures/__pycache__/resnet.cpython-310.pyc b/architectures/__pycache__/resnet.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..93dccc8da3700ae7fdcbfe887f868ef16d935b95 Binary files /dev/null and b/architectures/__pycache__/resnet.cpython-310.pyc differ diff --git a/architectures/__pycache__/utils.cpython-310.pyc b/architectures/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3a224141ff65b31e88257f416c3f7bfc2aaafbfa 
Binary files /dev/null and b/architectures/__pycache__/utils.cpython-310.pyc differ diff --git a/architectures/model_mapping.py b/architectures/model_mapping.py new file mode 100644 index 0000000000000000000000000000000000000000..52df91009029b653b420ff03562616b2389eaa68 --- /dev/null +++ b/architectures/model_mapping.py @@ -0,0 +1,7 @@ +from architectures.resnet import resnet50 + + +def get_model(arch, num_classes, changed_strides=True): + if arch == "resnet50": + model = resnet50(True, num_classes=num_classes, changed_strides=changed_strides) + return model \ No newline at end of file diff --git a/architectures/resnet.py b/architectures/resnet.py new file mode 100644 index 0000000000000000000000000000000000000000..eaaa5d3c22e6ab85f9ac63b29462d20aec9594d3 --- /dev/null +++ b/architectures/resnet.py @@ -0,0 +1,420 @@ +import copy +import time + +import torch +import torch.nn as nn +from torch.hub import load_state_dict_from_url +from torchvision.models import get_model + +# from scripts.modelExtensions.crossModelfunctions import init_experiment_stuff + + + +__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101', + 'resnet152', 'resnext50_32x4d', 'resnext101_32x8d', + 'wide_resnet50_2', 'wide_resnet101_2', + 'wide_resnet50_3', 'wide_resnet50_4', 'wide_resnet50_5', + 'wide_resnet50_6', ] + +from architectures.FinalLayer import FinalLayer +from architectures.utils import SequentialWithArgs + +model_urls = { + 'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth', + 'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth', + 'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth', + 'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth', + 'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth', + 'resnext50_32x4d': 'https://download.pytorch.org/models/resnext50_32x4d-7cdf4587.pth', + 'resnext101_32x8d': 'https://download.pytorch.org/models/resnext101_32x8d-8ba56ff5.pth', + 'wide_resnet50_2': 'https://download.pytorch.org/models/wide_resnet50_2-95faca4d.pth', + 'wide_resnet101_2': 'https://download.pytorch.org/models/wide_resnet101_2-32ee1156.pth', +} + + +def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1): + """3x3 convolution with padding""" + return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, + padding=dilation, groups=groups, bias=False, dilation=dilation) + + +def conv1x1(in_planes, out_planes, stride=1): + """1x1 convolution""" + return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False) + + +class BasicBlock(nn.Module): + expansion = 1 + __constants__ = ['downsample'] + + def __init__(self, inplanes, planes, stride=1, downsample=None, groups=1, + base_width=64, dilation=1, norm_layer=None, features=None): + super(BasicBlock, self).__init__() + if norm_layer is None: + norm_layer = nn.BatchNorm2d + if groups != 1 or base_width != 64: + raise ValueError('BasicBlock only supports groups=1 and base_width=64') + if dilation > 1: + raise NotImplementedError("Dilation > 1 not supported in BasicBlock") + # Both self.conv1 and self.downsample layers downsample the input when stride != 1 + self.conv1 = conv3x3(inplanes, planes, stride) + self.bn1 = norm_layer(planes) + self.relu = nn.ReLU(inplace=True) + self.conv2 = conv3x3(planes, planes) + self.bn2 = norm_layer(planes) + self.downsample = downsample + self.stride = stride + + + def forward(self, x, no_relu=False): + identity = x + + out = self.conv1(x) + out = self.bn1(out) + out = 
self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + + if self.downsample is not None: + identity = self.downsample(x) + + + + out += identity + + if no_relu: + return out + return self.relu(out) + + +class Bottleneck(nn.Module): + expansion = 4 + __constants__ = ['downsample'] + + def __init__(self, inplanes, planes, stride=1, downsample=None, groups=1, + base_width=64, dilation=1, norm_layer=None, features=None): + super(Bottleneck, self).__init__() + if norm_layer is None: + norm_layer = nn.BatchNorm2d + width = int(planes * (base_width / 64.)) * groups + # Both self.conv2 and self.downsample layers downsample the input when stride != 1 + self.conv1 = conv1x1(inplanes, width) + self.bn1 = norm_layer(width) + self.conv2 = conv3x3(width, width, stride, groups, dilation) + self.bn2 = norm_layer(width) + if features is None: + self.conv3 = conv1x1(width, planes * self.expansion) + self.bn3 = norm_layer(planes * self.expansion) + else: + self.conv3 = conv1x1(width, features) + self.bn3 = norm_layer(features) + + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + + def forward(self, x, no_relu=False, early_exit=False): + identity = x + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + + if no_relu: + return out + return self.relu(out) + + +class ResNet(nn.Module, FinalLayer): + + def __init__(self, block, layers, num_classes=1000, zero_init_residual=False, + groups=1, width_per_group=64, replace_stride_with_dilation=None, + norm_layer=None, changed_strides=False,): + super(ResNet, self).__init__() + if norm_layer is None: + norm_layer = nn.BatchNorm2d + self._norm_layer = norm_layer + widths = [64, 128, 256, 512] + self.inplanes = 64 + self.dilation = 1 + if replace_stride_with_dilation is None: + # each element in the tuple indicates if we should replace + # the 2x2 stride with a dilated convolution instead + replace_stride_with_dilation = [False, False, False] + if len(replace_stride_with_dilation) != 3: + raise ValueError("replace_stride_with_dilation should be None " + "or a 3-element tuple, got {}".format(replace_stride_with_dilation)) + self.groups = groups + self.base_width = width_per_group + self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=7, stride=2, padding=3, + bias=False) + self.bn1 = norm_layer(self.inplanes) + self.relu = nn.ReLU(inplace=True) + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + self.layer1 = self._make_layer(block, 64, layers[0]) + self.layer2 = self._make_layer(block, 128, layers[1], stride=2, + dilate=replace_stride_with_dilation[0]) + self.sstride = 2 + if changed_strides: + self.sstride = 1 + self.layer3 = self._make_layer(block, 256, layers[2], stride=self.sstride, + dilate=replace_stride_with_dilation[1]) + self.stride = 2 + + if changed_strides: + self.stride = 1 + self.layer4 = self._make_layer(block, 512, layers[3], stride=self.stride, + dilate=replace_stride_with_dilation[2]) + FinalLayer.__init__(self, num_classes, 512 * block.expansion) + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + # Zero-initialize the last BN in each residual branch, + # so that the 
residual branch starts with zeros, and each residual block behaves like an identity. + # This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677 + if zero_init_residual: + for m in self.modules(): + if isinstance(m, Bottleneck): + nn.init.constant_(m.bn3.weight, 0) + elif isinstance(m, BasicBlock): + nn.init.constant_(m.bn2.weight, 0) + + def _make_layer(self, block, planes, blocks, stride=1, dilate=False, last_block_f=None): + norm_layer = self._norm_layer + downsample = None + previous_dilation = self.dilation + if dilate: + self.dilation *= stride + stride = 1 + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + conv1x1(self.inplanes, planes * block.expansion, stride), + norm_layer(planes * block.expansion), + ) + + layers = [] + layers.append(block(self.inplanes, planes, stride, downsample, self.groups, + self.base_width, previous_dilation, norm_layer)) + self.inplanes = planes * block.expansion + for _ in range(1, blocks): + krepeep = None + if last_block_f is not None and _ == blocks - 1: + krepeep = last_block_f + layers.append(block(self.inplanes, planes, groups=self.groups, + base_width=self.base_width, dilation=self.dilation, + norm_layer=norm_layer, features=krepeep)) + + return SequentialWithArgs(*layers) + + def _forward(self, x, with_feature_maps=False, with_final_features=False): + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + x = self.maxpool(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + feature_maps = self.layer4(x, no_relu=True) + feature_maps = torch.functional.F.relu(feature_maps) + return self.transform_output( feature_maps, with_feature_maps, + with_final_features) + + # Allow for accessing forward method in a inherited class + forward = _forward + + +def _resnet(arch, block, layers, pretrained, progress, **kwargs): + model = ResNet(block, layers, **kwargs) + if pretrained: + state_dict = load_state_dict_from_url(model_urls[arch], + progress=progress) + if kwargs["num_classes"] == 1000: + state_dict["linear.weight"] = state_dict["fc.weight"] + state_dict["linear.bias"] = state_dict["fc.bias"] + model.load_state_dict(state_dict, strict=False) + return model + + +def resnet18(pretrained=False, progress=True, **kwargs): + r"""ResNet-18 model from + `"Deep Residual Learning for Image Recognition" `_ + + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + progress (bool): If True, displays a progress bar of the download to stderr + """ + return _resnet('resnet18', BasicBlock, [2, 2, 2, 2], pretrained, progress, + **kwargs) + + +def resnet34(pretrained=False, progress=True, **kwargs): + r"""ResNet-34 model from + `"Deep Residual Learning for Image Recognition" `_ + + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + progress (bool): If True, displays a progress bar of the download to stderr + """ + return _resnet('resnet34', BasicBlock, [3, 4, 6, 3], pretrained, progress, + **kwargs) + + +def resnet50(pretrained=False, progress=True, **kwargs): + r"""ResNet-50 model from + `"Deep Residual Learning for Image Recognition" `_ + + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + progress (bool): If True, displays a progress bar of the download to stderr + """ + return _resnet('resnet50', Bottleneck, [3, 4, 6, 3], pretrained, progress, + **kwargs) + + +def resnet101(pretrained=False, progress=True, **kwargs): + r"""ResNet-101 model from + `"Deep Residual Learning for Image Recognition" `_ + + 
Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + progress (bool): If True, displays a progress bar of the download to stderr + """ + return _resnet('resnet101', Bottleneck, [3, 4, 23, 3], pretrained, progress, + **kwargs) + + +def resnet152(pretrained=False, progress=True, **kwargs): + r"""ResNet-152 model from + `"Deep Residual Learning for Image Recognition" `_ + + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + progress (bool): If True, displays a progress bar of the download to stderr + """ + return _resnet('resnet152', Bottleneck, [3, 8, 36, 3], pretrained, progress, + **kwargs) + + +def resnext50_32x4d(pretrained=False, progress=True, **kwargs): + r"""ResNeXt-50 32x4d model from + `"Aggregated Residual Transformation for Deep Neural Networks" `_ + + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + progress (bool): If True, displays a progress bar of the download to stderr + """ + kwargs['groups'] = 32 + kwargs['width_per_group'] = 4 + return _resnet('resnext50_32x4d', Bottleneck, [3, 4, 6, 3], + pretrained, progress, **kwargs) + + +def resnext101_32x8d(pretrained=False, progress=True, **kwargs): + r"""ResNeXt-101 32x8d model from + `"Aggregated Residual Transformation for Deep Neural Networks" `_ + + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + progress (bool): If True, displays a progress bar of the download to stderr + """ + kwargs['groups'] = 32 + kwargs['width_per_group'] = 8 + return _resnet('resnext101_32x8d', Bottleneck, [3, 4, 23, 3], + pretrained, progress, **kwargs) + + +def wide_resnet50_2(pretrained=False, progress=True, **kwargs): + r"""Wide ResNet-50-2 model from + `"Wide Residual Networks" `_ + + The model is the same as ResNet except for the bottleneck number of channels + which is twice larger in every block. The number of channels in outer 1x1 + convolutions is the same, e.g. last block in ResNet-50 has 2048-512-2048 + channels, and in Wide ResNet-50-2 has 2048-1024-2048. 
+ + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + progress (bool): If True, displays a progress bar of the download to stderr + """ + kwargs['width_per_group'] = 64 * 2 + return _resnet('wide_resnet50_2', Bottleneck, [3, 4, 6, 3], + pretrained, progress, **kwargs) + + +def wide_resnet50_3(pretrained=False, progress=True, **kwargs): + r"""Wide ResNet-50-3 model + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + progress (bool): If True, displays a progress bar of the download to stderr + """ + kwargs['width_per_group'] = 64 * 3 + return _resnet('wide_resnet50_3', Bottleneck, [3, 4, 6, 3], + pretrained, progress, **kwargs) + + +def wide_resnet50_4(pretrained=False, progress=True, **kwargs): + r"""Wide ResNet-50-4 model + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + progress (bool): If True, displays a progress bar of the download to stderr + """ + kwargs['width_per_group'] = 64 * 4 + return _resnet('wide_resnet50_4', Bottleneck, [3, 4, 6, 3], + pretrained, progress, **kwargs) + + +def wide_resnet50_5(pretrained=False, progress=True, **kwargs): + r"""Wide ResNet-50-5 model + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + progress (bool): If True, displays a progress bar of the download to stderr + """ + kwargs['width_per_group'] = 64 * 5 + return _resnet('wide_resnet50_5', Bottleneck, [3, 4, 6, 3], + pretrained, progress, **kwargs) + + +def wide_resnet50_6(pretrained=False, progress=True, **kwargs): + r"""Wide ResNet-50-6 model + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + progress (bool): If True, displays a progress bar of the download to stderr + """ + kwargs['width_per_group'] = 64 * 6 + return _resnet('wide_resnet50_6', Bottleneck, [3, 4, 6, 3], + pretrained, progress, **kwargs) + + +def wide_resnet101_2(pretrained=False, progress=True, **kwargs): + r"""Wide ResNet-101-2 model from + `"Wide Residual Networks" `_ + + The model is the same as ResNet except for the bottleneck number of channels + which is twice larger in every block. The number of channels in outer 1x1 + convolutions is the same, e.g. last block in ResNet-50 has 2048-512-2048 + channels, and in Wide ResNet-50-2 has 2048-1024-2048. 
+ + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + progress (bool): If True, displays a progress bar of the download to stderr + """ + kwargs['width_per_group'] = 64 * 2 + return _resnet('wide_resnet101_2', Bottleneck, [3, 4, 23, 3], + pretrained, progress, **kwargs) diff --git a/architectures/utils.py b/architectures/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..ed4cc78fc2799c675098bc73f0a9fb1719fb64b1 --- /dev/null +++ b/architectures/utils.py @@ -0,0 +1,17 @@ +import torch + + + +class SequentialWithArgs(torch.nn.Sequential): + def forward(self, input, *args, **kwargs): + vs = list(self._modules.values()) + l = len(vs) + for i in range(l): + if i == l-1: + input = vs[i](input, *args, **kwargs) + else: + input = vs[i](input) + return input + + + diff --git a/configs/__pycache__/dataset_params.cpython-310.pyc b/configs/__pycache__/dataset_params.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f91c9298daab0217462fe2590a20695b80575e85 Binary files /dev/null and b/configs/__pycache__/dataset_params.cpython-310.pyc differ diff --git a/configs/__pycache__/optim_params.cpython-310.pyc b/configs/__pycache__/optim_params.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d0b3f696299233ff7e5b5d5936ea47d83c8f8b97 Binary files /dev/null and b/configs/__pycache__/optim_params.cpython-310.pyc differ diff --git a/configs/architecture_params.py b/configs/architecture_params.py new file mode 100644 index 0000000000000000000000000000000000000000..21a5f2b7fb72dfbee2b4487cabca2d2e840ad938 --- /dev/null +++ b/configs/architecture_params.py @@ -0,0 +1 @@ +architecture_params = {"resnet50": {"beta":0.196}} \ No newline at end of file diff --git a/configs/dataset_params.py b/configs/dataset_params.py new file mode 100644 index 0000000000000000000000000000000000000000..3f227da674de51bf3c4ac0fe3a8faff2004775a6 --- /dev/null +++ b/configs/dataset_params.py @@ -0,0 +1,22 @@ +import torch + +from configs.optim_params import EvaluatedDict + +dataset_constants = {"CUB2011":{"num_classes":200}, + "TravelingBirds":{"num_classes":200}, + "ImageNet":{"num_classes":1000}, + "StanfordCars":{"num_classes":196}, + "FGVCAircraft": {"num_classes":100}} + +normalize_params = {"CUB2011":{"mean": torch.tensor([0.4853, 0.4964, 0.4295]),"std":torch.tensor([0.2300, 0.2258, 0.2625])}, +"TravelingBirds":{"mean": torch.tensor([0.4584, 0.4369, 0.3957]),"std":torch.tensor([0.2610, 0.2569, 0.2722])}, + "ImageNet":{'mean': torch.tensor([0.485, 0.456, 0.406]),'std': torch.tensor([0.229, 0.224, 0.225])} , +"StanfordCars":{'mean': torch.tensor([0.4593, 0.4466, 0.4453]),'std': torch.tensor([0.2920, 0.2910, 0.2988])} , + "FGVCAircraft":{'mean': torch.tensor([0.4827, 0.5130, 0.5352]), + 'std': torch.tensor([0.2236, 0.2170, 0.2478]),} + } + + +dense_batch_size = EvaluatedDict({False: 16,True: 1024,}, lambda x: x == "ImageNet") + +ft_batch_size = EvaluatedDict({False: 16,True: 1024,}, lambda x: x == "ImageNet")# Untested \ No newline at end of file diff --git a/configs/optim_params.py b/configs/optim_params.py new file mode 100644 index 0000000000000000000000000000000000000000..c0fad011caec798b4d51948b28b4d0885c414b59 --- /dev/null +++ b/configs/optim_params.py @@ -0,0 +1,22 @@ +# order: lr,weight_decay, step_lr, step_lr_gamma +import math + + +class EvaluatedDict: + def __init__(self, d, func): + self.dict = d + self.func = func + + def __getitem__(self, key): + return self.dict[self.func(key)] + +dense_params = 
EvaluatedDict({False: [0.005, 0.0005, 30, 0.4, 150],True: [None,None,None,None,None],}, lambda x: x == "ImageNet") +def calculate_lr_from_args( epochs, step_lr, start_lr, step_lr_decay): + # Gets the final learning rate after dense training with step_lr_schedule. + n_steps = math.floor((epochs - step_lr) / step_lr) + final_lr = start_lr * step_lr_decay ** n_steps + return final_lr + +ft_params =EvaluatedDict({False: [1e-4, 0.0005, 10, 0.4, 40],True:[[calculate_lr_from_args(150,30,0.005, 0.4), 0.0005, 10, 0.4, 40]]}, lambda x: x == "ImageNet") + + diff --git a/configs/qsenn_training_params.py b/configs/qsenn_training_params.py new file mode 100644 index 0000000000000000000000000000000000000000..5ca03c994ee04c47016c89357ff5d4953f634281 --- /dev/null +++ b/configs/qsenn_training_params.py @@ -0,0 +1,11 @@ +from configs.sldd_training_params import OptimizationScheduler + + +class QSENNScheduler(OptimizationScheduler): + def get_params(self): + params = super().get_params() + if self.n_calls >= 2: + params[0] = params[0] * 0.9**(self.n_calls-2) + if 2 <= self.n_calls <= 4: + params[-2] = 10# Change num epochs to 10 for iterative finetuning + return params diff --git a/configs/sldd_training_params.py b/configs/sldd_training_params.py new file mode 100644 index 0000000000000000000000000000000000000000..5a605602a1a399d0dd55e1f53d8cbaa8c5d73dc0 --- /dev/null +++ b/configs/sldd_training_params.py @@ -0,0 +1,17 @@ +from configs.optim_params import dense_params, ft_params + + +class OptimizationScheduler: + def __init__(self, dataset): + self.dataset = dataset + self.n_calls = 0 + + + def get_params(self): + if self.n_calls == 0: # Return Deńse Params + params = dense_params[self.dataset]+ [False] + else: # Return Finetuning Params + params = ft_params[self.dataset]+ [True] + self.n_calls += 1 + return params + diff --git a/dataset_classes/__pycache__/cub200.cpython-310.pyc b/dataset_classes/__pycache__/cub200.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e0798cd4897e3f59ad5b5072688ca33d8bc4b31e Binary files /dev/null and b/dataset_classes/__pycache__/cub200.cpython-310.pyc differ diff --git a/dataset_classes/__pycache__/stanfordcars.cpython-310.pyc b/dataset_classes/__pycache__/stanfordcars.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5e54b5188bd94de8a82b0639237f4ddf557cd52a Binary files /dev/null and b/dataset_classes/__pycache__/stanfordcars.cpython-310.pyc differ diff --git a/dataset_classes/__pycache__/travelingbirds.cpython-310.pyc b/dataset_classes/__pycache__/travelingbirds.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7cc7e30e2027a3730c1873443049962b75998fb0 Binary files /dev/null and b/dataset_classes/__pycache__/travelingbirds.cpython-310.pyc differ diff --git a/dataset_classes/__pycache__/utils.cpython-310.pyc b/dataset_classes/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5bb96db5a1c1e6f6cd7e6efc268cffd6d6e0004b Binary files /dev/null and b/dataset_classes/__pycache__/utils.cpython-310.pyc differ diff --git a/dataset_classes/cub200.py b/dataset_classes/cub200.py new file mode 100644 index 0000000000000000000000000000000000000000..f7557c74fce35d23d2e2cd04a923c4a81dd6e238 --- /dev/null +++ b/dataset_classes/cub200.py @@ -0,0 +1,96 @@ +# Dataset should lie under /root/ +# root is currently set to ~/tmp/Datasets/CUB200 +# If cropped iamges, like for PIP-Net, ProtoPool, etc. 
are used, then the crop_root should be set to a folder containing the +# cropped images in the expected structure, obtained by following ProtoTree's instructions. +# https://github.com/M-Nauta/ProtoTree/blob/main/README.md#preprocessing-cub +import os +from pathlib import Path + +import numpy as np +import pandas as pd +from torch.utils.data import Dataset +from torchvision.datasets.folder import default_loader + +from dataset_classes.utils import txt_load + + +class CUB200Class(Dataset): + root = Path.home() / "tmp/Datasets/CUB200" + crop_root = Path.home() / "tmp/Datasets/PPCUB200" + base_folder = 'CUB_200_2011/images' + def __init__(self, train, transform, crop=True): + self.train = train + self.transform = transform + self.crop = crop + self._load_metadata() + self.loader = default_loader + + if crop: + self.adapt_to_crop() + + def _load_metadata(self): + images = pd.read_csv(os.path.join(self.root, 'CUB_200_2011', 'images.txt'), sep=' ', + names=['img_id', 'filepath']) + image_class_labels = pd.read_csv(os.path.join(self.root, 'CUB_200_2011', 'image_class_labels.txt'), + sep=' ', names=['img_id', 'target']) + train_test_split = pd.read_csv(os.path.join(self.root, 'CUB_200_2011', 'train_test_split.txt'), + sep=' ', names=['img_id', 'is_training_img']) + data = images.merge(image_class_labels, on='img_id') + self.data = data.merge(train_test_split, on='img_id') + if self.train: + self.data = self.data[self.data.is_training_img == 1] + else: + self.data = self.data[self.data.is_training_img == 0] + + def __len__(self): + return len(self.data) + + def adapt_to_crop(self): + # ds_name = [x for x in self.cropped_dict.keys() if x in self.root][0] + self.root = self.crop_root + folder_name = "train" if self.train else "test" + folder_name = folder_name + "_cropped" + self.base_folder = 'CUB_200_2011/' + folder_name + + def __getitem__(self, idx): + sample = self.data.iloc[idx] + path = os.path.join(self.root, self.base_folder, sample.filepath) + target = sample.target - 1 # Targets start at 1 by default, so shift to 0 + img = self.loader(path) + img = self.transform(img) + return img, target + + @classmethod + def get_image_attribute_labels(self, train=False): + image_attribute_labels = pd.read_csv( + os.path.join('/home/qixuan/tmp/Datasets/CUB200', 'CUB_200_2011', "attributes", + 'image_attribute_labels.txt'), + sep=' ', names=['img_id', 'attribute', "is_present", "certainty", "time"], on_bad_lines="skip") + train_test_split = pd.read_csv(os.path.join(self.root, 'CUB_200_2011', 'train_test_split.txt'), + sep=' ', names=['img_id', 'is_training_img']) + merged = image_attribute_labels.merge(train_test_split, on="img_id") + filtered_data = merged[merged["is_training_img"] == train] + return filtered_data + + + @staticmethod + def filter_attribute_labels(labels, min_certainty=3): + is_invisible_present = labels[labels["certainty"] == 1]["is_present"].sum() + if is_invisible_present != 0: + raise ValueError("Invisible present") + labels["img_id"] -= min(labels["img_id"]) + labels["img_id"] = fillholes_in_array(labels["img_id"]) + labels[labels["certainty"] == 1]["certainty"] = 4 + labels = labels[labels["certainty"] >= min_certainty] + labels["attribute"] -= min(labels["attribute"]) + labels = labels[["img_id", "attribute", "is_present"]] + labels["is_present"] = labels["is_present"].astype(bool) + return labels + + + +def fillholes_in_array(array): + unique_values = np.unique(array) + mapping = {x: i for i, x in enumerate(unique_values)} + array = array.map(mapping) + return array diff --git 
a/dataset_classes/stanfordcars.py b/dataset_classes/stanfordcars.py new file mode 100644 index 0000000000000000000000000000000000000000..0be682a5d164a8b39cff5bd9cca82cc8cf5ebe53 --- /dev/null +++ b/dataset_classes/stanfordcars.py @@ -0,0 +1,121 @@ +import pathlib +from typing import Callable, Optional, Any, Tuple + +import numpy as np +import pandas as pd +from PIL import Image +from torchvision.datasets import VisionDataset +from torchvision.datasets.utils import download_and_extract_archive, download_url + + +class StanfordCarsClass(VisionDataset): + """`Stanford Cars `_ Dataset + + The Cars dataset contains 16,185 images of 196 classes of cars. The data is + split into 8,144 training images and 8,041 testing images, where each class + has been split roughly in a 50-50 split + + .. note:: + + This class needs `scipy `_ to load target files from `.mat` format. + + Args: + root (string): Root directory of dataset + split (string, optional): The dataset split, supports ``"train"`` (default) or ``"test"``. + transform (callable, optional): A function/transform that takes in an PIL image + and returns a transformed version. E.g, ``transforms.RandomCrop`` + target_transform (callable, optional): A function/transform that takes in the + target and transforms it. + download (bool, optional): If True, downloads the dataset from the internet and + puts it in root directory. If dataset is already downloaded, it is not + downloaded again.""" + root = pathlib.Path.home() / "tmp" / "Datasets" / "StanfordCars" + def __init__( + self, + train: bool = True, + transform: Optional[Callable] = None, + target_transform: Optional[Callable] = None, + download: bool = True, + ) -> None: + + try: + import scipy.io as sio + except ImportError: + raise RuntimeError("Scipy is not found. This dataset needs to have scipy installed: pip install scipy") + + super().__init__(self.root, transform=transform, target_transform=target_transform) + + self.train = train + self._base_folder = pathlib.Path(self.root) / "stanford_cars" + devkit = self._base_folder / "devkit" + + if train: + self._annotations_mat_path = devkit / "cars_train_annos.mat" + self._images_base_path = self._base_folder / "cars_train" + else: + self._annotations_mat_path = self._base_folder / "cars_test_annos_withlabels.mat" + self._images_base_path = self._base_folder / "cars_test" + + if download: + self.download() + + if not self._check_exists(): + raise RuntimeError("Dataset not found. 
You can use download=True to download it") + + self.samples = [ + ( + str(self._images_base_path / annotation["fname"]), + annotation["class"] - 1, # Original target mapping starts from 1, hence -1 + ) + for annotation in sio.loadmat(self._annotations_mat_path, squeeze_me=True)["annotations"] + ] + self.targets = np.array([x[1] for x in self.samples]) + self.classes = sio.loadmat(str(devkit / "cars_meta.mat"), squeeze_me=True)["class_names"].tolist() + self.class_to_idx = {cls: i for i, cls in enumerate(self.classes)} + + def __len__(self) -> int: + return len(self.samples) + + def __getitem__(self, idx: int) -> Tuple[Any, Any]: + """Returns pil_image and class_id for given index""" + image_path, target = self.samples[idx] + pil_image = Image.open(image_path).convert("RGB") + + if self.transform is not None: + pil_image = self.transform(pil_image) + if self.target_transform is not None: + target = self.target_transform(target) + return pil_image, target + + def download(self) -> None: + if self._check_exists(): + return + + download_and_extract_archive( + url="https://ai.stanford.edu/~jkrause/cars/car_devkit.tgz", + download_root=str(self._base_folder), + md5="c3b158d763b6e2245038c8ad08e45376", + ) + if self.train: + download_and_extract_archive( + url="https://ai.stanford.edu/~jkrause/car196/cars_train.tgz", + download_root=str(self._base_folder), + md5="065e5b463ae28d29e77c1b4b166cfe61", + ) + else: + download_and_extract_archive( + url="https://ai.stanford.edu/~jkrause/car196/cars_test.tgz", + download_root=str(self._base_folder), + md5="4ce7ebf6a94d07f1952d94dd34c4d501", + ) + download_url( + url="https://ai.stanford.edu/~jkrause/car196/cars_test_annos_withlabels.mat", + root=str(self._base_folder), + md5="b0a2b23655a3edd16d84508592a98d10", + ) + + def _check_exists(self) -> bool: + if not (self._base_folder / "devkit").is_dir(): + return False + + return self._annotations_mat_path.exists() and self._images_base_path.is_dir() diff --git a/dataset_classes/travelingbirds.py b/dataset_classes/travelingbirds.py new file mode 100644 index 0000000000000000000000000000000000000000..551ce1fd46b9b84e572ea18f4adc6ecd73cea00d --- /dev/null +++ b/dataset_classes/travelingbirds.py @@ -0,0 +1,59 @@ +# TravelingBirds dataset needs to be downloaded from https://worksheets.codalab.org/bundles/0x518829de2aa440c79cd9d75ef6669f27 +# as it comes from https://github.com/yewsiang/ConceptBottleneck +import os +from pathlib import Path + +import numpy as np +import pandas as pd + +from dataset_classes.cub200 import CUB200Class +from dataset_classes.utils import index_list_with_sorting, mask_list + + +class TravelingBirds(CUB200Class): + init_base_folder = 'CUB_fixed' + root = Path.home() / "tmp/Datasets/TravelingBirds" + crop_root = Path.home() / "tmp/Datasets/PPTravelingBirds" + def get_all_samples_dir(self, dir): + + self.base_folder = os.path.join(self.init_base_folder, dir) + main_dir = Path(self.root) / self.init_base_folder / dir + return self.get_all_sample(main_dir) + + def adapt_to_crop(self): + self.root = self.crop_root + folder_name = "train" if self.train else "test" + folder_name = folder_name + "_cropped" + self.base_folder = 'CUB_fixed/' + folder_name + + def get_all_sample(self, dir): + answer = [] + for i, sub_dir in enumerate(sorted(os.listdir(dir))): + class_dir = dir / sub_dir + for single_img in os.listdir(class_dir): + answer.append([Path(sub_dir) / single_img, i + 1]) + return answer + def _load_metadata(self): + train_test_split = pd.read_csv( + os.path.join(Path(self.root).parent / 
"CUB200", 'CUB_200_2011', 'train_test_split.txt'), + sep=' ', names=['img_id', 'is_training_img']) + data = pd.read_csv( + os.path.join(Path(self.root).parent / "CUB200", 'CUB_200_2011', 'images.txt'), + sep=' ', names=['img_id', "path"]) + img_dict = {x[1]: x[0] for x in data.values} + # TravelingBirds has all train+test images in both folders, just with different backgrounds. + # They are separated by train_test_split of CUB200. + if self.train: + samples = self.get_all_samples_dir("train") + mask = train_test_split["is_training_img"] == 1 + else: + samples = self.get_all_samples_dir("test") + mask = train_test_split["is_training_img"] == 0 + ids = np.array([img_dict[str(x[0])] for x in samples]) + sorted = np.argsort(ids) + samples = index_list_with_sorting(samples, sorted) + samples = mask_list(samples, mask) + filepaths = [x[0] for x in samples] + labels = [x[1] for x in samples] + samples = pd.DataFrame({"filepath": filepaths, "target": labels}) + self.data = samples diff --git a/dataset_classes/utils.py b/dataset_classes/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..5f0039ba93d0966230da88f2568d02bb7cebeebf --- /dev/null +++ b/dataset_classes/utils.py @@ -0,0 +1,16 @@ +def index_list_with_sorting(list_to_sort, sorting_list): + answer = [] + for entry in sorting_list: + answer.append(list_to_sort[entry]) + return answer + + +def mask_list(list_input, mask): + return [x for i, x in enumerate(list_input) if mask[i]] + + +def txt_load(filename): + with open(filename, 'r') as f: + data = f.read() + return data + diff --git a/environment.yml b/environment.yml new file mode 100644 index 0000000000000000000000000000000000000000..e4e2f7b3680115f3e38c80511baede60fda0db03 --- /dev/null +++ b/environment.yml @@ -0,0 +1,117 @@ +name: QSENNEnv +channels: + - pytorch + - nvidia + - defaults +dependencies: + - _libgcc_mutex=0.1=main + - _openmp_mutex=5.1=1_gnu + - blas=1.0=mkl + - brotli-python=1.0.9=py310h6a678d5_7 + - bzip2=1.0.8=h7b6447c_0 + - ca-certificates=2023.12.12=h06a4308_0 + - certifi=2023.11.17=py310h06a4308_0 + - cffi=1.16.0=py310h5eee18b_0 + - charset-normalizer=2.0.4=pyhd3eb1b0_0 + - cryptography=41.0.7=py310hdda0065_0 + - cuda-cudart=12.1.105=0 + - cuda-cupti=12.1.105=0 + - cuda-libraries=12.1.0=0 + - cuda-nvrtc=12.1.105=0 + - cuda-nvtx=12.1.105=0 + - cuda-opencl=12.3.101=0 + - cuda-runtime=12.1.0=0 + - ffmpeg=4.3=hf484d3e_0 + - filelock=3.13.1=py310h06a4308_0 + - freetype=2.12.1=h4a9f257_0 + - giflib=5.2.1=h5eee18b_3 + - gmp=6.2.1=h295c915_3 + - gmpy2=2.1.2=py310heeb90bb_0 + - gnutls=3.6.15=he1e5248_0 + - idna=3.4=py310h06a4308_0 + - intel-openmp=2023.1.0=hdb19cb5_46306 + - jinja2=3.1.2=py310h06a4308_0 + - jpeg=9e=h5eee18b_1 + - lame=3.100=h7b6447c_0 + - lcms2=2.12=h3be6417_0 + - ld_impl_linux-64=2.38=h1181459_1 + - lerc=3.0=h295c915_0 + - libcublas=12.1.0.26=0 + - libcufft=11.0.2.4=0 + - libcufile=1.8.1.2=0 + - libcurand=10.3.4.107=0 + - libcusolver=11.4.4.55=0 + - libcusparse=12.0.2.55=0 + - libdeflate=1.17=h5eee18b_1 + - libffi=3.4.4=h6a678d5_0 + - libgcc-ng=11.2.0=h1234567_1 + - libgomp=11.2.0=h1234567_1 + - libiconv=1.16=h7f8727e_2 + - libidn2=2.3.4=h5eee18b_0 + - libjpeg-turbo=2.0.0=h9bf148f_0 + - libnpp=12.0.2.50=0 + - libnvjitlink=12.1.105=0 + - libnvjpeg=12.1.1.14=0 + - libpng=1.6.39=h5eee18b_0 + - libstdcxx-ng=11.2.0=h1234567_1 + - libtasn1=4.19.0=h5eee18b_0 + - libtiff=4.5.1=h6a678d5_0 + - libunistring=0.9.10=h27cfd23_0 + - libuuid=1.41.5=h5eee18b_0 + - libwebp=1.3.2=h11a3e52_0 + - libwebp-base=1.3.2=h5eee18b_0 + - 
llvm-openmp=14.0.6=h9e868ea_0 + - lz4-c=1.9.4=h6a678d5_0 + - markupsafe=2.1.3=py310h5eee18b_0 + - mkl=2023.1.0=h213fc3f_46344 + - mkl-service=2.4.0=py310h5eee18b_1 + - mkl_fft=1.3.8=py310h5eee18b_0 + - mkl_random=1.2.4=py310hdb19cb5_0 + - mpc=1.1.0=h10f8cd9_1 + - mpfr=4.0.2=hb69a4c5_1 + - mpmath=1.3.0=py310h06a4308_0 + - ncurses=6.4=h6a678d5_0 + - nettle=3.7.3=hbbd107a_1 + - networkx=3.1=py310h06a4308_0 + - numpy=1.26.3=py310h5f9d8c6_0 + - numpy-base=1.26.3=py310hb5e798b_0 + - openh264=2.1.1=h4ff587b_0 + - openjpeg=2.4.0=h3ad879b_0 + - openssl=3.0.12=h7f8727e_0 + - pillow=10.0.1=py310ha6cbd5a_0 + - pip=23.3.1=py310h06a4308_0 + - pycparser=2.21=pyhd3eb1b0_0 + - pyopenssl=23.2.0=py310h06a4308_0 + - pysocks=1.7.1=py310h06a4308_0 + - python=3.10.13=h955ad1f_0 + - pytorch=2.1.2=py3.10_cuda12.1_cudnn8.9.2_0 + - pytorch-cuda=12.1=ha16c6d3_5 + - pytorch-mutex=1.0=cuda + - pyyaml=6.0.1=py310h5eee18b_0 + - readline=8.2=h5eee18b_0 + - requests=2.31.0=py310h06a4308_0 + - setuptools=68.2.2=py310h06a4308_0 + - sqlite=3.41.2=h5eee18b_0 + - sympy=1.12=py310h06a4308_0 + - tbb=2021.8.0=hdb19cb5_0 + - tk=8.6.12=h1ccaba5_0 + - torchaudio=2.1.2=py310_cu121 + - torchtriton=2.1.0=py310 + - torchvision=0.16.2=py310_cu121 + - typing_extensions=4.7.1=py310h06a4308_0 + - urllib3=1.26.18=py310h06a4308_0 + - wheel=0.41.2=py310h06a4308_0 + - xz=5.4.5=h5eee18b_0 + - yaml=0.2.5=h7b6447c_0 + - zlib=1.2.13=h5eee18b_0 + - zstd=1.5.5=hc292b87_0 + - pip: + - fsspec==2023.12.2 + - glm-saga==0.1.2 + - pandas==2.1.4 + - python-dateutil==2.8.2 + - pytz==2023.3.post1 + - six==1.16.0 + - tqdm==4.66.1 + - tzdata==2023.4 +prefix: /home/norrenbr/anaconda/tmp/envs/QSENN-Minimal diff --git a/evaluation/Metrics/Dependence.py b/evaluation/Metrics/Dependence.py new file mode 100644 index 0000000000000000000000000000000000000000..5f1b26dfc19de0430925e38aac45ebcc33a94455 --- /dev/null +++ b/evaluation/Metrics/Dependence.py @@ -0,0 +1,21 @@ +import torch + + +def compute_contribution_top_feature(features, outputs, weights, labels): + with torch.no_grad(): + total_pre_softmax, predicted_classes = torch.max(outputs, dim=1) + feature_part = features * weights.to(features.device)[predicted_classes] + class_specific_feature_part = torch.zeros((weights.shape[0], features.shape[1],)) + feature_class_part = torch.zeros((weights.shape[0], features.shape[1],)) + for unique_class in predicted_classes.unique(): + mask = predicted_classes == unique_class + class_specific_feature_part[unique_class] = feature_part[mask].mean(dim=0) + gt_mask = labels == unique_class + feature_class_part[unique_class] = feature_part[gt_mask].mean(dim=0) + abs_features = feature_part.abs() + abs_sum = abs_features.sum(dim=1) + fractions_abs = abs_features / abs_sum[:, None] + abs_max = fractions_abs.max(dim=1)[0] + mask = ~torch.isnan(abs_max) + abs_max = abs_max[mask] + return abs_max.mean() \ No newline at end of file diff --git a/evaluation/Metrics/__pycache__/Dependence.cpython-310.pyc b/evaluation/Metrics/__pycache__/Dependence.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3d59c7cc91e9d0c3e84e533ca3876e3ee9850c52 Binary files /dev/null and b/evaluation/Metrics/__pycache__/Dependence.cpython-310.pyc differ diff --git a/evaluation/Metrics/__pycache__/cub_Alignment.cpython-310.pyc b/evaluation/Metrics/__pycache__/cub_Alignment.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d0de63438f9c10a07e3524cca02c53276c1d7622 Binary files /dev/null and 
b/evaluation/Metrics/__pycache__/cub_Alignment.cpython-310.pyc differ diff --git a/evaluation/Metrics/cub_Alignment.py b/evaluation/Metrics/cub_Alignment.py new file mode 100644 index 0000000000000000000000000000000000000000..9b4b41e427668f86ec530baab1796ac9d0678489 --- /dev/null +++ b/evaluation/Metrics/cub_Alignment.py @@ -0,0 +1,30 @@ +import numpy as np + +from dataset_classes.cub200 import CUB200Class + + +def get_cub_alignment_from_features(features_train_sorted): + metric_matrix = compute_metric_matrix(np.array(features_train_sorted), "train") + return np.mean(np.max(metric_matrix, axis=1)) + pass + + +def compute_metric_matrix(features, mode): + image_attribute_labels = CUB200Class.get_image_attribute_labels(train=mode == "train") + image_attribute_labels = CUB200Class.filter_attribute_labels(image_attribute_labels) + matrix_shape = ( + features.shape[1], max(image_attribute_labels["attribute"]) + 1) + accuracy_matrix = np.zeros(matrix_shape) + sensitivity_matrix = np.zeros_like(accuracy_matrix) + grouped_attributes = image_attribute_labels.groupby("attribute") + for attribute_id, group in grouped_attributes: + is_present = group[group["is_present"]] + not_present = group[~group["is_present"]] + is_present_avg = np.mean(features[is_present["img_id"]], axis=0) + not_present_avg = np.mean(features[not_present["img_id"]], axis=0) + sensitivity_matrix[:, attribute_id] = not_present_avg + accuracy_matrix[:, attribute_id] = is_present_avg + metric_matrix = accuracy_matrix - sensitivity_matrix + no_abs_features = features - np.min(features, axis=0) + no_abs_feature_mean = metric_matrix / no_abs_features.mean(axis=0)[:, None] + return no_abs_feature_mean diff --git a/evaluation/__pycache__/diversity.cpython-310.pyc b/evaluation/__pycache__/diversity.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d65f89a660c7660f26ce7578e34557c87b970b66 Binary files /dev/null and b/evaluation/__pycache__/diversity.cpython-310.pyc differ diff --git a/evaluation/__pycache__/helpers.cpython-310.pyc b/evaluation/__pycache__/helpers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5f7034adbe8c30d420975414880bb581f2052080 Binary files /dev/null and b/evaluation/__pycache__/helpers.cpython-310.pyc differ diff --git a/evaluation/__pycache__/qsenn_metrics.cpython-310.pyc b/evaluation/__pycache__/qsenn_metrics.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9460b6362b47ba4ddf699b74707b87b5a063ce73 Binary files /dev/null and b/evaluation/__pycache__/qsenn_metrics.cpython-310.pyc differ diff --git a/evaluation/__pycache__/utils.cpython-310.pyc b/evaluation/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8e70bce079f72b6f83344a1decd675b1312655b6 Binary files /dev/null and b/evaluation/__pycache__/utils.cpython-310.pyc differ diff --git a/evaluation/diversity.py b/evaluation/diversity.py new file mode 100644 index 0000000000000000000000000000000000000000..033679ce9cf4546b74b0d1d4bdb6b8590c5c8865 --- /dev/null +++ b/evaluation/diversity.py @@ -0,0 +1,111 @@ +import numpy as np +import torch + +from evaluation.helpers import softmax_feature_maps + + +class MultiKCrossChannelMaxPooledSum: + def __init__(self, top_k_range, weights, interactions, func="softmax"): + self.top_k_range = top_k_range + self.weights = weights + self.failed = False + self.max_ks = self.get_max_ks(weights) + self.locality_of_used_features = torch.zeros(len(top_k_range), 
device=weights.device) + self.locality_of_exclusely_used_features = torch.zeros(len(top_k_range), device=weights.device) + self.ns_k = torch.zeros(len(top_k_range), device=weights.device) + self.exclusive_ns = torch.zeros(len(top_k_range), device=weights.device) + self.interactions = interactions + self.func = func + + def get_max_ks(self, weights): + nonzeros = torch.count_nonzero(torch.tensor(weights), 1) + return nonzeros + + def get_top_n_locality(self, outputs, initial_feature_maps, k): + feature_maps, relevant_weights, vector_size, top_classes = self.adapt_feature_maps(outputs, + initial_feature_maps) + max_ks = self.max_ks[top_classes] + max_k_based_row_selection = max_ks >= k + + result = self.get_crosspooled(relevant_weights, max_k_based_row_selection, k, vector_size, feature_maps, + separated=True) + return result + + def get_locality(self, outputs, initial_feature_maps, n): + answer = self.get_top_n_locality(outputs, initial_feature_maps, n) + return answer + + def get_result(self): + # if torch.sum(self.exclusive_ns) ==0: + # end_idx = len(self.exclusive_ns) - 1 + # else: + + exclusive_array = torch.zeros_like(self.locality_of_exclusely_used_features) + local_array = torch.zeros_like(self.locality_of_used_features) + # if self.failed: + # return local_array, exclusive_array + cumulated = torch.cumsum(self.exclusive_ns, 0) + end_idx = torch.argmax(cumulated) + exclusivity_array = self.locality_of_exclusely_used_features[:end_idx + 1] / self.exclusive_ns[:end_idx + 1] + exclusivity_array[exclusivity_array != exclusivity_array] = 0 + exclusive_array[:len(exclusivity_array)] = exclusivity_array + locality_array = self.locality_of_used_features[self.locality_of_used_features != 0] / self.ns_k[ + self.locality_of_used_features != 0] + local_array[:len(locality_array)] = locality_array + return local_array, exclusive_array + + def get_crosspooled(self, relevant_weights, mask, k, vector_size, feature_maps, separated=False): + relevant_indices = get_relevant_indices(relevant_weights, k)[mask] + # this should have size batch x k x featuremapsize squared] + indices = relevant_indices.unsqueeze(2).repeat(1, 1, vector_size) + sub_feature_maps = torch.gather(feature_maps[mask], 1, indices) + # shape batch x featuremapsquared: For each "pixel" the highest value + cross_pooled = torch.max(sub_feature_maps, 1)[0] + if separated: + return torch.sum(cross_pooled, 1) / k + else: + ns = len(cross_pooled) + result = torch.sum(cross_pooled) / (k) + # should be batch x map size + + return ns, result + + def adapt_feature_maps(self, outputs, initial_feature_maps): + if self.func == "softmax": + feature_maps = softmax_feature_maps(initial_feature_maps) + feature_maps = torch.flatten(feature_maps, 2) + vector_size = feature_maps.shape[2] + top_classes = torch.argmax(outputs, dim=1) + relevant_weights = self.weights[top_classes] + if relevant_weights.shape[1] != feature_maps.shape[1]: + feature_maps = self.interactions.get_localized_features(initial_feature_maps) + feature_maps = softmax_feature_maps(feature_maps) + feature_maps = torch.flatten(feature_maps, 2) + return feature_maps, relevant_weights, vector_size, top_classes + + def calculate_locality(self, outputs, initial_feature_maps): + feature_maps, relevant_weights, vector_size, top_classes = self.adapt_feature_maps(outputs, + initial_feature_maps) + max_ks = self.max_ks[top_classes] + for k in self.top_k_range: + # relevant_k_s = max_ks[] + max_k_based_row_selection = max_ks >= k + if torch.sum(max_k_based_row_selection) == 0: + break + + 
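+ # Classes whose weight row has exactly k non-zero entries contribute to the 'exclusive'
+ # locality at k; classes with at least k non-zero entries feed the cumulative locality below.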
exclusive_k = max_ks == k + if torch.sum(exclusive_k) != 0: + ns, result = self.get_crosspooled(relevant_weights, exclusive_k, k, vector_size, feature_maps) + self.locality_of_exclusely_used_features[k - 1] += result + self.exclusive_ns[k - 1] += ns + ns, result = self.get_crosspooled(relevant_weights, max_k_based_row_selection, k, vector_size, feature_maps) + self.ns_k[k - 1] += ns + self.locality_of_used_features[k - 1] += result + + def __call__(self, outputs, initial_feature_maps): + self.calculate_locality(outputs, initial_feature_maps) + + +def get_relevant_indices(weights, top_k): + top_k = weights.topk(top_k)[1] + return top_k \ No newline at end of file diff --git a/evaluation/helpers.py b/evaluation/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..fe4a9902103fe63df01994acb079127ab719c9f1 --- /dev/null +++ b/evaluation/helpers.py @@ -0,0 +1,6 @@ +import torch + + +def softmax_feature_maps(x): + # done: verify that this applies softmax along first dimension + return torch.softmax(x.reshape(x.size(0), x.size(1), -1), 2).view_as(x) \ No newline at end of file diff --git a/evaluation/qsenn_metrics.py b/evaluation/qsenn_metrics.py new file mode 100644 index 0000000000000000000000000000000000000000..1bb8f21b6f7dfe101c8e668e9c422c1d88ce8751 --- /dev/null +++ b/evaluation/qsenn_metrics.py @@ -0,0 +1,39 @@ +import numpy as np +import torch + +from evaluation.Metrics.Dependence import compute_contribution_top_feature +from evaluation.Metrics.cub_Alignment import get_cub_alignment_from_features +from evaluation.diversity import MultiKCrossChannelMaxPooledSum +from evaluation.utils import get_metrics_for_model + + +def evaluateALLMetricsForComps(features_train, outputs_train, feature_maps_test, + outputs_test, linear_matrix, labels_train): + with torch.no_grad(): + if len(features_train) < 7000: # recognize CUB and TravelingBirds + cub_alignment = get_cub_alignment_from_features(features_train) + else: + cub_alignment = 0 + print("cub_alignment: ", cub_alignment) + localizer = MultiKCrossChannelMaxPooledSum(range(1, 6), linear_matrix, None) + batch_size = 300 + for i in range(np.floor(len(features_train) / batch_size).astype(int)): + localizer(outputs_test[i * batch_size:(i + 1) * batch_size].to("cuda"), + feature_maps_test[i * batch_size:(i + 1) * batch_size].to("cuda")) + + locality, exlusive_locality = localizer.get_result() + diversity = locality[4] + print("diversity@5: ", diversity) + abs_frac_mean = compute_contribution_top_feature( + features_train, + outputs_train, + linear_matrix, + labels_train) + print("Dependence ", abs_frac_mean) + answer_dict = {"diversity": diversity.item(), "Dependence": abs_frac_mean.item(), "Alignment":cub_alignment} + return answer_dict + +def eval_model_on_all_qsenn_metrics(model, test_loader, train_loader): + return get_metrics_for_model(train_loader, test_loader, model, evaluateALLMetricsForComps) + + diff --git a/evaluation/utils.py b/evaluation/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..ef1b679fc9dac88e2fb897d69c34d959c19b3101 --- /dev/null +++ b/evaluation/utils.py @@ -0,0 +1,57 @@ +import torch +from tqdm import tqdm + + + +def get_metrics_for_model(train_loader, test_loader, model, metric_evaluator): + (features_train, feature_maps_train, outputs_train, features_test, feature_maps_test, + outputs_test, labels) = [], [], [], [], [], [], [] + device = "cuda" if torch.cuda.is_available() else "cpu" + model.eval() + model = model.to(device) + training_transforms = 
train_loader.dataset.transform + train_loader.dataset.transform = test_loader.dataset.transform # Use test transform for train + train_loader = torch.utils.data.DataLoader(train_loader.dataset, batch_size=100, shuffle=False) # Turn off shuffling + print("Going in get metrics") + linear_matrix = model.linear.weight + entries = torch.nonzero(linear_matrix) + rel_features = torch.unique(entries[:, 1]) + with torch.no_grad(): + iterator = tqdm(enumerate(train_loader), total=len(train_loader)) + for batch_idx, (data, target) in iterator: + xs1 = data.to("cuda") + output, feature_maps, final_features = model(xs1, with_feature_maps=True, with_final_features=True,) + outputs_train.append(output.to("cpu")) + features_train.append(final_features.to("cpu")) + labels.append(target.to("cpu")) + total = 0 + correct = 0 + iterator = tqdm(enumerate(test_loader), total=len(test_loader)) + for batch_idx, (data, target) in iterator: + xs1 = data.to("cuda") + output, feature_maps, final_features = model(xs1, with_feature_maps=True, + with_final_features=True, ) + feature_maps_test.append(feature_maps[:, rel_features].to("cpu")) + outputs_test.append(output.to("cpu")) + total += target.size(0) + _, predicted = output.max(1) + correct += predicted.eq(target.to("cuda")).sum().item() + print("test accuracy: ", correct / total) + features_train = torch.cat(features_train) + outputs_train = torch.cat(outputs_train) + feature_maps_test = torch.cat(feature_maps_test) + outputs_test = torch.cat(outputs_test) + labels = torch.cat(labels) + linear_matrix = linear_matrix[:, rel_features] + print("Shape of linear matrix: ", linear_matrix.shape) + all_metrics_dict = metric_evaluator(features_train, outputs_train, + feature_maps_test, + outputs_test, linear_matrix, labels) + result_dict = {"Accuracy": correct / total, "NFfeatures": linear_matrix.shape[1], + "PerClass": torch.nonzero(linear_matrix).shape[0] / linear_matrix.shape[0], + } + result_dict.update(all_metrics_dict) + print(result_dict) + # Reset Train transforms + train_loader.dataset.transform = training_transforms + return result_dict diff --git a/fig/AutoML4FAS_Logo.jpeg b/fig/AutoML4FAS_Logo.jpeg new file mode 100644 index 0000000000000000000000000000000000000000..35d4066fa5cf5967553960097b57f80c2ac8c580 Binary files /dev/null and b/fig/AutoML4FAS_Logo.jpeg differ diff --git a/fig/Bund.png b/fig/Bund.png new file mode 100644 index 0000000000000000000000000000000000000000..1c92a104515f9b3c61642f7cd3cc898163e5ef0e Binary files /dev/null and b/fig/Bund.png differ diff --git a/fig/LUH.png b/fig/LUH.png new file mode 100644 index 0000000000000000000000000000000000000000..af168ab3e866e5c66c616b6a090ef9c4ac212e3b Binary files /dev/null and b/fig/LUH.png differ diff --git a/fig/birds.png b/fig/birds.png new file mode 100644 index 0000000000000000000000000000000000000000..330ebdff52c39b989a5c0cd42e0a35fdbeb7c1ff Binary files /dev/null and b/fig/birds.png differ diff --git a/finetuning/map_function.py b/finetuning/map_function.py new file mode 100644 index 0000000000000000000000000000000000000000..7aa65c3fa6dee0dc55484bdaae3fb181786eed1b --- /dev/null +++ b/finetuning/map_function.py @@ -0,0 +1,11 @@ +from finetuning.qsenn import finetune_qsenn +from finetuning.sldd import finetune_sldd + + +def finetune(key, model, train_loader, test_loader, log_dir, n_classes, seed, beta, optimization_schedule, per_class, n_features): + if key == 'sldd': + return finetune_sldd(model, train_loader, test_loader, log_dir, n_classes, seed, beta, optimization_schedule,per_class, n_features) 
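+ # 'qsenn' repeats feature selection and assignment over several iterations (see finetune_qsenn), whereas 'sldd' performs a single selection and finetuning pass.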
+ elif key == 'qsenn': + return finetune_qsenn(model, train_loader, test_loader, log_dir, n_classes, seed, beta, optimization_schedule,n_features,per_class, ) + else: + raise ValueError(f"Unknown Finetuning key: {key}") \ No newline at end of file diff --git a/finetuning/qsenn.py b/finetuning/qsenn.py new file mode 100644 index 0000000000000000000000000000000000000000..ce4dc8b65e6c703e51fe602c2ac897c97844897c --- /dev/null +++ b/finetuning/qsenn.py @@ -0,0 +1,30 @@ +import os + +import torch + +from finetuning.utils import train_n_epochs +from sparsification.qsenn import compute_qsenn_feature_selection_and_assignment + + +def finetune_qsenn(model, train_loader, test_loader, log_dir, n_classes, seed, beta, optimization_schedule ,n_features, n_per_class): + for iteration_epoch in range(4): + print(f"Starting iteration epoch {iteration_epoch}") + this_log_dir = log_dir / f"iteration_epoch_{iteration_epoch}" + this_log_dir.mkdir(parents=True, exist_ok=True) + feature_sel, sparse_layer,bias_sparse, current_mean, current_std = compute_qsenn_feature_selection_and_assignment(model, train_loader, + test_loader, + this_log_dir, n_classes, seed, n_features, n_per_class) + model.set_model_sldd(feature_sel, sparse_layer, current_mean, current_std, bias_sparse) + if os.path.exists(this_log_dir / "trained_model.pth"): + model.load_state_dict(torch.load(this_log_dir / "trained_model.pth")) + _ = optimization_schedule.get_params() # count up, to have get correct lr + continue + + model = train_n_epochs( model, beta, optimization_schedule, train_loader, test_loader) + torch.save(model.state_dict(), this_log_dir / "trained_model.pth") + print(f"Finished iteration epoch {iteration_epoch}") + return model + + + + diff --git a/finetuning/sldd.py b/finetuning/sldd.py new file mode 100644 index 0000000000000000000000000000000000000000..7c8ac0034b14cbbf460f0bf59e25dfd8188ee94b --- /dev/null +++ b/finetuning/sldd.py @@ -0,0 +1,22 @@ +import numpy as np +import torch + +from FeatureDiversityLoss import FeatureDiversityLoss +from finetuning.utils import train_n_epochs +from sparsification.glmBasedSparsification import compute_feature_selection_and_assignment +from sparsification.sldd import compute_sldd_feature_selection_and_assignment +from train import train, test +from training.optim import get_optimizer + + + + +def finetune_sldd(model, train_loader, test_loader, log_dir, n_classes, seed, beta, optimization_schedule,n_per_class, n_features, ): + feature_sel, weight, bias, mean, std = compute_sldd_feature_selection_and_assignment(model, train_loader, + test_loader, + log_dir, n_classes, seed,n_per_class, n_features) + model.set_model_sldd(feature_sel, weight, mean, std, bias) + model = train_n_epochs( model, beta, optimization_schedule, train_loader, test_loader) + return model + + diff --git a/finetuning/utils.py b/finetuning/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..af751e2094c5ba6b2f83adadb5059f692329db37 --- /dev/null +++ b/finetuning/utils.py @@ -0,0 +1,14 @@ +from FeatureDiversityLoss import FeatureDiversityLoss +from train import train, test +from training.optim import get_optimizer + + +def train_n_epochs(model, beta,optimization_schedule, train_loader, test_loader): + optimizer, schedule, epochs = get_optimizer(model, optimization_schedule) + fdl = FeatureDiversityLoss(beta, model.linear) + for epoch in range(epochs): + model = train(model, train_loader, optimizer, fdl, epoch) + schedule.step() + if epoch % 5 == 0 or epoch+1 == epochs: + test(model, test_loader, epoch) 
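+ # Test accuracy is evaluated every 5 epochs and after the final epoch before the finetuned model is returned.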
+ return model \ No newline at end of file diff --git a/flagged/input/1e670025e5206017965a/Western_Grebe_0090_36182.jpg b/flagged/input/1e670025e5206017965a/Western_Grebe_0090_36182.jpg new file mode 100644 index 0000000000000000000000000000000000000000..6450f6174bdd37cace75c6b32a029bcfa8761ed7 Binary files /dev/null and b/flagged/input/1e670025e5206017965a/Western_Grebe_0090_36182.jpg differ diff --git a/flagged/input/6a11e385290e9006bb0a/Black_Footed_Albatross_0003_796136.jpg b/flagged/input/6a11e385290e9006bb0a/Black_Footed_Albatross_0003_796136.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f9f6063f1c6130694ddd53c0231b317abe9ef03b Binary files /dev/null and b/flagged/input/6a11e385290e9006bb0a/Black_Footed_Albatross_0003_796136.jpg differ diff --git a/flagged/log.csv b/flagged/log.csv new file mode 100644 index 0000000000000000000000000000000000000000..5af3d3f8c5830b52178c12538580c9cd038fd2e4 --- /dev/null +++ b/flagged/log.csv @@ -0,0 +1,3 @@ +input,output,flag,username,timestamp +flagged/input/1e670025e5206017965a/Western_Grebe_0090_36182.jpg,,,,2024-10-21 12:37:51.541901 +flagged/input/6a11e385290e9006bb0a/Black_Footed_Albatross_0003_796136.jpg,"[{""image"": ""flagged/output/e2f704607c002e0c557d/image.webp"", ""caption"": null}, {""image"": ""flagged/output/1b4541c3e93f034d746d/image.webp"", ""caption"": null}, {""image"": ""flagged/output/f8727dcfa3c59de0d873/image.webp"", ""caption"": null}, {""image"": ""flagged/output/c4b75e9fbc946f6ead6d/image.webp"", ""caption"": null}, {""image"": ""flagged/output/5b5ad2dd997a635f4917/image.webp"", ""caption"": null}, {""image"": ""flagged/output/b066004e4a0114aa705b/image.webp"", ""caption"": null}, {""image"": ""flagged/output/036072cdcc620de8cb65/image.webp"", ""caption"": null}, {""image"": ""flagged/output/218135cb251eb6cd0b2c/image.webp"", ""caption"": null}, {""image"": ""flagged/output/2a0671ba5ac1aa3bd2b9/image.webp"", ""caption"": null}, {""image"": ""flagged/output/595953adce3a654bbd33/image.webp"", ""caption"": null}, {""image"": ""flagged/output/f333c69915509927b2ff/image.webp"", ""caption"": null}, {""image"": ""flagged/output/a966f50f23644e5046e8/image.webp"", ""caption"": null}, {""image"": ""flagged/output/1a8a9e53fd4990fe5231/image.webp"", ""caption"": null}, {""image"": ""flagged/output/d7bc2f0eb8d70a562542/image.webp"", ""caption"": null}, {""image"": ""flagged/output/53fd53c5eab644d30338/image.webp"", ""caption"": null}, {""image"": ""flagged/output/ddf6b8ddc855838cc3b5/image.webp"", ""caption"": null}, {""image"": ""flagged/output/41a99b70366ac01533b4/image.webp"", ""caption"": null}, {""image"": ""flagged/output/1b4ae8362917e14cb7a7/image.webp"", ""caption"": null}, {""image"": ""flagged/output/b321456290561eacf170/image.webp"", ""caption"": null}, {""image"": ""flagged/output/42d34c69c2384bda376b/image.webp"", ""caption"": null}, {""image"": ""flagged/output/35d0e9ae554c0b863ef3/image.webp"", ""caption"": null}, {""image"": ""flagged/output/799f55238c434907570f/image.webp"", ""caption"": null}, {""image"": ""flagged/output/db82081afaabf2fb505b/image.webp"", ""caption"": null}, {""image"": ""flagged/output/fff73f12467314dce395/image.webp"", ""caption"": null}, {""image"": ""flagged/output/1bd17ff3896c5045b453/image.webp"", ""caption"": null}, {""image"": ""flagged/output/e31f93405e1526fe3e55/image.webp"", ""caption"": null}, {""image"": ""flagged/output/e9c9ff1da0805da0c0d8/image.webp"", ""caption"": null}, {""image"": ""flagged/output/e6ef5ba2d6c65b3c1d21/image.webp"", ""caption"": null}, 
{""image"": ""flagged/output/f763a51fb4a6d8a13313/image.webp"", ""caption"": null}, {""image"": ""flagged/output/7bdb4562631122e4ced7/image.webp"", ""caption"": null}, {""image"": ""flagged/output/9f7495b7c7648ecb1a10/image.webp"", ""caption"": null}, {""image"": ""flagged/output/ecbe75612f5db6cc7370/image.webp"", ""caption"": null}, {""image"": ""flagged/output/31f824d9522d30106a44/image.webp"", ""caption"": null}, {""image"": ""flagged/output/e06b9103e0bf90cd398a/image.webp"", ""caption"": null}, {""image"": ""flagged/output/1441b4f37340c2afa3d0/image.webp"", ""caption"": null}]",,,2024-10-21 23:01:32.158338 diff --git a/flagged/output/036072cdcc620de8cb65/image.webp b/flagged/output/036072cdcc620de8cb65/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..4e7a831b3e63d41cf7dc53178e8f19231f456648 Binary files /dev/null and b/flagged/output/036072cdcc620de8cb65/image.webp differ diff --git a/flagged/output/1441b4f37340c2afa3d0/image.webp b/flagged/output/1441b4f37340c2afa3d0/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..46e20fcfccd3a763d3eae21a0fda7d2908c6f53b Binary files /dev/null and b/flagged/output/1441b4f37340c2afa3d0/image.webp differ diff --git a/flagged/output/1a8a9e53fd4990fe5231/image.webp b/flagged/output/1a8a9e53fd4990fe5231/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..30452bb9f913012c3a787e78f5af2a657bfc4a82 Binary files /dev/null and b/flagged/output/1a8a9e53fd4990fe5231/image.webp differ diff --git a/flagged/output/1b4541c3e93f034d746d/image.webp b/flagged/output/1b4541c3e93f034d746d/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..41fb284622b8bf0e85dac87a497a4942011579f2 Binary files /dev/null and b/flagged/output/1b4541c3e93f034d746d/image.webp differ diff --git a/flagged/output/1b4ae8362917e14cb7a7/image.webp b/flagged/output/1b4ae8362917e14cb7a7/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..3566fc9c4f4f8bc2d8be57ffbaf1fb0b84f6fed8 Binary files /dev/null and b/flagged/output/1b4ae8362917e14cb7a7/image.webp differ diff --git a/flagged/output/1bd17ff3896c5045b453/image.webp b/flagged/output/1bd17ff3896c5045b453/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..a2e8a49694c6a233177b8757e916860ec2c217cb Binary files /dev/null and b/flagged/output/1bd17ff3896c5045b453/image.webp differ diff --git a/flagged/output/218135cb251eb6cd0b2c/image.webp b/flagged/output/218135cb251eb6cd0b2c/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..986c085197db498a852f013f503db78b64b4f7c5 Binary files /dev/null and b/flagged/output/218135cb251eb6cd0b2c/image.webp differ diff --git a/flagged/output/2a0671ba5ac1aa3bd2b9/image.webp b/flagged/output/2a0671ba5ac1aa3bd2b9/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..5e9a54c48df42fa656f0bced1d9580acd75cf7ba Binary files /dev/null and b/flagged/output/2a0671ba5ac1aa3bd2b9/image.webp differ diff --git a/flagged/output/31f824d9522d30106a44/image.webp b/flagged/output/31f824d9522d30106a44/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..c91c7a09d9b8611a1430afa699da601d7d0efe21 Binary files /dev/null and b/flagged/output/31f824d9522d30106a44/image.webp differ diff --git a/flagged/output/35d0e9ae554c0b863ef3/image.webp b/flagged/output/35d0e9ae554c0b863ef3/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..2473cae43807f063aa4d3e568e06e17e4b569920 
Binary files /dev/null and b/flagged/output/35d0e9ae554c0b863ef3/image.webp differ diff --git a/flagged/output/41a99b70366ac01533b4/image.webp b/flagged/output/41a99b70366ac01533b4/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..4121b433b87b66dd3fbb58722c67818906c67411 Binary files /dev/null and b/flagged/output/41a99b70366ac01533b4/image.webp differ diff --git a/flagged/output/42d34c69c2384bda376b/image.webp b/flagged/output/42d34c69c2384bda376b/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..7618c903c18dc2451d25e1f32656f4caf9fe6ddb Binary files /dev/null and b/flagged/output/42d34c69c2384bda376b/image.webp differ diff --git a/flagged/output/53fd53c5eab644d30338/image.webp b/flagged/output/53fd53c5eab644d30338/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..2abbb10f1dbe66b93e37422b2470a0f071dea7cf Binary files /dev/null and b/flagged/output/53fd53c5eab644d30338/image.webp differ diff --git a/flagged/output/595953adce3a654bbd33/image.webp b/flagged/output/595953adce3a654bbd33/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..881add82b80c09007934e0467acd081e1b5fd7ac Binary files /dev/null and b/flagged/output/595953adce3a654bbd33/image.webp differ diff --git a/flagged/output/5b5ad2dd997a635f4917/image.webp b/flagged/output/5b5ad2dd997a635f4917/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..e57d0e88fbfc7af54aeb69995fe44af657c0d8dd Binary files /dev/null and b/flagged/output/5b5ad2dd997a635f4917/image.webp differ diff --git a/flagged/output/799f55238c434907570f/image.webp b/flagged/output/799f55238c434907570f/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..85d8a8fe108f97bec9684ccb2c614db43035d88e Binary files /dev/null and b/flagged/output/799f55238c434907570f/image.webp differ diff --git a/flagged/output/7bdb4562631122e4ced7/image.webp b/flagged/output/7bdb4562631122e4ced7/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..0b046a9a2ca40f025b7cc77df1b4c4f0613a7659 Binary files /dev/null and b/flagged/output/7bdb4562631122e4ced7/image.webp differ diff --git a/flagged/output/9f7495b7c7648ecb1a10/image.webp b/flagged/output/9f7495b7c7648ecb1a10/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..bc21593a6869f1cc00f78de4dd9ebf912d18d795 Binary files /dev/null and b/flagged/output/9f7495b7c7648ecb1a10/image.webp differ diff --git a/flagged/output/a966f50f23644e5046e8/image.webp b/flagged/output/a966f50f23644e5046e8/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..ffb81c67f03b993798f710e71e65b0f43cd151ca Binary files /dev/null and b/flagged/output/a966f50f23644e5046e8/image.webp differ diff --git a/flagged/output/b066004e4a0114aa705b/image.webp b/flagged/output/b066004e4a0114aa705b/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..b861d88dda4c0c7b783a87abaabc29f94dc943b2 Binary files /dev/null and b/flagged/output/b066004e4a0114aa705b/image.webp differ diff --git a/flagged/output/b321456290561eacf170/image.webp b/flagged/output/b321456290561eacf170/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..a10280498c93346105b7da59ba0808494004024c Binary files /dev/null and b/flagged/output/b321456290561eacf170/image.webp differ diff --git a/flagged/output/c4b75e9fbc946f6ead6d/image.webp b/flagged/output/c4b75e9fbc946f6ead6d/image.webp new file mode 100644 index 
0000000000000000000000000000000000000000..106535e80842768f14da47245baa981cabeea71b Binary files /dev/null and b/flagged/output/c4b75e9fbc946f6ead6d/image.webp differ diff --git a/flagged/output/d7bc2f0eb8d70a562542/image.webp b/flagged/output/d7bc2f0eb8d70a562542/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..686f7aac20f6a5449db99a97dc43c01dfdd99551 Binary files /dev/null and b/flagged/output/d7bc2f0eb8d70a562542/image.webp differ diff --git a/flagged/output/db82081afaabf2fb505b/image.webp b/flagged/output/db82081afaabf2fb505b/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..af93f3eacee5ef0a4903995aaf8d2e2e5921976d Binary files /dev/null and b/flagged/output/db82081afaabf2fb505b/image.webp differ diff --git a/flagged/output/ddf6b8ddc855838cc3b5/image.webp b/flagged/output/ddf6b8ddc855838cc3b5/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..f5d206e37f97d1f45b91aceb45df630ca9fae223 Binary files /dev/null and b/flagged/output/ddf6b8ddc855838cc3b5/image.webp differ diff --git a/flagged/output/e06b9103e0bf90cd398a/image.webp b/flagged/output/e06b9103e0bf90cd398a/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..8b4510986e28e5a132f4cc197ce9063b072b113b Binary files /dev/null and b/flagged/output/e06b9103e0bf90cd398a/image.webp differ diff --git a/flagged/output/e2f704607c002e0c557d/image.webp b/flagged/output/e2f704607c002e0c557d/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..a7b2dafc246639799cfabd97306b3c4ba426cba6 Binary files /dev/null and b/flagged/output/e2f704607c002e0c557d/image.webp differ diff --git a/flagged/output/e31f93405e1526fe3e55/image.webp b/flagged/output/e31f93405e1526fe3e55/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..955afaa8ead0e0bc67be7722bbc791dbfe4f35be Binary files /dev/null and b/flagged/output/e31f93405e1526fe3e55/image.webp differ diff --git a/flagged/output/e6ef5ba2d6c65b3c1d21/image.webp b/flagged/output/e6ef5ba2d6c65b3c1d21/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..4e0429c00817f88878d5cfc460039b8ed169c74c Binary files /dev/null and b/flagged/output/e6ef5ba2d6c65b3c1d21/image.webp differ diff --git a/flagged/output/e9c9ff1da0805da0c0d8/image.webp b/flagged/output/e9c9ff1da0805da0c0d8/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..d73c97deb0388bfb5423ad36e686e5e3ca44ce8d Binary files /dev/null and b/flagged/output/e9c9ff1da0805da0c0d8/image.webp differ diff --git a/flagged/output/ecbe75612f5db6cc7370/image.webp b/flagged/output/ecbe75612f5db6cc7370/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..7608061bb4d13a3e87696620071a61112463dea9 Binary files /dev/null and b/flagged/output/ecbe75612f5db6cc7370/image.webp differ diff --git a/flagged/output/f333c69915509927b2ff/image.webp b/flagged/output/f333c69915509927b2ff/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..766635e3fd9996a671a9fd9e09bd37901f37a20e Binary files /dev/null and b/flagged/output/f333c69915509927b2ff/image.webp differ diff --git a/flagged/output/f763a51fb4a6d8a13313/image.webp b/flagged/output/f763a51fb4a6d8a13313/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..65b0c4ddd52dac9f92f898bc64ed67c18722c6ac Binary files /dev/null and b/flagged/output/f763a51fb4a6d8a13313/image.webp differ diff --git a/flagged/output/f8727dcfa3c59de0d873/image.webp 
b/flagged/output/f8727dcfa3c59de0d873/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..175792b0035f9df9db88d62f56d40356d8afbbfe Binary files /dev/null and b/flagged/output/f8727dcfa3c59de0d873/image.webp differ diff --git a/flagged/output/fff73f12467314dce395/image.webp b/flagged/output/fff73f12467314dce395/image.webp new file mode 100644 index 0000000000000000000000000000000000000000..0b51dcf8c9e672660529ad1adea9afe37e5a4f08 Binary files /dev/null and b/flagged/output/fff73f12467314dce395/image.webp differ diff --git a/get_data.py b/get_data.py new file mode 100644 index 0000000000000000000000000000000000000000..e6e9414c933a64f1124f4eda6ec0faa8cd8ed2ee --- /dev/null +++ b/get_data.py @@ -0,0 +1,119 @@ +from pathlib import Path + +import torch +import torchvision +from torchvision.transforms import transforms, TrivialAugmentWide + +from configs.dataset_params import normalize_params +from dataset_classes.cub200 import CUB200Class +from dataset_classes.stanfordcars import StanfordCarsClass +from dataset_classes.travelingbirds import TravelingBirds + + +def get_data(dataset, crop = True, img_size=448): + batchsize = 16 + if dataset == "CUB2011": + train_transform = get_augmentation(0.1, img_size, True,not crop, True, True, normalize_params["CUB2011"]) + test_transform = get_augmentation(0.1, img_size, False, not crop, True, True, normalize_params["CUB2011"]) + train_dataset = CUB200Class(True, train_transform, crop) + test_dataset = CUB200Class(False, test_transform, crop) + elif dataset == "TravelingBirds": + train_transform = get_augmentation(0.1, img_size, True, not crop, True, True, normalize_params["TravelingBirds"]) + test_transform = get_augmentation(0.1, img_size, False, not crop, True, True, normalize_params["TravelingBirds"]) + train_dataset = TravelingBirds(True, train_transform, crop) + test_dataset = TravelingBirds(False, test_transform, crop) + + elif dataset == "StanfordCars": + train_transform = get_augmentation(0.1, img_size, True, True, True, True, normalize_params["StanfordCars"]) + test_transform = get_augmentation(0.1, img_size, False, True, True, True, normalize_params["StanfordCars"]) + train_dataset = StanfordCarsClass(True, train_transform) + test_dataset = StanfordCarsClass(False, test_transform) + elif dataset == "FGVCAircraft": + raise NotImplementedError + + elif dataset == "ImageNet": + # Defaults from the robustness package + if img_size != 224: + raise NotImplementedError("ImageNet is setup to only work with 224x224 images") + train_transform = transforms.Compose([ + transforms.RandomResizedCrop(224), + transforms.RandomHorizontalFlip(), + transforms.ColorJitter( + brightness=0.1, + contrast=0.1, + saturation=0.1 + ), + transforms.ToTensor(), + Lighting(0.05, IMAGENET_PCA['eigval'], + IMAGENET_PCA['eigvec']) + ]) + """ + Standard training data augmentation for ImageNet-scale datasets: Random crop, + Random flip, Color Jitter, and Lighting Transform (see https://git.io/fhBOc) + """ + test_transform = transforms.Compose([ + transforms.Resize(256), + transforms.CenterCrop(224), + transforms.ToTensor(), + ]) + imgnet_root = Path.home()/ "tmp" /"Datasets"/ "imagenet" + train_dataset = torchvision.datasets.ImageNet(root=imgnet_root, split='train', transform=train_transform) + test_dataset = torchvision.datasets.ImageNet(root=imgnet_root, split='val', transform=test_transform) + batchsize = 64 + + train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=batchsize, shuffle=True, num_workers=8) + test_loader = 
torch.utils.data.DataLoader(test_dataset, batch_size=batchsize, shuffle=False, num_workers=8) + return train_loader, test_loader + +def get_augmentation(jitter, size, training, random_center_crop, trivialAug, hflip, normalize): + augmentation = [] + if random_center_crop: + augmentation.append(transforms.Resize(size)) + else: + augmentation.append(transforms.Resize((size, size))) + if training: + if random_center_crop: + augmentation.append(transforms.RandomCrop(size, padding=4)) + else: + if random_center_crop: + augmentation.append(transforms.CenterCrop(size)) + if training: + if hflip: + augmentation.append(transforms.RandomHorizontalFlip()) + if jitter: + augmentation.append(transforms.ColorJitter(jitter, jitter, jitter)) + if trivialAug: + augmentation.append(TrivialAugmentWide()) + augmentation.append(transforms.ToTensor()) + augmentation.append(transforms.Normalize(**normalize)) + return transforms.Compose(augmentation) + +class Lighting(object): + """ + Lighting noise (see https://git.io/fhBOc) + """ + + def __init__(self, alphastd, eigval, eigvec): + self.alphastd = alphastd + self.eigval = eigval + self.eigvec = eigvec + + def __call__(self, img): + if self.alphastd == 0: + return img + + alpha = img.new().resize_(3).normal_(0, self.alphastd) + rgb = self.eigvec.type_as(img).clone() \ + .mul(alpha.view(1, 3).expand(3, 3)) \ + .mul(self.eigval.view(1, 3).expand(3, 3)) \ + .sum(1).squeeze() + + return img.add(rgb.view(3, 1, 1).expand_as(img)) +IMAGENET_PCA = { + 'eigval': torch.Tensor([0.2175, 0.0188, 0.0045]), + 'eigvec': torch.Tensor([ + [-0.5675, 0.7192, 0.4009], + [-0.5808, -0.0045, -0.8140], + [-0.5836, -0.6948, 0.4203], + ]) +} diff --git a/image_class_labels.txt b/image_class_labels.txt new file mode 100644 index 0000000000000000000000000000000000000000..e12c50e08d68eeb558fd85de2a8fd3e85fb93a7e --- /dev/null +++ b/image_class_labels.txt @@ -0,0 +1,800 @@ +49 1 +40 1 +31 1 +14 1 +93 2 +78 2 +102 2 +73 2 +142 3 +176 3 +123 3 +134 3 +185 4 +182 4 +202 4 +206 4 +262 5 +280 5 +270 5 +249 5 +293 6 +290 6 +308 6 +292 6 +361 7 +333 7 +349 7 +325 7 +400 8 +394 8 +406 8 +404 8 +441 9 +460 9 +480 9 +432 9 +511 10 +539 10 +523 10 +532 10 +598 11 +572 11 +574 11 +547 11 +619 12 +629 12 +621 12 +655 12 +689 13 +674 13 +718 13 +712 13 +742 14 +741 14 +776 14 +726 14 +822 15 +835 15 +801 15 +780 15 +862 16 +868 16 +887 16 +855 16 +947 17 +919 17 +939 17 +898 17 +957 18 +955 18 +989 18 +975 18 +1007 19 +1023 19 +1049 19 +1028 19 +1094 20 +1077 20 +1068 20 +1058 20 +1120 21 +1163 21 +1159 21 +1146 21 +1224 22 +1203 22 +1218 22 +1230 22 +1255 23 +1257 23 +1240 23 +1258 23 +1318 24 +1304 24 +1308 24 +1309 24 +1395 25 +1361 25 +1366 25 +1363 25 +1414 26 +1429 26 +1432 26 +1417 26 +1512 27 +1498 27 +1466 27 +1491 27 +1563 28 +1556 28 +1576 28 +1570 28 +1608 29 +1638 29 +1597 29 +1622 29 +1669 30 +1642 30 +1670 30 +1666 30 +1743 31 +1717 31 +1759 31 +1713 31 +1775 32 +1778 32 +1794 32 +1805 32 +1836 33 +1850 33 +1828 33 +1866 33 +1910 34 +1921 34 +1903 34 +1928 34 +1980 35 +1974 35 +1969 35 +1945 35 +2052 36 +2026 36 +2044 36 +1993 36 +2101 37 +2111 37 +2086 37 +2071 37 +2123 38 +2117 38 +2168 38 +2119 38 +2176 39 +2205 39 +2197 39 +2178 39 +2240 40 +2289 40 +2279 40 +2287 40 +2327 41 +2337 41 +2324 41 +2326 41 +2401 42 +2378 42 +2361 42 +2386 42 +2454 43 +2457 43 +2446 43 +2436 43 +2526 44 +2508 44 +2529 44 +2528 44 +2554 45 +2576 45 +2537 45 +2539 45 +2649 46 +2606 46 +2642 46 +2623 46 +2679 47 +2651 47 +2698 47 +2696 47 +2711 48 +2723 48 +2736 48 +2719 48 +2800 49 +2827 49 +2803 49 +2801 49 
+2865 50 +2873 50 +2856 50 +2885 50 +2891 51 +2947 51 +2900 51 +2896 51 +2987 52 +2964 52 +2970 52 +2998 52 +3066 53 +3031 53 +3021 53 +3026 53 +3102 54 +3104 54 +3079 54 +3091 54 +3163 55 +3158 55 +3159 55 +3175 55 +3203 56 +3249 56 +3232 56 +3229 56 +3256 57 +3302 57 +3296 57 +3287 57 +3320 58 +3331 58 +3322 58 +3332 58 +3393 59 +3418 59 +3421 59 +3380 59 +3440 60 +3479 60 +3480 60 +3437 60 +3511 61 +3498 61 +3490 61 +3523 61 +3576 62 +3547 62 +3606 62 +3590 62 +3652 63 +3611 63 +3621 63 +3615 63 +3688 64 +3685 64 +3681 64 +3715 64 +3755 65 +3728 65 +3736 65 +3776 65 +3797 66 +3784 66 +3796 66 +3802 66 +3877 67 +3873 67 +3872 67 +3888 67 +3930 68 +3951 68 +3956 68 +3922 68 +3958 69 +3963 69 +3959 69 +4012 69 +4066 70 +4048 70 +4033 70 +4076 70 +4129 71 +4134 71 +4114 71 +4122 71 +4184 72 +4147 72 +4172 72 +4170 72 +4254 73 +4226 73 +4249 73 +4233 73 +4297 74 +4262 74 +4280 74 +4292 74 +4369 75 +4323 75 +4325 75 +4358 75 +4377 76 +4391 76 +4398 76 +4415 76 +4484 77 +4472 77 +4459 77 +4451 77 +4523 78 +4518 78 +4549 78 +4539 78 +4554 79 +4575 79 +4561 79 +4567 79 +4642 80 +4647 80 +4621 80 +4627 80 +4696 81 +4713 81 +4703 81 +4732 81 +4747 82 +4779 82 +4737 82 +4734 82 +4798 83 +4807 83 +4817 83 +4814 83 +4882 84 +4904 84 +4896 84 +4876 84 +4961 85 +4924 85 +4958 85 +4912 85 +4977 86 +5006 86 +4983 86 +4985 86 +5031 87 +5027 87 +5065 87 +5069 87 +5140 88 +5120 88 +5121 88 +5109 88 +5153 89 +5154 89 +5155 89 +5204 89 +5255 90 +5248 90 +5254 90 +5237 90 +5312 91 +5302 91 +5306 91 +5276 91 +5380 92 +5347 92 +5327 92 +5337 92 +5398 93 +5434 93 +5420 93 +5431 93 +5447 94 +5492 94 +5462 94 +5503 94 +5548 95 +5522 95 +5506 95 +5527 95 +5592 96 +5584 96 +5607 96 +5568 96 +5658 97 +5676 97 +5679 97 +5672 97 +5742 98 +5690 98 +5687 98 +5719 98 +5784 99 +5766 99 +5791 99 +5763 99 +5831 100 +5817 100 +5858 100 +5814 100 +5903 101 +5904 101 +5889 101 +5901 101 +5924 102 +5933 102 +5950 102 +5960 102 +6033 103 +5983 103 +6022 103 +5995 103 +6089 104 +6094 104 +6066 104 +6084 104 +6111 105 +6139 105 +6099 105 +6133 105 +6170 106 +6177 106 +6186 106 +6146 106 +6213 107 +6232 107 +6254 107 +6235 107 +6264 108 +6269 108 +6307 108 +6279 108 +6349 109 +6351 109 +6330 109 +6382 109 +6398 110 +6418 110 +6385 110 +6441 110 +6490 111 +6493 111 +6452 111 +6476 111 +6528 112 +6542 112 +6512 112 +6548 112 +6580 113 +6592 113 +6568 113 +6591 113 +6620 114 +6631 114 +6636 114 +6672 114 +6684 115 +6687 115 +6681 115 +6673 115 +6762 116 +6790 116 +6764 116 +6760 116 +6797 117 +6828 117 +6825 117 +6799 117 +6873 118 +6864 118 +6902 118 +6909 118 +6914 119 +6937 119 +6949 119 +6931 119 +6993 120 +7010 120 +6988 120 +7026 120 +7038 121 +7044 121 +7037 121 +7071 121 +7095 122 +7103 122 +7136 122 +7138 122 +7194 123 +7203 123 +7155 123 +7175 123 +7219 124 +7268 124 +7211 124 +7220 124 +7321 125 +7311 125 +7315 125 +7290 125 +7332 126 +7355 126 +7363 126 +7330 126 +7399 127 +7407 127 +7442 127 +7395 127 +7506 128 +7490 128 +7491 128 +7462 128 +7525 129 +7558 129 +7542 129 +7530 129 +7611 130 +7582 130 +7626 130 +7574 130 +7633 131 +7666 131 +7660 131 +7685 131 +7687 132 +7704 132 +7723 132 +7693 132 +7748 133 +7788 133 +7780 133 +7777 133 +7849 134 +7863 134 +7847 134 +7832 134 +7896 135 +7911 135 +7915 135 +7908 135 +7962 136 +7934 136 +7945 136 +7957 136 +8028 137 +8018 137 +7992 137 +7990 137 +8084 138 +8089 138 +8090 138 +8104 138 +8159 139 +8141 139 +8150 139 +8120 139 +8177 140 +8202 140 +8185 140 +8205 140 +8258 141 +8264 141 +8233 141 +8237 141 +8323 142 +8335 142 +8327 142 +8302 142 +8392 143 +8358 143 +8352 143 
+8361 143 +8459 144 +8444 144 +8440 144 +8462 144 +8491 145 +8514 145 +8518 145 +8494 145 +8536 146 +8570 146 +8530 146 +8542 146 +8626 147 +8611 147 +8630 147 +8631 147 +8700 148 +8701 148 +8679 148 +8644 148 +8714 149 +8762 149 +8733 149 +8717 149 +8821 150 +8780 150 +8784 150 +8781 150 +8869 151 +8860 151 +8827 151 +8843 151 +8924 152 +8922 152 +8900 152 +8887 152 +8955 153 +8946 153 +8977 153 +8978 153 +9041 154 +9003 154 +9027 154 +9024 154 +9076 155 +9055 155 +9100 155 +9068 155 +9115 156 +9150 156 +9162 156 +9160 156 +9225 157 +9182 157 +9193 157 +9185 157 +9272 158 +9261 158 +9237 158 +9287 158 +9326 159 +9307 159 +9346 159 +9321 159 +9373 160 +9402 160 +9364 160 +9392 160 +9443 161 +9421 161 +9464 161 +9429 161 +9478 162 +9476 162 +9530 162 +9475 162 +9560 163 +9576 163 +9574 163 +9569 163 +9621 164 +9627 164 +9610 164 +9599 164 +9704 165 +9679 165 +9710 165 +9653 165 +9769 166 +9712 166 +9717 166 +9745 166 +9794 167 +9787 167 +9828 167 +9819 167 +9873 168 +9861 168 +9874 168 +9851 168 +9941 169 +9931 169 +9913 169 +9889 169 +9997 170 +10003 170 +9981 170 +9994 170 +10015 171 +10064 171 +10056 171 +10045 171 +10111 172 +10107 172 +10104 172 +10117 172 +10180 173 +10181 173 +10166 173 +10134 173 +10213 174 +10197 174 +10210 174 +10200 174 +10264 175 +10286 175 +10277 175 +10275 175 +10312 176 +10328 176 +10334 176 +10349 176 +10403 177 +10382 177 +10405 177 +10393 177 +10475 178 +10428 178 +10470 178 +10462 178 +10484 179 +10522 179 +10541 179 +10507 179 +10565 180 +10586 180 +10564 180 +10597 180 +10606 181 +10610 181 +10636 181 +10650 181 +10669 182 +10718 182 +10719 182 +10716 182 +10740 183 +10727 183 +10723 183 +10775 183 +10828 184 +10785 184 +10832 184 +10790 184 +10878 185 +10884 185 +10846 185 +10879 185 +10920 186 +10956 186 +10959 186 +10913 186 +10982 187 +10992 187 +10967 187 +10977 187 +11027 188 +11025 188 +11028 188 +11047 188 +11072 189 +11115 189 +11080 189 +11073 189 +11185 190 +11169 190 +11179 190 +11189 190 +11209 191 +11244 191 +11204 191 +11240 191 +11266 192 +11265 192 +11291 192 +11284 192 +11325 193 +11346 193 +11360 193 +11321 193 +11429 194 +11376 194 +11412 194 +11375 194 +11439 195 +11459 195 +11484 195 +11482 195 +11500 196 +11509 196 +11545 196 +11494 196 +11573 197 +11579 197 +11556 197 +11577 197 +11627 198 +11634 198 +11628 198 +11637 198 +11678 199 +11707 199 +11699 199 +11728 199 +11775 200 +11778 200 +11765 200 +11786 200 diff --git a/load_model.py b/load_model.py new file mode 100644 index 0000000000000000000000000000000000000000..c6bf1bae10cf979cb3a8bacac149cacc777edd34 --- /dev/null +++ b/load_model.py @@ -0,0 +1,52 @@ +from argparse import ArgumentParser +from pathlib import Path + +import torch + +from architectures.model_mapping import get_model +from configs.dataset_params import dataset_constants +from evaluation.qsenn_metrics import eval_model_on_all_qsenn_metrics +from get_data import get_data + +def extract_sel_mean_std_bias_assignemnt(state_dict): + feature_sel = state_dict["linear.selection"] + #feature_sel = selection + weight_at_selection = state_dict["linear.layer.weight"] + mean = state_dict["linear.mean"] + std = state_dict["linear.std"] + bias = state_dict["linear.layer.bias"] + return feature_sel, weight_at_selection, mean, std, bias + + +def eval_model(dataset, arch,seed=123456, model_type="qsenn",crop = True, n_features = 50, n_per_class=5, img_size=448, reduced_strides=False, folder = None): + n_classes = dataset_constants[dataset]["num_classes"] + train_loader, test_loader = get_data(dataset, crop=False, 
img_size=img_size) + model = get_model(arch, n_classes, reduced_strides) + if folder is None: + folder = Path.home() / f"tmp/{arch}/{dataset}/{seed}/" + print(folder) + model.load_state_dict(torch.load(folder / "Trained_DenseModel.pth"))#REMOVE + state_dict = torch.load(folder / f"{model_type}_{n_features}_{n_per_class}_FinetunedModel.pth") + selection= torch.load(folder / f"SlDD_Selection_50.pt") + state_dict['linear.selection']=selection + print(state_dict.keys()) + feature_sel, sparse_layer, current_mean, current_std, bias_sparse = extract_sel_mean_std_bias_assignemnt(state_dict) + model.set_model_sldd(feature_sel, sparse_layer, current_mean, current_std, bias_sparse) + model.load_state_dict(state_dict) + print(model) + metrics_finetuned = eval_model_on_all_qsenn_metrics(model, test_loader, train_loader) + +if __name__ == '__main__': + parser = ArgumentParser() + parser.add_argument('--dataset', default="CUB2011", type=str, help='dataset name', choices=["CUB2011", "ImageNet", "TravelingBirds", "StanfordCars"]) + parser.add_argument('--arch', default="resnet50", type=str, help='Backbone Feature Extractor', choices=["resnet50", "resnet18"]) + parser.add_argument('--model_type', default="qsenn", type=str, help='Type of Model', choices=["qsenn", "sldd"]) + parser.add_argument('--seed', default=123456, type=int, help='seed, used for naming the folder and random processes. Could be useful to set to have multiple finetune runs (e.g. Q-SENN and SLDD) on the same dense model') # 769567, 552629 + parser.add_argument('--cropGT', default=False, type=bool, + help='Whether to crop CUB/TravelingBirds based on GT Boundaries') + parser.add_argument('--n_features', default=50, type=int, help='How many features to select') #769567 + parser.add_argument('--n_per_class', default=5, type=int, help='How many features to assign to each class') + parser.add_argument('--img_size', default=448, type=int, help='Image size') + parser.add_argument('--reduced_strides', default=False, type=bool, help='Whether to use reduced strides for resnets') + args = parser.parse_args() + eval_model(args.dataset, args.arch, args.seed, args.model_type,args.cropGT, args.n_features, args.n_per_class, args.img_size, args.reduced_strides) \ No newline at end of file diff --git a/main.py b/main.py new file mode 100644 index 0000000000000000000000000000000000000000..5a340862967c2c1d8befc1eff79bd00122223f93 --- /dev/null +++ b/main.py @@ -0,0 +1,79 @@ +import os +from argparse import ArgumentParser +from pathlib import Path + +import numpy as np +import torch +from tqdm import trange + +from FeatureDiversityLoss import FeatureDiversityLoss +from architectures.model_mapping import get_model +from configs.architecture_params import architecture_params +from configs.dataset_params import dataset_constants +from evaluation.qsenn_metrics import eval_model_on_all_qsenn_metrics +from finetuning.map_function import finetune +from get_data import get_data +from saving.logging import Tee +from saving.utils import json_save +from train import train, test +from training.optim import get_optimizer, get_scheduler_for_model + + +def main(dataset, arch,seed=None, model_type="qsenn", do_dense=True,crop = True, n_features = 50, n_per_class=5, img_size=448, reduced_strides=False): + # create random seed, if seed is None + if seed is None: + seed = np.random.randint(0, 1000000) + np.random.seed(seed) + torch.manual_seed(seed) + dataset_key = dataset + if crop: + assert dataset in ["CUB2011","TravelingBirds"] + dataset_key += "_crop" + log_dir = 
Path.home()/f"tmp/{arch}/{dataset_key}/{seed}/" + log_dir.mkdir(parents=True, exist_ok=True) + tee = Tee(log_dir / "log.txt") # save log to file + n_classes = dataset_constants[dataset]["num_classes"] + train_loader, test_loader = get_data(dataset, crop=crop, img_size=img_size) + model = get_model(arch, n_classes, reduced_strides) + fdl = FeatureDiversityLoss(architecture_params[arch]["beta"], model.linear) + OptimizationSchedule = get_scheduler_for_model(model_type, dataset) + optimizer, schedule, dense_epochs =get_optimizer(model, OptimizationSchedule) + if not os.path.exists(log_dir / "Trained_DenseModel.pth"): + if do_dense: + for epoch in trange(dense_epochs): + model = train(model, train_loader, optimizer, fdl, epoch) + schedule.step() + if epoch % 5 == 0: + test(model, test_loader,epoch) + else: + print("Using pretrained model, only makes sense for ImageNet") + torch.save(model.state_dict(), os.path.join(log_dir, f"Trained_DenseModel.pth")) + else: + model.load_state_dict(torch.load(log_dir / "Trained_DenseModel.pth")) + if not os.path.exists( log_dir/f"Results_DenseModel.json"): + metrics_dense = eval_model_on_all_qsenn_metrics(model, test_loader, train_loader) + json_save(os.path.join(log_dir, f"Results_DenseModel.json"), metrics_dense) + final_model = finetune(model_type, model, train_loader, test_loader, log_dir, n_classes, seed, architecture_params[arch]["beta"], OptimizationSchedule, n_per_class, n_features) + torch.save(final_model.state_dict(), os.path.join(log_dir,f"{model_type}_{n_features}_{n_per_class}_FinetunedModel.pth")) + metrics_finetuned = eval_model_on_all_qsenn_metrics(final_model, test_loader, train_loader) + json_save(os.path.join(log_dir, f"Results_{model_type}_{n_features}_{n_per_class}_FinetunedModel.json"), metrics_finetuned) + print("Done") + pass + + + +if __name__ == '__main__': + parser = ArgumentParser() + parser.add_argument('--dataset', default="CUB2011", type=str, help='dataset name', choices=["CUB2011", "ImageNet", "TravelingBirds", "StanfordCars"]) + parser.add_argument('--arch', default="resnet50", type=str, help='Backbone Feature Extractor', choices=["resnet50", "resnet18"]) + parser.add_argument('--model_type', default="qsenn", type=str, help='Type of Model', choices=["qsenn", "sldd"]) + parser.add_argument('--seed', default=None, type=int, help='seed, used for naming the folder and random processes. Could be useful to set to have multiple finetune runs (e.g. Q-SENN and SLDD) on the same dense model') # 769567, 552629 + parser.add_argument('--do_dense', default=True, type=bool, help='whether to train dense model. 
Should be true for all datasets except (maybe) ImageNet') + parser.add_argument('--cropGT', default=False, type=bool, + help='Whether to crop CUB/TravelingBirds based on GT Boundaries') + parser.add_argument('--n_features', default=50, type=int, help='How many features to select') #769567 + parser.add_argument('--n_per_class', default=5, type=int, help='How many features to assign to each class') + parser.add_argument('--img_size', default=448, type=int, help='Image size') + parser.add_argument('--reduced_strides', default=False, type=bool, help='Whether to use reduced strides for resnets') + args = parser.parse_args() + main(args.dataset, args.arch, args.seed, args.model_type, args.do_dense,args.cropGT, args.n_features, args.n_per_class, args.img_size, args.reduced_strides) diff --git a/options/1.jpg b/options/1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c2dfc949ba8c2e415d7fa4a304aa62e1e2cbbec2 Binary files /dev/null and b/options/1.jpg differ diff --git a/options/10.jpg b/options/10.jpg new file mode 100644 index 0000000000000000000000000000000000000000..9128300ef0e8be31ec1b2b32329c2d5f8d9a38bf Binary files /dev/null and b/options/10.jpg differ diff --git a/options/100.jpg b/options/100.jpg new file mode 100644 index 0000000000000000000000000000000000000000..1abf70eca3103e1d298af7c930840bab36865fde Binary files /dev/null and b/options/100.jpg differ diff --git a/options/101.jpg b/options/101.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f9990bf2ad8802f8c06c5a26fd1994bd60653e47 Binary files /dev/null and b/options/101.jpg differ diff --git a/options/102.jpg b/options/102.jpg new file mode 100644 index 0000000000000000000000000000000000000000..55e46092696f966dfa9822d38a9ef9409cfadbfb Binary files /dev/null and b/options/102.jpg differ diff --git a/options/103.jpg b/options/103.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e28e9726f907fc8d6506e35d643af97c5ee0e8e3 Binary files /dev/null and b/options/103.jpg differ diff --git a/options/104.jpg b/options/104.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d51c00f35f025bec9275f08cbe0fe1deebea4a56 Binary files /dev/null and b/options/104.jpg differ diff --git a/options/105.jpg b/options/105.jpg new file mode 100644 index 0000000000000000000000000000000000000000..92f6cca9d8894c7bbf5652b4f6454ae8e16eb2ee Binary files /dev/null and b/options/105.jpg differ diff --git a/options/106.jpg b/options/106.jpg new file mode 100644 index 0000000000000000000000000000000000000000..4e5dd67235b5af3ccf0a3c3b0e8e7c8a79e610b8 Binary files /dev/null and b/options/106.jpg differ diff --git a/options/107.jpg b/options/107.jpg new file mode 100644 index 0000000000000000000000000000000000000000..419d83508f169914c86584b66dddd85a3f453ecd Binary files /dev/null and b/options/107.jpg differ diff --git a/options/108.jpg b/options/108.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f790970d3e40e5d01604ad5883634e9faf436cf1 Binary files /dev/null and b/options/108.jpg differ diff --git a/options/109.jpg b/options/109.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c2fdeb31f6a0b333ed271407faabe11816961bc3 Binary files /dev/null and b/options/109.jpg differ diff --git a/options/11.jpg b/options/11.jpg new file mode 100644 index 0000000000000000000000000000000000000000..02319e4863b4eef98d1e7a4e9c73b8fba243be8b Binary files /dev/null and b/options/11.jpg differ diff --git a/options/110.jpg b/options/110.jpg new file mode 
diff --git a/options/1.jpg b/options/1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c2dfc949ba8c2e415d7fa4a304aa62e1e2cbbec2 Binary files /dev/null and b/options/1.jpg differ
[options/2.jpg through options/200.jpg: 199 further binary JPEG additions of the same form omitted; git records each only as "Binary files /dev/null and b/options/<n>.jpg differ", so no textual diff exists for these images.]
diff --git a/options_heatmap/1.jpg b/options_heatmap/1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2c04c49cb13b0b8798ef2fe200ef85bd56ee0c72 --- /dev/null +++ b/options_heatmap/1.jpg @@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c1f8fe167abe3b6c81000787038bbbe66a1c13d691ccebdacf4e6fb8dd2460ca
+size 440412
[options_heatmap/10.jpg through options_heatmap/175.jpg (alphabetical listing order): analogous Git LFS pointer additions omitted; each new heatmap image is added as a three-line pointer file (version, oid sha256:…, size) instead of raw binary data.]
diff --git a/options_heatmap/176.jpg b/options_heatmap/176.jpg new file mode 100644 index 0000000000000000000000000000000000000000..fd3f337c032f9419162f605ea2d270476a43817e --- /dev/null +++ b/options_heatmap/176.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid
sha256:2c52af8857c971d5c28b46d26e355e5dbfe736023be83dc2cf981b430060facf +size 320738 diff --git a/options_heatmap/177.jpg b/options_heatmap/177.jpg new file mode 100644 index 0000000000000000000000000000000000000000..4dbc86c2517fcdb8b66a88c7f68adbb56c474e5d --- /dev/null +++ b/options_heatmap/177.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ff58e3ea1364c1329702d560ac0161d3b2e0dfc008e250641459119e77f28cd9 +size 332517 diff --git a/options_heatmap/178.jpg b/options_heatmap/178.jpg new file mode 100644 index 0000000000000000000000000000000000000000..a27f65537a1aaa7a938d9c6758be6aa198c0ad1f --- /dev/null +++ b/options_heatmap/178.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:644e51a215ca7e2e3c1db6fa2a0739c3ddf9a8264321603589f1fdd218ea82f3 +size 648549 diff --git a/options_heatmap/179.jpg b/options_heatmap/179.jpg new file mode 100644 index 0000000000000000000000000000000000000000..70313808cb7c7dc3841537a5d0c34f128a83f177 --- /dev/null +++ b/options_heatmap/179.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a82471e846af757109e559c23b684dfc764110633f59ed1bfcd19818b5bc49f4 +size 580112 diff --git a/options_heatmap/18.jpg b/options_heatmap/18.jpg new file mode 100644 index 0000000000000000000000000000000000000000..5f2e181dd6907e0d425a40805988b255bce9ecfb --- /dev/null +++ b/options_heatmap/18.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1161971303a4a91b6192868760184ca59f2278a90d80dbc929d6de498a421cbc +size 479268 diff --git a/options_heatmap/180.jpg b/options_heatmap/180.jpg new file mode 100644 index 0000000000000000000000000000000000000000..bd82fa10ebc44fdbee0080b5712808bba4b4436f --- /dev/null +++ b/options_heatmap/180.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dc6138fd2f79b5b643399f8bdb666bf37683723e5396fd84f6f8048eb496d42c +size 385096 diff --git a/options_heatmap/181.jpg b/options_heatmap/181.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f4836c8f525e7a535cc1b7ea7cfbf0f11c58609e --- /dev/null +++ b/options_heatmap/181.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:449060fd4b3e84c2970d38ac4ff072b051682a312f04fd9fa3c2e4c856481c5b +size 468524 diff --git a/options_heatmap/182.jpg b/options_heatmap/182.jpg new file mode 100644 index 0000000000000000000000000000000000000000..654c1c59eade2f43354cfb0f519d2853fcca3fa0 --- /dev/null +++ b/options_heatmap/182.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:528db4093eddb2b37fb534cf8e62246f6c4071e90452d5ff568376086f2b930a +size 499364 diff --git a/options_heatmap/183.jpg b/options_heatmap/183.jpg new file mode 100644 index 0000000000000000000000000000000000000000..78aba995d6a667af3de4b4d06279eb8c75c89e33 --- /dev/null +++ b/options_heatmap/183.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:19641323c6fa13028ab2f6ad476ef36da05fdfab962cb843d331a72b29bd6e90 +size 667699 diff --git a/options_heatmap/184.jpg b/options_heatmap/184.jpg new file mode 100644 index 0000000000000000000000000000000000000000..44074a52d459d9f93107fe82d6c6fd27ca8c4c04 --- /dev/null +++ b/options_heatmap/184.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:296e8d3683d578e2f9a672da74e34cd9287f745e58d3e04ff01d58511981e21e +size 605035 diff --git a/options_heatmap/185.jpg b/options_heatmap/185.jpg new file mode 100644 index 
0000000000000000000000000000000000000000..44e80c4e195502af217118840335cbc12c5e4000 --- /dev/null +++ b/options_heatmap/185.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:716cd968c3035f883cb5ea140df5979cbe0d50267eadb1b6a58d4c5731a8de1f +size 597473 diff --git a/options_heatmap/186.jpg b/options_heatmap/186.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d3d4a55626d7d3341d8e43bfde103f1a58ea725a --- /dev/null +++ b/options_heatmap/186.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4f502b51d53a56ecb1b44af6c69843815017a3436007d6ef1262d7d8d85f3ad1 +size 435622 diff --git a/options_heatmap/187.jpg b/options_heatmap/187.jpg new file mode 100644 index 0000000000000000000000000000000000000000..a380522c35011c91819fb4f8341384f43dff4245 --- /dev/null +++ b/options_heatmap/187.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:60a3c49c0285a870ead57bba6c93914656750800213d0b3757f3297330c405cf +size 732605 diff --git a/options_heatmap/188.jpg b/options_heatmap/188.jpg new file mode 100644 index 0000000000000000000000000000000000000000..b95caedd38bcf0afceb017eb1692750546ea4031 --- /dev/null +++ b/options_heatmap/188.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e44076bfd9d4bf7db80140c65eb13ddc65c0f23b4dcfd41a65fd117564fbc583 +size 685975 diff --git a/options_heatmap/189.jpg b/options_heatmap/189.jpg new file mode 100644 index 0000000000000000000000000000000000000000..77a12187ce40eb1c531a961722b2b2250b86334d --- /dev/null +++ b/options_heatmap/189.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c81647308ef09514995eadd3dedc056b6fa61e73be253c8aaa41cb0414f61660 +size 615784 diff --git a/options_heatmap/19.jpg b/options_heatmap/19.jpg new file mode 100644 index 0000000000000000000000000000000000000000..fa5335dee11cae224c87f834387531287f7bd097 --- /dev/null +++ b/options_heatmap/19.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ca28052512f06c31fd0bb3ce1aa3162a66594e70ff18aa2a30101495b0bc9a49 +size 546619 diff --git a/options_heatmap/190.jpg b/options_heatmap/190.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c4eb9c78e56243bc660f91d36a28e15af5b3019f --- /dev/null +++ b/options_heatmap/190.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f37e2d363b56f6c148582b49b7aeec1644173988c80a201bb803a87f0e1736be +size 705490 diff --git a/options_heatmap/191.jpg b/options_heatmap/191.jpg new file mode 100644 index 0000000000000000000000000000000000000000..5a79d6f346d1251fbcabd21b42779eaf6f835d0e --- /dev/null +++ b/options_heatmap/191.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1d4fd979f7c159898b4ac4c9d51d37fb99ed03d402e00dbfca98ecc84bcfad8a +size 573269 diff --git a/options_heatmap/192.jpg b/options_heatmap/192.jpg new file mode 100644 index 0000000000000000000000000000000000000000..efdd48709c0b9c179166f5957b367697d51cf6e7 --- /dev/null +++ b/options_heatmap/192.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:80fc2b15b73fa8e3c24a08a579bd425137ae931a3423547356af4eb83de24bcd +size 315919 diff --git a/options_heatmap/193.jpg b/options_heatmap/193.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e3d0ad43001ef11917533bf8e1aac755bef18ef1 --- /dev/null +++ b/options_heatmap/193.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:4e1b1b87641ae821076b844bffb59bd4501a1712394b683e47bf395e7e13bca1 +size 543707 diff --git a/options_heatmap/194.jpg b/options_heatmap/194.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e2af92a8e213223b0a040aa208607a30001e8edf --- /dev/null +++ b/options_heatmap/194.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e428ebcbc31cce9d90ab17e5d3e779e92fde479a913a75bfa21e2142cdec9a9c +size 457213 diff --git a/options_heatmap/195.jpg b/options_heatmap/195.jpg new file mode 100644 index 0000000000000000000000000000000000000000..acd862f048ac1cf6a33ab921780cfe9d27eec895 --- /dev/null +++ b/options_heatmap/195.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2041fd9eccd352b2c04ff154850ab0bc4ba2b89a2bb903846b9417c6932358bd +size 552359 diff --git a/options_heatmap/196.jpg b/options_heatmap/196.jpg new file mode 100644 index 0000000000000000000000000000000000000000..3b9ce5b258f81c179531d81f59a5425614508713 --- /dev/null +++ b/options_heatmap/196.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b04bd01a53e0a76f7f24a7ef8ac4992b21743992ab45714af688874e4011f697 +size 488159 diff --git a/options_heatmap/197.jpg b/options_heatmap/197.jpg new file mode 100644 index 0000000000000000000000000000000000000000..fd867efabfb887653b91e8ed4266a72dc727cf0e --- /dev/null +++ b/options_heatmap/197.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2afaed863a5f0611a35e560ab02429c1ab2629b7c032efd27836e39f1419642d +size 483079 diff --git a/options_heatmap/198.jpg b/options_heatmap/198.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c050ba95f5db4ccb66f0afe3095e16178860f857 --- /dev/null +++ b/options_heatmap/198.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1f0d0062b3f051deba67a9ec1fa23ebd2bc9ec359a31b069c3bb2a1e673197c3 +size 414047 diff --git a/options_heatmap/199.jpg b/options_heatmap/199.jpg new file mode 100644 index 0000000000000000000000000000000000000000..cbf5c978de3cc13e58a0870649c8d235794f4725 --- /dev/null +++ b/options_heatmap/199.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f6964afc456e477f1f5e1da41d1d4486e1312d0731b0826ed1525803018616b3 +size 711223 diff --git a/options_heatmap/2.jpg b/options_heatmap/2.jpg new file mode 100644 index 0000000000000000000000000000000000000000..fe28f0dcf0e690922932ea254b06df66c90f62af --- /dev/null +++ b/options_heatmap/2.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7f5dc690285894e716ea03f61aa6e3b1d03cb8349ce78deee335f268add60b7c +size 534283 diff --git a/options_heatmap/20.jpg b/options_heatmap/20.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f063e9140ddd25c801860f2a0bb85f2f568befea --- /dev/null +++ b/options_heatmap/20.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9a4a27517a7856486c399324a247ba9fa70aad010aacaf6cc589df6af9235111 +size 437207 diff --git a/options_heatmap/200.jpg b/options_heatmap/200.jpg new file mode 100644 index 0000000000000000000000000000000000000000..53d968e3f29f6ee4cc6e7a30a9ba715b35eb076a --- /dev/null +++ b/options_heatmap/200.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f3c9494588690c5217caafc5506d8c44cd305baef70a33a8ab4856584bc0bc6d +size 326585 diff --git a/options_heatmap/21.jpg b/options_heatmap/21.jpg new file mode 100644 index 0000000000000000000000000000000000000000..20697367eb7739fc0a158a7ac8dbbf1c81d1bef3 --- 
/dev/null +++ b/options_heatmap/21.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5a45702eb1357feec944a2105878fea70abd2ecf981bb715abbd22f73cf9fada +size 728577 diff --git a/options_heatmap/22.jpg b/options_heatmap/22.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ab6dd761040e6300c777f2f1d6a23a48fc79d4da --- /dev/null +++ b/options_heatmap/22.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c6a90517bed0441ae81e5167b21206388c0152d4c207e1f2d9ad86700630c903 +size 533901 diff --git a/options_heatmap/23.jpg b/options_heatmap/23.jpg new file mode 100644 index 0000000000000000000000000000000000000000..1bbfa8af34f52539f771a817cf5b2aad24a6f457 --- /dev/null +++ b/options_heatmap/23.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:05eda483f1842fe31a1809de811d6eb795b9730db0357f946b0ae57c6b0bbf75 +size 398011 diff --git a/options_heatmap/24.jpg b/options_heatmap/24.jpg new file mode 100644 index 0000000000000000000000000000000000000000..16cb117e46f208e311d68a6f69f8d8cb10fe4ccf --- /dev/null +++ b/options_heatmap/24.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8dcd0d98d091ca476de3187eb0a6651bf7eb6f355fcdb23e12e083d7989e26ca +size 516541 diff --git a/options_heatmap/25.jpg b/options_heatmap/25.jpg new file mode 100644 index 0000000000000000000000000000000000000000..350d75c20145bc731f8b2a4f0b09acf1e0631614 --- /dev/null +++ b/options_heatmap/25.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3009d69bb268ca5bc93472cffe90b996c7330b00ae642b3812f9772c3375093f +size 667592 diff --git a/options_heatmap/26.jpg b/options_heatmap/26.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2547fd5b8770ebd8bef2711529c817ccf1beab5e --- /dev/null +++ b/options_heatmap/26.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f2c4bca029b485185166c756b4d9beeadda49cb2383c84ca5d5f26947132392d +size 737092 diff --git a/options_heatmap/27.jpg b/options_heatmap/27.jpg new file mode 100644 index 0000000000000000000000000000000000000000..82bb195da08329005d06c522446141ac347627d2 --- /dev/null +++ b/options_heatmap/27.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cdae0ee411bc92e89d47be1426d60e3c435b0aa189c7db73f1f7dd46f270f2d2 +size 765192 diff --git a/options_heatmap/28.jpg b/options_heatmap/28.jpg new file mode 100644 index 0000000000000000000000000000000000000000..441732d1697bb6df6920a9b2c19c9f675a7465da --- /dev/null +++ b/options_heatmap/28.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7a2f91869be3e8e3c25ecddbfd5ec5da29142d0f280001e66abb64870620c9c0 +size 637716 diff --git a/options_heatmap/29.jpg b/options_heatmap/29.jpg new file mode 100644 index 0000000000000000000000000000000000000000..fec57b128f64b0adeddc24f260ffc0efa6b97d79 --- /dev/null +++ b/options_heatmap/29.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:40e03a34fe4672fc2fa2fefad4fb72a3748c6d1efd6f6f439c7511e34cee79ad +size 550883 diff --git a/options_heatmap/3.jpg b/options_heatmap/3.jpg new file mode 100644 index 0000000000000000000000000000000000000000..1c2d09ee9db4c0b48f815c045e9a1aa4cdf30455 --- /dev/null +++ b/options_heatmap/3.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b34a9a7ea1023d63e37e5aaf99ebcc7eeec2e061927ae9ffc2ce872f44bcebcf +size 493958 diff --git a/options_heatmap/30.jpg b/options_heatmap/30.jpg new file mode 100644 index 
0000000000000000000000000000000000000000..2bc521f699cf46a72173456da95e920c8f9c52bb --- /dev/null +++ b/options_heatmap/30.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1f07b39d9f229f72a8c12cc7473b06a792d5f0235f312ed1668189bc2d37c970 +size 540681 diff --git a/options_heatmap/31.jpg b/options_heatmap/31.jpg new file mode 100644 index 0000000000000000000000000000000000000000..676225d671e020ba8265cbb3691c102cd454ef3f --- /dev/null +++ b/options_heatmap/31.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:93825405902050f773dfe131177c951422abf8a831f81f4d434eeefba70f65e3 +size 648254 diff --git a/options_heatmap/32.jpg b/options_heatmap/32.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ff65a37f0fef8cf4aeb3e610a3cd2d1621736e1c --- /dev/null +++ b/options_heatmap/32.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0a28b77bc7479591d5817f01f9f0eff66bc7df661cf187be31bc10d0e14eabda +size 605609 diff --git a/options_heatmap/33.jpg b/options_heatmap/33.jpg new file mode 100644 index 0000000000000000000000000000000000000000..b4dac63eb3d4cc057f227d17732cb3b4312e18d4 --- /dev/null +++ b/options_heatmap/33.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4b58a9c0e9e07fad2a91133eae6ec4b6bd8905a4af7bbb5af129a5d81f140769 +size 995080 diff --git a/options_heatmap/34.jpg b/options_heatmap/34.jpg new file mode 100644 index 0000000000000000000000000000000000000000..911841a304d647911841515014475f78fb6410b7 --- /dev/null +++ b/options_heatmap/34.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bfa1321e84758915998f444216e988841496f4b45b59e174da9c6dc455034a0f +size 763921 diff --git a/options_heatmap/35.jpg b/options_heatmap/35.jpg new file mode 100644 index 0000000000000000000000000000000000000000..171ddc8af0a9163e725a8494d7e7db3c33467746 --- /dev/null +++ b/options_heatmap/35.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b62fc37a03f8c72204f72e5e3a389dbce11b3beb2d346d7c47fa05edc6ec7c14 +size 296029 diff --git a/options_heatmap/36.jpg b/options_heatmap/36.jpg new file mode 100644 index 0000000000000000000000000000000000000000..b0f0c1999d993e520b55fa3920bfc5843118bcfd --- /dev/null +++ b/options_heatmap/36.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:27bb2b765a85d93d491f497f19d8d2cbb4ee9dd71b89fd473b5cd1f1e166e48a +size 468869 diff --git a/options_heatmap/37.jpg b/options_heatmap/37.jpg new file mode 100644 index 0000000000000000000000000000000000000000..3ed256319b0abcd22340a9cfb59b5e824b7acf57 --- /dev/null +++ b/options_heatmap/37.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0cb96052614ecde3718ccf0118b693ac0eda56ddbd1ee224a0ef130b9aa01ac0 +size 537828 diff --git a/options_heatmap/38.jpg b/options_heatmap/38.jpg new file mode 100644 index 0000000000000000000000000000000000000000..48a32cdc574f28ff2d9e1027cba740ba9e86ce6e --- /dev/null +++ b/options_heatmap/38.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e7ac3c66d9aacbd5c09e924fe1ca3b8eebabb42578bbf719134e90131e58054b +size 416980 diff --git a/options_heatmap/39.jpg b/options_heatmap/39.jpg new file mode 100644 index 0000000000000000000000000000000000000000..3e93621ccc24650681eb52965223e012b32681c9 --- /dev/null +++ b/options_heatmap/39.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1cf5c033ffd59aa72832944e38b0359e4b06777625573c452c9914a8c080a9f0 +size 576730 
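The three-line stubs in this block (one per image under options_heatmap/) are Git LFS pointer files rather than the JPEGs themselves: each records the LFS spec version, the sha256 object id, and the byte size of the image stored in LFS. Purely as an illustration of that layout, the sketch below parses one such pointer; the helper name is hypothetical and not part of this repository, and in practice the images are materialized by git-lfs rather than read from these stubs.

```python
from pathlib import Path


def read_lfs_pointer(path):
    # Parse a Git LFS pointer stub of the form:
    #   version https://git-lfs.github.com/spec/v1
    #   oid sha256:<64 hex digits>
    #   size <bytes>
    fields = {}
    for line in Path(path).read_text().splitlines():
        if line.strip():
            key, _, value = line.partition(" ")
            fields[key] = value
    return fields


# Hypothetical usage on a checkout where the pointer has not been smudged,
# values taken from the options_heatmap/142.jpg entry above:
# info = read_lfs_pointer("options_heatmap/142.jpg")
# info["oid"]        -> "sha256:92b64e596ca55a7c..."
# int(info["size"])  -> 464267
```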
diff --git a/options_heatmap/4.jpg b/options_heatmap/4.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e8572114fb72067a6f98ccbf3ab1151d3e9531cf --- /dev/null +++ b/options_heatmap/4.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:202406eddc6856c91252b3a9ccd85e6dfde6ba290391c2067bf88ecf0ad636d7 +size 1028689 diff --git a/options_heatmap/40.jpg b/options_heatmap/40.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2e8af4aa4d51ad4c28ee923a32df0df3bdb8ea72 --- /dev/null +++ b/options_heatmap/40.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a46d0a35bd376f7c5b80a9ee905ecc6f1e30970f1514b12a02462b6035eeff7b +size 402495 diff --git a/options_heatmap/41.jpg b/options_heatmap/41.jpg new file mode 100644 index 0000000000000000000000000000000000000000..7eb9acdea020d6bebd40b9967d8e5040d5274930 --- /dev/null +++ b/options_heatmap/41.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7a55814224233927e6ad2fef99713713f4a6c71629c78a085119f32a4c6cf1ea +size 477414 diff --git a/options_heatmap/42.jpg b/options_heatmap/42.jpg new file mode 100644 index 0000000000000000000000000000000000000000..fc841f7099410f3a8af9c101beb90b4bf64ee3f6 --- /dev/null +++ b/options_heatmap/42.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5a6790a69d1317e2467070e2e14f66a21b48cc114e2beb7201ab973de2508083 +size 408233 diff --git a/options_heatmap/43.jpg b/options_heatmap/43.jpg new file mode 100644 index 0000000000000000000000000000000000000000..22e1062e486c90db63e953c919fd3708ecc409cd --- /dev/null +++ b/options_heatmap/43.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:71775dcdad35513d1441f241e2556193f4b6658ce4d84ba5c39b95b3d67a2182 +size 478578 diff --git a/options_heatmap/44.jpg b/options_heatmap/44.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2214405fccb639b474a562e5bbbbf8091ea53f5c --- /dev/null +++ b/options_heatmap/44.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b1a3de9c38e68ed7ef9e4d192945d6a0ca20ecb7b104b5745c59b4b8481705cc +size 643760 diff --git a/options_heatmap/45.jpg b/options_heatmap/45.jpg new file mode 100644 index 0000000000000000000000000000000000000000..33dca6febb581ec2634005a7f0025d8c96b502a6 --- /dev/null +++ b/options_heatmap/45.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9814d584b60764c8c23c41c4dd3bfb49d5fc612134a9b17b9c62152f728eddd5 +size 460486 diff --git a/options_heatmap/46.jpg b/options_heatmap/46.jpg new file mode 100644 index 0000000000000000000000000000000000000000..121e8fb009b067db0320a3f5900ba84b30d7ec3a --- /dev/null +++ b/options_heatmap/46.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17c5fee32541dbdf8e0aadf4ec95e07c25a83b8cb13f242a8939cbe665265a60 +size 713834 diff --git a/options_heatmap/47.jpg b/options_heatmap/47.jpg new file mode 100644 index 0000000000000000000000000000000000000000..a954360de2a56861e1a586e25c6abe1203f9910b --- /dev/null +++ b/options_heatmap/47.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ec3f95924dd3aa667f75130178b20350544daf493e3960617f40a2ffe62fe930 +size 497545 diff --git a/options_heatmap/48.jpg b/options_heatmap/48.jpg new file mode 100644 index 0000000000000000000000000000000000000000..5f8bd5b24c5ebb333b1ab44abb08fc80f9848a8f --- /dev/null +++ b/options_heatmap/48.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:f4b3d23f84617063893afe4881c928a5934ee5598a4ffe0dbf97e0eece391040 +size 540540 diff --git a/options_heatmap/49.jpg b/options_heatmap/49.jpg new file mode 100644 index 0000000000000000000000000000000000000000..77fd45d7b08fe46e0fffcfa3246018375f141703 --- /dev/null +++ b/options_heatmap/49.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:90f78b3d37a05a41729ce2d591ce352c83b80edf613f96f4a96b15da4facd62f +size 731439 diff --git a/options_heatmap/5.jpg b/options_heatmap/5.jpg new file mode 100644 index 0000000000000000000000000000000000000000..9668386318113c8d50983873346aa267187feadb --- /dev/null +++ b/options_heatmap/5.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3fe07dfcdb769f9b9a5f396051447b45d6ae2e7b6118cffd6ffc57735500b39c +size 384850 diff --git a/options_heatmap/50.jpg b/options_heatmap/50.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2e673d535e7391e6636e328fefd6b9e2c953debb --- /dev/null +++ b/options_heatmap/50.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:51a0b301dff360df1337c200eba93a9c534009171e1deb92d549cdba95b79ed9 +size 435571 diff --git a/options_heatmap/51.jpg b/options_heatmap/51.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e610a84ecdabf22816a2428df17aaca0115b4749 --- /dev/null +++ b/options_heatmap/51.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a82e1d585303547a6e29e513f0edd99a692ecde64fba22f87755121f5cc6189d +size 673713 diff --git a/options_heatmap/52.jpg b/options_heatmap/52.jpg new file mode 100644 index 0000000000000000000000000000000000000000..8ea1d5938deb47e2179f82bfd61ad015da77f384 --- /dev/null +++ b/options_heatmap/52.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0a46e8b30eea6464a99c3078d429beb8c503b022c6706c9214710e174122736d +size 675021 diff --git a/options_heatmap/53.jpg b/options_heatmap/53.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ccde610ecfc87e24a4aaba0bbbd12ad5ac3cc7c6 --- /dev/null +++ b/options_heatmap/53.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3b2441ca4f9fbee7a5c631be08b06cbeef013eef488ccc51277f470506bf3ce5 +size 553770 diff --git a/options_heatmap/54.jpg b/options_heatmap/54.jpg new file mode 100644 index 0000000000000000000000000000000000000000..aca8d19d7e367a2ebbb43eb8c148dfd441cd707b --- /dev/null +++ b/options_heatmap/54.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:76db2038bc9ade371283c860ea793cb3dd3605c39596addd4fa0408d5988f4b6 +size 619326 diff --git a/options_heatmap/55.jpg b/options_heatmap/55.jpg new file mode 100644 index 0000000000000000000000000000000000000000..a71e74cfd9c390fd16eeafbfd7fcf4b57bbd9f07 --- /dev/null +++ b/options_heatmap/55.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f53f57981dbe67110b79cd9ad824cc62b25d8c6812f4b47e84a2e5e740a27464 +size 472827 diff --git a/options_heatmap/56.jpg b/options_heatmap/56.jpg new file mode 100644 index 0000000000000000000000000000000000000000..a93e495a03edfc6d1beb21e910242b3f9c9fa4e9 --- /dev/null +++ b/options_heatmap/56.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a48f38cff8d1583d43d621063eaf47743451dd30c7ae625554f12d5c97ccac24 +size 374395 diff --git a/options_heatmap/57.jpg b/options_heatmap/57.jpg new file mode 100644 index 0000000000000000000000000000000000000000..8ad2ad8b1a879b27031e18bdec0d1b66d37bf52b --- /dev/null +++ 
b/options_heatmap/57.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1a2a918e01605013054512289145b0d5ee744dfdc0036e1244a9bf743a04e625 +size 346410 diff --git a/options_heatmap/58.jpg b/options_heatmap/58.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f13fb5d0fc57e798fb81ec832712f695ae1bf3b4 --- /dev/null +++ b/options_heatmap/58.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c8058440a53e5cd7526c0e0b7fc24f077e2311688ae4d2e457ca0539badeaddd +size 601601 diff --git a/options_heatmap/59.jpg b/options_heatmap/59.jpg new file mode 100644 index 0000000000000000000000000000000000000000..9e56311077f940278dc73a9cba1af6bdf343fe1f --- /dev/null +++ b/options_heatmap/59.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a5385a30aa6b069ed2fa2658e3d3cd15ca26fbce185009ee838d3c500430b9a3 +size 597507 diff --git a/options_heatmap/6.jpg b/options_heatmap/6.jpg new file mode 100644 index 0000000000000000000000000000000000000000..3940e422c19e1d313bbcfeebd50e93d536b31faf --- /dev/null +++ b/options_heatmap/6.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:586cb82c9ccfc0c403eef6a958aae057a0bcd89b0f589fa3f6e95ebaa2b40bc2 +size 439774 diff --git a/options_heatmap/60.jpg b/options_heatmap/60.jpg new file mode 100644 index 0000000000000000000000000000000000000000..90b5cbfbc7e634c840abb3b679acd6fec690a43d --- /dev/null +++ b/options_heatmap/60.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ccbd36a05f3ec4cceec8c9766b4eb4d067e8298cc9f42100d9db4c542e2239bb +size 684688 diff --git a/options_heatmap/61.jpg b/options_heatmap/61.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e58b64c47f4589b24d4f68df68111964345b7060 --- /dev/null +++ b/options_heatmap/61.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:190d0a4254cdbc7afbb4b86358402cd855f9bffb4777533e6a4223bfd2e09d85 +size 436744 diff --git a/options_heatmap/62.jpg b/options_heatmap/62.jpg new file mode 100644 index 0000000000000000000000000000000000000000..df5336289d24ae9fcc9df5f78ddcbd677ef846fa --- /dev/null +++ b/options_heatmap/62.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2f4a9d5b33f6f7bd49ef2376bc0fb7452a39510725026858e28a86ebaff78628 +size 927621 diff --git a/options_heatmap/63.jpg b/options_heatmap/63.jpg new file mode 100644 index 0000000000000000000000000000000000000000..0dc3a3735fcb1c627ec6b3f46c54b87aba4667b4 --- /dev/null +++ b/options_heatmap/63.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9d92d5e5d5d471edb373a93542edcd22d6e40130de59aa303f3a91ec2a633404 +size 260204 diff --git a/options_heatmap/64.jpg b/options_heatmap/64.jpg new file mode 100644 index 0000000000000000000000000000000000000000..08c68ab4ba0f250193c5c5a245c5279576e04035 --- /dev/null +++ b/options_heatmap/64.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c4155711ef85bbd9089bc4d1b41942b6a43549665cbe21c0197007e5ec7f87d3 +size 581752 diff --git a/options_heatmap/65.jpg b/options_heatmap/65.jpg new file mode 100644 index 0000000000000000000000000000000000000000..21ead6ea6f41e9e67299b47d643b1a741f871e99 --- /dev/null +++ b/options_heatmap/65.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ea6af91c36d3a7f3a0d6968bdb2ec534ca3e06fba68903f8b49069630804f56e +size 330188 diff --git a/options_heatmap/66.jpg b/options_heatmap/66.jpg new file mode 100644 index 
0000000000000000000000000000000000000000..90ce068fe7b8ce7903ea0ca3a69cc2223edb0506 --- /dev/null +++ b/options_heatmap/66.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a4fdb6c2b6471a335721ed178a7d05dccb8073b2ae448873f87b43e5148df51d +size 641184 diff --git a/options_heatmap/67.jpg b/options_heatmap/67.jpg new file mode 100644 index 0000000000000000000000000000000000000000..02b90de3630d7a5a5d0a65726cdb797bd9c68841 --- /dev/null +++ b/options_heatmap/67.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:611f771623a876229a418bbd1719b24d7b20e77e5b0b57157ecf04848581d374 +size 275503 diff --git a/options_heatmap/68.jpg b/options_heatmap/68.jpg new file mode 100644 index 0000000000000000000000000000000000000000..02445f8d11e18941ed234d5d9bb7f07f01a2441e --- /dev/null +++ b/options_heatmap/68.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e2abc000fe1b2058ba88eeeeb9e63f5e2e84810c4aade39eb84cddb2278fba49 +size 372706 diff --git a/options_heatmap/69.jpg b/options_heatmap/69.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f7118bee40cada98d9562b3d1b43734cd0018a0a --- /dev/null +++ b/options_heatmap/69.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8a56f9c1bdcd3820fbc07c9a7e273f2e488f7885f580847869f5b6055e876fa5 +size 247718 diff --git a/options_heatmap/7.jpg b/options_heatmap/7.jpg new file mode 100644 index 0000000000000000000000000000000000000000..0cd1606c5a9417f84eef224bf79fc59a42693274 --- /dev/null +++ b/options_heatmap/7.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c3925dd77b331a21ebbaf55093fda3cfdb22bb36142c583752d8147b5f5f1b4c +size 452886 diff --git a/options_heatmap/70.jpg b/options_heatmap/70.jpg new file mode 100644 index 0000000000000000000000000000000000000000..a0ebd986e8aa5f384eaf1bca8cff652999df9349 --- /dev/null +++ b/options_heatmap/70.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:927f0dd025b2d3fdfab7c5cd5de26f30e23f122ee4a759d3abf5618baad08380 +size 277338 diff --git a/options_heatmap/71.jpg b/options_heatmap/71.jpg new file mode 100644 index 0000000000000000000000000000000000000000..04f2de2cd2d486db7ec8ea799a892ae2000d55d2 --- /dev/null +++ b/options_heatmap/71.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ea412a68ba5a4e21b4eb092a756e6ed134b9023194c0d6d24339ac28f78086d3 +size 285113 diff --git a/options_heatmap/72.jpg b/options_heatmap/72.jpg new file mode 100644 index 0000000000000000000000000000000000000000..18bff401b54ec7c6089c05c0856d2c7faae11d18 --- /dev/null +++ b/options_heatmap/72.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:daaea1cbbf753f81d70073f058d66fcf2fff78808c145f959dda75f25ebe3de9 +size 318202 diff --git a/options_heatmap/73.jpg b/options_heatmap/73.jpg new file mode 100644 index 0000000000000000000000000000000000000000..41614641bad92ad3116c60fe88d5488e17cac98d --- /dev/null +++ b/options_heatmap/73.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:76186f72ebb762302f998f72bb17f512a02bc348880bd0e4c075eb4fcf0eb697 +size 422221 diff --git a/options_heatmap/74.jpg b/options_heatmap/74.jpg new file mode 100644 index 0000000000000000000000000000000000000000..92417450958e9fa86133565667f9d52813aa44ba --- /dev/null +++ b/options_heatmap/74.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:926fa0b1dd670fe728328a73884851e3ffba31b44e4dd08de4987f4af72a919c +size 772096 diff 
--git a/options_heatmap/75.jpg b/options_heatmap/75.jpg new file mode 100644 index 0000000000000000000000000000000000000000..4e2f2906036570d13fe182b60370d95e01185c65 --- /dev/null +++ b/options_heatmap/75.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:054849f6990af4a540da32974caedabd2ad1dbed6adb5938d5946e93fa7f250c +size 539374 diff --git a/options_heatmap/76.jpg b/options_heatmap/76.jpg new file mode 100644 index 0000000000000000000000000000000000000000..3ed6e224b570e4b4c6566321cc4c75649a8e5820 --- /dev/null +++ b/options_heatmap/76.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fabf26fc4c216510ee9373efbf7e6075536cd41e91a41a784768ca255ee77a7f +size 630734 diff --git a/options_heatmap/77.jpg b/options_heatmap/77.jpg new file mode 100644 index 0000000000000000000000000000000000000000..73361aa8ce6756a5ae582fa91267bab4a83376c1 --- /dev/null +++ b/options_heatmap/77.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:429c89a3c4afba33030c80f1bc2b887b610509bc3cd15059f27638bd561f1bc8 +size 535626 diff --git a/options_heatmap/78.jpg b/options_heatmap/78.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f9ed529027d7c22b63a882b65d101c77c2129dd7 --- /dev/null +++ b/options_heatmap/78.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:45eb9cacbe42b9dcb67fe514cb0b8f27cbf86419a47cb0ed59e8270b51d18989 +size 428074 diff --git a/options_heatmap/79.jpg b/options_heatmap/79.jpg new file mode 100644 index 0000000000000000000000000000000000000000..6ae7c7d451170fca81f16cba6b55402ad69e5dbf --- /dev/null +++ b/options_heatmap/79.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:68a24f5105663beada3d2a0a32656a1bd5c77ef703a5b1689ca77668c71e37b4 +size 624694 diff --git a/options_heatmap/8.jpg b/options_heatmap/8.jpg new file mode 100644 index 0000000000000000000000000000000000000000..eed1303885e25a985534f91f838942d01752ccdb --- /dev/null +++ b/options_heatmap/8.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1f79ce286f28b65d78dfa8574ad12600d8feccbc66cea59b70d422c6929f903c +size 621083 diff --git a/options_heatmap/80.jpg b/options_heatmap/80.jpg new file mode 100644 index 0000000000000000000000000000000000000000..910cc79415c43e0f5cfb46b1b52c5c4a64f49a76 --- /dev/null +++ b/options_heatmap/80.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e303a0c17da27c6d8d9c601720452390f09c3149532816f1e373e4c8141b70e1 +size 461415 diff --git a/options_heatmap/81.jpg b/options_heatmap/81.jpg new file mode 100644 index 0000000000000000000000000000000000000000..7bb55d1e3a5d6bfaeaf469608aa57dc2ddf1c3c2 --- /dev/null +++ b/options_heatmap/81.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7fc0874495ce9eb0c69153f75ba6e5a7a25925891206a6973f0e0e72a4830e5e +size 416665 diff --git a/options_heatmap/82.jpg b/options_heatmap/82.jpg new file mode 100644 index 0000000000000000000000000000000000000000..b2ba8d78acbcb5d5e5ec6adfec018e32753abb10 --- /dev/null +++ b/options_heatmap/82.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a9356c3711ffe8d8b4ba0a5299dae5551dfab7d883304cfad693314d3877d26a +size 793372 diff --git a/options_heatmap/83.jpg b/options_heatmap/83.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f21477d307eac5269a2703e9046cb0e585b188f4 --- /dev/null +++ b/options_heatmap/83.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:c2df6edd2ef7b30c7bdf34321cc632b3422ea46d2d8ee771276629e20fe713f9 +size 406072 diff --git a/options_heatmap/84.jpg b/options_heatmap/84.jpg new file mode 100644 index 0000000000000000000000000000000000000000..89764a7a5c6e8aa851496a0e3292371b039fac5d --- /dev/null +++ b/options_heatmap/84.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0afb8f69c8ab8c3fa2bac68505db96caa371ea61b14bd6ed1701c7dc694e3f1a +size 461389 diff --git a/options_heatmap/85.jpg b/options_heatmap/85.jpg new file mode 100644 index 0000000000000000000000000000000000000000..dc94293fa4e42a7bf18fbbe097345a19a1924c43 --- /dev/null +++ b/options_heatmap/85.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6d9bb47d90bb47e7bf620ff1c3085b8e93974854cf73c45d6dc2b295d6a08f4e +size 1015722 diff --git a/options_heatmap/86.jpg b/options_heatmap/86.jpg new file mode 100644 index 0000000000000000000000000000000000000000..353255067b3b48c6442a7c1cca0d6569aa2bb9c0 --- /dev/null +++ b/options_heatmap/86.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0d569b52293e8aeeb61dc5911c740cfab44ce94e7e1abaf456cc6a885e588df4 +size 823537 diff --git a/options_heatmap/87.jpg b/options_heatmap/87.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c4073a0c624057b3588093f052e8074733e1fc9e --- /dev/null +++ b/options_heatmap/87.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7ce5f34ea040228f61718c29ccc8f7b3c218d4cc12c76443b3cb8c63705595f5 +size 1022708 diff --git a/options_heatmap/88.jpg b/options_heatmap/88.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ee4a9b699e903cd75a11cfa6bf7b3700822aad34 --- /dev/null +++ b/options_heatmap/88.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f0a4e44779933f7f3a666c7035b16ddda9d6b3d6a23c116b9dccd5eb35682536 +size 608450 diff --git a/options_heatmap/89.jpg b/options_heatmap/89.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2193c45af5b641b5927d6eb2ff579db7507d93d6 --- /dev/null +++ b/options_heatmap/89.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f73b80da1d908a2952692b7894ab6b4b95a82a678cd6ea7085c7b4aaf9f4ae54 +size 477770 diff --git a/options_heatmap/9.jpg b/options_heatmap/9.jpg new file mode 100644 index 0000000000000000000000000000000000000000..b943b6699443d36cfdbfef39cb98e840f9eb689f --- /dev/null +++ b/options_heatmap/9.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eecc5a3171f9d58ba222f51a2e04a6fa58f70ea20bdc5a81fc430d9366b26635 +size 1075909 diff --git a/options_heatmap/90.jpg b/options_heatmap/90.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2006c60ef96a000645500bfee91a3b24c57307ac --- /dev/null +++ b/options_heatmap/90.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:84a8f810b028b17d470f3ed3d07eab0f8b018a58456002173a87d782361978e5 +size 533536 diff --git a/options_heatmap/91.jpg b/options_heatmap/91.jpg new file mode 100644 index 0000000000000000000000000000000000000000..bef554afee40ed0cc0bd5adde03cb340e4e278a4 --- /dev/null +++ b/options_heatmap/91.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5c1b529c48d69871564674899d1a5fbc5e8e57ea9766df0812184fdbe221e7db +size 468249 diff --git a/options_heatmap/92.jpg b/options_heatmap/92.jpg new file mode 100644 index 0000000000000000000000000000000000000000..480b43d01d03db7dffa715bde682e30f4067018c --- /dev/null +++ 
b/options_heatmap/92.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e101c44a0f225242ab81f610e4cbd7ceb3f18e32f495129bb65b841e6a84c991 +size 607712 diff --git a/options_heatmap/93.jpg b/options_heatmap/93.jpg new file mode 100644 index 0000000000000000000000000000000000000000..366670abc84d39c4e7e4978b7f5985dcdbc90d1f --- /dev/null +++ b/options_heatmap/93.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c6ade1447a037d326b1461f418abf6ea03d27526ae62b045bf7cad45e747a540 +size 705139 diff --git a/options_heatmap/94.jpg b/options_heatmap/94.jpg new file mode 100644 index 0000000000000000000000000000000000000000..13906116b6c67f6950280a1a7dc75b6b3c7eda66 --- /dev/null +++ b/options_heatmap/94.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:23368067170fbd94793546da731def46bd0b1b00f1eb514da70a0a33cf14d54d +size 466794 diff --git a/options_heatmap/95.jpg b/options_heatmap/95.jpg new file mode 100644 index 0000000000000000000000000000000000000000..73685f38cdff1ade82ce46a0e63bc206ef1b1826 --- /dev/null +++ b/options_heatmap/95.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bb94022e86faa35fd97d03da200ce9a237f6e11ef1e7d1e977b29a7dd6f520e7 +size 711093 diff --git a/options_heatmap/96.jpg b/options_heatmap/96.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d17a7badf1392c205359e4a14b24045e69beb0c2 --- /dev/null +++ b/options_heatmap/96.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:29402c5d0849080f7d45e91e5da0c75477eea9c8f8ff7fc39920c0a290e0e581 +size 575321 diff --git a/options_heatmap/97.jpg b/options_heatmap/97.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c1b1384b16aad393dcf063c211da9b46dacd011b --- /dev/null +++ b/options_heatmap/97.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eb2cb5aea7f14ba61f2da4f7cf23cbcde40d33109f193dcacbf241415d5df834 +size 564168 diff --git a/options_heatmap/98.jpg b/options_heatmap/98.jpg new file mode 100644 index 0000000000000000000000000000000000000000..a3a5863a0e1f707956e5faf0cc89495f5fe5b6ed --- /dev/null +++ b/options_heatmap/98.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9a123520be23be366c9792d80c8097f5cc44e74bf03cf53b9b54f1202fa34512 +size 660366 diff --git a/options_heatmap/99.jpg b/options_heatmap/99.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f862710dae9ef67c328b29e94591530a4a3ef6dd --- /dev/null +++ b/options_heatmap/99.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:31bc135b3d32e6449b0776f4c5e11c06c033fc8d9708685aea7515da309cd67e +size 588638 diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..4f8641a287620411061756d1e997027170cd2a33 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,3 @@ +torch +torchvision +opencv-python \ No newline at end of file diff --git a/saving/logging.py b/saving/logging.py new file mode 100644 index 0000000000000000000000000000000000000000..377e31b33e06865bb588bfec32678c947e5c3bb3 --- /dev/null +++ b/saving/logging.py @@ -0,0 +1,27 @@ +import sys + + +class Tee(object): + def __init__(self, name, file_only=False): + self.file = open(name, "a") + self.stdout = sys.stdout + self.stderr = sys.stderr + sys.stdout = self + sys.stderr = self + self.file_only = file_only + + def __del__(self): + sys.stdout = self.stdout + sys.stderr = self.stderr + self.file.close() + + def 
write(self, data): + self.file.write(data) + if not self.file_only: + self.stdout.write(data) + self.flush() + + def flush(self): + self.file.flush() + + diff --git a/saving/utils.py b/saving/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..6bf35c3c5129e84ab6f13f85c4c04fba4f7e33b4 --- /dev/null +++ b/saving/utils.py @@ -0,0 +1,6 @@ +import json + + +def json_save(filename, data): + with open(filename, "w") as f: + json.dump(data, f,indent=4) \ No newline at end of file diff --git a/select_with_diversity.py b/select_with_diversity.py new file mode 100644 index 0000000000000000000000000000000000000000..5cfce18817c8cd0da783224ba34298bd1c782256 --- /dev/null +++ b/select_with_diversity.py @@ -0,0 +1,91 @@ +from load_model import extract_sel_mean_std_bias_assignemnt +from pathlib import Path +from architectures.model_mapping import get_model +from configs.dataset_params import dataset_constants +import torch +import torchvision.transforms as transforms +import pandas as pd +import cv2 +import numpy as np +from PIL import Image +from get_data import get_augmentation +from configs.dataset_params import normalize_params +import random +from evaluation.diversity import MultiKCrossChannelMaxPooledSum +from visualization import filter_with_diversity + +def select_with_diversity(dataset="CUB2011", arch="resnet50",seed=123456, model_type="qsenn", n_features = 50, n_per_class=5, img_size=448, reduced_strides=False, folder = None): + n_classes = dataset_constants[dataset]["num_classes"] + model = get_model(arch, n_classes, reduced_strides) + if folder is None: + folder = Path.home() / f"tmp/{arch}/{dataset}/{seed}/" + print(folder) + model.load_state_dict(torch.load(folder / "Trained_DenseModel.pth"))#REMOVE + state_dict = torch.load(folder / f"{model_type}_{n_features}_{n_per_class}_FinetunedModel.pth") + selection= torch.load(folder / f"SlDD_Selection_50.pt") + state_dict['linear.selection']=selection + + feature_sel, sparse_layer, current_mean, current_std, bias_sparse = extract_sel_mean_std_bias_assignemnt(state_dict) + model.set_model_sldd(feature_sel, sparse_layer, current_mean, current_std, bias_sparse) + model.load_state_dict(state_dict) + + W=model.linear.layer.weight + + TR=get_augmentation(0.1, img_size, False, False, True, True, normalize_params["CUB2011"]) + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + model = model.to(device) + model.eval() + + #get name list and label + data_dir=Path.home()/"tmp/Datasets/CUB200/CUB_200_2011/" + labels = pd.read_csv(data_dir/"image_class_labels.txt", sep=' ', names=['img_id', 'target']) + namelist=pd.read_csv(data_dir/"images.txt",sep=' ',names=['img_id','file_name']) + # classlist=pd.read_csv(data_dir/"classes.txt",sep=' ',names=['cl_id','class_name']) + # targets=labels.loc[labels['img_id']==i,'target'].values[0] + + + Label_txt = pd.DataFrame({'img_id': pd.Series(dtype='int'), 'target': pd.Series(dtype='str')}) + with torch.no_grad(): + for t in range(1, 201): + print("in class:",t) + img_list=labels[labels['target']==t] + l=t-1 + weights=W[l,:] + k = (weights > 0).sum().item() + imgid_diver=[] + for i in img_list['img_id']: + filename=namelist.loc[namelist['img_id']==i,'file_name'].values[0] + + img=cv2.imread(data_dir/f"images/{filename}") + img=cv2.cvtColor(img, cv2.COLOR_BGR2RGB) + + + img=Image.fromarray(img) + img=TR(img) + img=img.unsqueeze(0) + img=img.to(device) + output, featuremaps =model(img,with_feature_maps=True,with_final_features=False) + + #div calculate + localizer = 
MultiKCrossChannelMaxPooledSum(range(1, k+1), W, None) + localizer(output.to("cuda"),featuremaps.to("cuda")) + locality, exlusive_locality = localizer.get_result() + diversity = locality[k-1] + diversity=diversity.item() + imgid_diver.append((i,diversity)) + + top_k = sorted(imgid_diver, key=lambda x: x[1], reverse=True)[:4] + top_k_imgids = [imgid for imgid, div in top_k] + t_list = [t] * len(top_k_imgids) + new_data = pd.DataFrame({'img_id': top_k_imgids, 'target': t_list}) + Label_txt = pd.concat([Label_txt, new_data], ignore_index=True) + Label_txt.to_csv('image_class_labels.txt', sep=' ', index=False, header=False) + + + + +select_with_diversity(dataset="CUB2011", arch="resnet50",seed=123456, model_type="qsenn", n_features = 50, n_per_class=5, img_size=448, reduced_strides=False, folder = None) + + + + \ No newline at end of file diff --git a/sparsification/FeatureSelection.py b/sparsification/FeatureSelection.py new file mode 100644 index 0000000000000000000000000000000000000000..885a7bc9fab8842e9eece0f07690636b1d623233 --- /dev/null +++ b/sparsification/FeatureSelection.py @@ -0,0 +1,473 @@ +from argparse import ArgumentParser +import logging +import math +import os.path +import sys +import time +import warnings + +import numpy as np +import torch +import torch.nn.functional as F +from glm_saga.elasticnet import maximum_reg_loader, get_device, elastic_loss_and_acc_loader +from torch import nn + +import torch as ch + +from sparsification.utils import safe_zip + +# TODO checkout this change: Marks changes to the group version of glmsaga + +""" +This would need glm_saga to run +usage to select 50 features with parameters as in paper: +metadata contains information about the precomputed train features in feature_loaders +args contains the default arguments for glm-saga, as described at the bottom +def get_glm_to_zero(feature_loaders, metadata, args, num_classes, device, train_ds, Ntotal): + num_features = metadata["X"]["num_features"][0] + fittingClass = FeatureSelectionFitting(num_features, num_lasses, args, 0.8, + 50, + True,0.1, + lookback=3, tol=1e-4, + epsilon=1,) + to_drop, test_acc = fittingClass.fit(feature_loaders, metadata, device) + return to_drop + +to_drop is then used to remove the features from the downstream fitting and finetuning. 
+""" + + +class FeatureSelectionFitting: + def __init__(self, n_features, n_classes, args, selalpha, nKeep, lam_fac,out_dir, lookback=None, tol=None, + epsilon=None): + """ + This is an adaption of the group version of glm-saga (https://github.com/MadryLab/DebuggableDeepNetworks) + The function extended_mask_max covers the changed operator, + Args: + n_features: + n_classes: + args: default args for glmsaga + selalpha: alpha for elastic net + nKeep: target number features + lam_fac: discount factor for lambda + parameters of glmsaga + lookback: + tol: + epsilon: + """ + self.selected_features = torch.zeros(n_features, dtype=torch.bool) + self.num_features = n_features + self.selalpha = selalpha + self.lam_Fac = lam_fac + self.out_dir = out_dir + self.n_classes = n_classes + self.nKeep = nKeep + self.args = self.extend_args(args, lookback, tol, epsilon) + + # Extended Proximal Operator for Feature Selection + def extended_mask_max(self, greater_to_keep, thresh): + prev = greater_to_keep[self.selected_features] + greater_to_keep[self.selected_features] = torch.min(greater_to_keep) + max_entry = torch.argmax(greater_to_keep) + greater_to_keep[self.selected_features] = prev + mask = torch.zeros_like(greater_to_keep) + mask[max_entry] = 1 + final_mask = (greater_to_keep > thresh) + final_mask = final_mask * mask + allowed_to_keep = torch.logical_or(self.selected_features, final_mask) + return allowed_to_keep + + def extend_args(self, args, lookback, tol, epsilon): + for key, entry in safe_zip(["lookbehind", "tol", + "lr_decay_factor", ], [lookback, tol, epsilon]): + if entry is not None: + setattr(args, key, entry) + return args + + # Grouped L1 regularization + # proximal operator for f(weight) = lam * \|weight\|_2 + # where the 2-norm is taken columnwise + def group_threshold(self, weight, lam): + norm = weight.norm(p=2, dim=0) + 1e-6 + # print(ch.sum((norm > lam))) + return (weight - lam * weight / norm) * self.extended_mask_max(norm, lam) + + # Elastic net regularization with group sparsity + # proximal operator for f(x) = alpha * \|x\|_1 + beta * \|x\|_2^2 + # where the 2-norm is taken columnwise + def group_threshold_with_shrinkage(self, x, alpha, beta): + y = self.group_threshold(x, alpha) + return y / (1 + beta) + + def threshold(self, weight_new, lr, lam): + alpha = self.selalpha + if alpha == 1: + # Pure L1 regularization + weight_new = self.group_threshold(weight_new, lr * lam * alpha) + else: + # Elastic net regularization + weight_new = self.group_threshold_with_shrinkage(weight_new, lr * lam * alpha, + lr * lam * (1 - alpha)) + return weight_new + + # Train an elastic GLM with proximal SAGA + # Since SAGA stores a scalar for each example-class pair, either pass + # the number of examples and number of classes or calculate it with an + # initial pass over the loaders + def train_saga(self, linear, loader, lr, nepochs, lam, alpha, group=True, verbose=None, + state=None, table_device=None, n_ex=None, n_classes=None, tol=1e-4, + preprocess=None, lookbehind=None, family='multinomial', logger=None): + if logger is None: + logger = print + with ch.no_grad(): + weight, bias = list(linear.parameters()) + if table_device is None: + table_device = weight.device + + # get total number of examples and initialize scalars + # for computing the gradients + if n_ex is None: + n_ex = sum(tensors[0].size(0) for tensors in loader) + if n_classes is None: + if family == 'multinomial': + n_classes = max(tensors[1].max().item() for tensors in loader) + 1 + elif family == 'gaussian': + for batch in 
loader: + y = batch[1] + break + n_classes = y.size(1) + + # Storage for scalar gradients and averages + if state is None: + a_table = ch.zeros(n_ex, n_classes).to(table_device) + w_grad_avg = ch.zeros_like(weight).to(weight.device) + b_grad_avg = ch.zeros_like(bias).to(weight.device) + else: + a_table = state["a_table"].to(table_device) + w_grad_avg = state["w_grad_avg"].to(weight.device) + b_grad_avg = state["b_grad_avg"].to(weight.device) + + obj_history = [] + obj_best = None + nni = 0 + for t in range(nepochs): + total_loss = 0 + for n_batch, batch in enumerate(loader): + if len(batch) == 3: + X, y, idx = batch + w = None + elif len(batch) == 4: + X, y, w, idx = batch + else: + raise ValueError( + f"Loader must return (data, target, index) or (data, target, index, weight) but instead got a tuple of length {len(batch)}") + + if preprocess is not None: + device = get_device(preprocess) + with ch.no_grad(): + X = preprocess(X.to(device)) + X = X.to(weight.device) + out = linear(X) + + # split gradient on only the cross entropy term + # for efficient storage of gradient information + if family == 'multinomial': + if w is None: + loss = F.cross_entropy(out, y.to(weight.device), reduction='mean') + else: + loss = F.cross_entropy(out, y.to(weight.device), reduction='none') + loss = (loss * w).mean() + I = ch.eye(linear.weight.size(0)) + target = I[y].to(weight.device) # change to OHE + + # Calculate new scalar gradient + logits = F.softmax(linear(X)) + elif family == 'gaussian': + if w is None: + loss = 0.5 * F.mse_loss(out, y.to(weight.device), reduction='mean') + else: + loss = 0.5 * F.mse_loss(out, y.to(weight.device), reduction='none') + loss = (loss * (w.unsqueeze(1))).mean() + target = y + + # Calculate new scalar gradient + logits = linear(X) + else: + raise ValueError(f"Unknown family: {family}") + total_loss += loss.item() * X.size(0) + + # BS x NUM_CLASSES + a = logits - target + if w is not None: + a = a * w.unsqueeze(1) + a_prev = a_table[idx].to(weight.device) + + # weight parameter + w_grad = (a.unsqueeze(2) * X.unsqueeze(1)).mean(0) + w_grad_prev = (a_prev.unsqueeze(2) * X.unsqueeze(1)).mean(0) + w_saga = w_grad - w_grad_prev + w_grad_avg + weight_new = weight - lr * w_saga + weight_new = self.threshold(weight_new, lr, lam) + # bias parameter + b_grad = a.mean(0) + b_grad_prev = a_prev.mean(0) + b_saga = b_grad - b_grad_prev + b_grad_avg + bias_new = bias - lr * b_saga + + # update table and averages + a_table[idx] = a.to(table_device) + w_grad_avg.add_((w_grad - w_grad_prev) * X.size(0) / n_ex) + b_grad_avg.add_((b_grad - b_grad_prev) * X.size(0) / n_ex) + + if lookbehind is None: + dw = (weight_new - weight).norm(p=2) + db = (bias_new - bias).norm(p=2) + criteria = ch.sqrt(dw ** 2 + db ** 2) + + if criteria.item() <= tol: + return { + "a_table": a_table.cpu(), + "w_grad_avg": w_grad_avg.cpu(), + "b_grad_avg": b_grad_avg.cpu() + } + + weight.data = weight_new + bias.data = bias_new + + saga_obj = total_loss / n_ex + lam * alpha * weight.norm(p=1) + 0.5 * lam * (1 - alpha) * ( + weight ** 2).sum() + + # save amount of improvement + obj_history.append(saga_obj.item()) + if obj_best is None or saga_obj.item() + tol < obj_best: + obj_best = saga_obj.item() + nni = 0 + else: + nni += 1 + + # Stop if no progress for lookbehind iterationsd:]) + criteria = lookbehind is not None and (nni >= lookbehind) + + nnz = (weight.abs() > 1e-5).sum().item() + total = weight.numel() + if verbose and (t % verbose) == 0: + if lookbehind is None: + logger( + f"obj {saga_obj.item()} weight nnz 
{nnz}/{total} ({nnz / total:.4f}) criteria {criteria:.4f} {dw} {db}") + else: + logger( + f"obj {saga_obj.item()} weight nnz {nnz}/{total} ({nnz / total:.4f}) obj_best {obj_best}") + + if lookbehind is not None and criteria: + logger( + f"obj {saga_obj.item()} weight nnz {nnz}/{total} ({nnz / total:.4f}) obj_best {obj_best} [early stop at {t}]") + return { + "a_table": a_table.cpu(), + "w_grad_avg": w_grad_avg.cpu(), + "b_grad_avg": b_grad_avg.cpu() + } + + logger(f"did not converge at {nepochs} iterations (criteria {criteria})") + return { + "a_table": a_table.cpu(), + "w_grad_avg": w_grad_avg.cpu(), + "b_grad_avg": b_grad_avg.cpu() + } + + def glm_saga(self, linear, loader, max_lr, nepochs, alpha, dropout, tries, + table_device=None, preprocess=None, group=False, + verbose=None, state=None, n_ex=None, n_classes=None, + tol=1e-4, epsilon=0.001, k=100, checkpoint=None, + do_zero=True, lr_decay_factor=1, metadata=None, + val_loader=None, test_loader=None, lookbehind=None, + family='multinomial', encoder=None, tot_tries=1): + if encoder is not None: + warnings.warn("encoder argument is deprecated; please use preprocess instead", DeprecationWarning) + preprocess = encoder + device = get_device(linear) + checkpoint = self.out_dir + if preprocess is not None and (device != get_device(preprocess)): + raise ValueError( + f"Linear and preprocess must be on same device (got {get_device(linear)} and {get_device(preprocess)})") + + if metadata is not None: + if n_ex is None: + n_ex = metadata['X']['num_examples'] + if n_classes is None: + n_classes = metadata['y']['num_classes'] + lam_fac = (1 + (tries - 1) / tot_tries) + print("Using lam_fac ", lam_fac) + max_lam = maximum_reg_loader(loader, group=group, preprocess=preprocess, metadata=metadata, + family=family) / max( + 0.001, alpha) * lam_fac + group_lam = maximum_reg_loader(loader, group=True, preprocess=preprocess, metadata=metadata, + family=family) / max( + 0.001, alpha) * lam_fac + min_lam = epsilon * max_lam + group_min_lam = epsilon * group_lam + # logspace is base 10 but log is base e so use log10 + lams = ch.logspace(math.log10(max_lam), math.log10(min_lam), k) + lrs = ch.logspace(math.log10(max_lr), math.log10(max_lr / lr_decay_factor), k) + found = False + if do_zero: + lams = ch.cat([lams, lams.new_zeros(1)]) + lrs = ch.cat([lrs, lrs.new_ones(1) * lrs[-1]]) + + path = [] + best_val_loss = float('inf') + + if checkpoint is not None: + os.makedirs(checkpoint, exist_ok=True) + + file_handler = logging.FileHandler(filename=os.path.join(checkpoint, 'output.log')) + stdout_handler = logging.StreamHandler(sys.stdout) + handlers = [file_handler, stdout_handler] + + logging.basicConfig( + level=logging.DEBUG, + format='[%(asctime)s] %(levelname)s - %(message)s', + handlers=handlers + ) + logger = logging.getLogger('glm_saga').info + else: + logger = print + while self.selected_features.sum() < self.nKeep: # TODO checkout this change, one iteration per feature + n_feature_to_keep = self.selected_features.sum() + for i, (lam, lr) in enumerate(zip(lams, lrs)): + lam = lam * self.lam_Fac + start_time = time.time() + self.selected_features = self.selected_features.to(device) + state = self.train_saga(linear, loader, lr, nepochs, lam, alpha, + table_device=table_device, preprocess=preprocess, group=group, verbose=verbose, + state=state, n_ex=n_ex, n_classes=n_classes, tol=tol, lookbehind=lookbehind, + family=family, logger=logger) + + with ch.no_grad(): + loss, acc = elastic_loss_and_acc_loader(linear, loader, lam, alpha, preprocess=preprocess, + 
family=family) + loss, acc = loss.item(), acc.item() + + loss_val, acc_val = -1, -1 + if val_loader: + loss_val, acc_val = elastic_loss_and_acc_loader(linear, val_loader, lam, alpha, + preprocess=preprocess, + family=family) + loss_val, acc_val = loss_val.item(), acc_val.item() + + loss_test, acc_test = -1, -1 + if test_loader: + loss_test, acc_test = elastic_loss_and_acc_loader(linear, test_loader, lam, alpha, + preprocess=preprocess, family=family) + loss_test, acc_test = loss_test.item(), acc_test.item() + + params = { + "lam": lam, + "lr": lr, + "alpha": alpha, + "time": time.time() - start_time, + "loss": loss, + "metrics": { + "loss_tr": loss, + "acc_tr": acc, + "loss_val": loss_val, + "acc_val": acc_val, + "loss_test": loss_test, + "acc_test": acc_test, + }, + "weight": linear.weight.detach().cpu().clone(), + "bias": linear.bias.detach().cpu().clone() + + } + path.append(params) + if loss_val is not None and loss_val < best_val_loss: + best_val_loss = loss_val + best_params = params + found = True + nnz = (linear.weight.abs() > 1e-5).sum().item() + total = linear.weight.numel() + if family == 'multinomial': + logger( + f"{n_feature_to_keep} Feature ({i}) lambda {lam:.4f}, loss {loss:.4f}, acc {acc:.4f} [val acc {acc_val:.4f}] [test acc {acc_test:.4f}], sparsity {nnz / total} [{nnz}/{total}], time {time.time() - start_time}, lr {lr:.4f}") + elif family == 'gaussian': + logger( + f"({i}) lambda {lam:.4f}, loss {loss:.4f} [val loss {loss_val:.4f}] [test loss {loss_test:.4f}], sparsity {nnz / total} [{nnz}/{total}], time {time.time() - start_time}, lr {lr:.4f}") + + if self.check_new_feature(linear.weight): # TODO checkout this change, canceling if new feature is used + if checkpoint is not None: + ch.save(params, os.path.join(checkpoint, f"params{n_feature_to_keep}.pth")) + break + if found: + return { + 'path': path, + 'best': best_params, + 'state': state + } + else: + return False + + def check_new_feature(self, weight): + # TODO checkout this change, checking if new feature is used + copied_weight = torch.tensor(weight.cpu()) + used_features = torch.unique( + torch.nonzero(copied_weight)[:, 1]) + if len(used_features) > 0: + new_set = set(used_features.tolist()) + old_set = set(torch.nonzero(self.selected_features)[:, 0].tolist()) + diff = new_set - old_set + if len(diff) > 0: + self.selected_features[used_features] = True + return True + return False + + def fit(self, feature_loaders, metadata, device): + # TODO checkout this change, glm saga code slightly adapted to return to_drop + print("Initializing linear model...") + linear = nn.Linear(self.num_features, self.n_classes).to(device) + for p in [linear.weight, linear.bias]: + p.data.zero_() + + print("Preparing normalization preprocess and indexed dataloader") + preprocess = NormalizedRepresentation(feature_loaders['train'], + metadata=metadata, + device=linear.weight.device) + + print("Calculating the regularization path") + mpl_logger = logging.getLogger("matplotlib") + mpl_logger.setLevel(logging.WARNING) + selected_features = self.glm_saga(linear, + feature_loaders['train'], + self.args.lr, + self.args.max_epochs, + self.selalpha, 0, 1, + val_loader=feature_loaders['val'], + test_loader=feature_loaders['test'], + n_classes=self.n_classes, + verbose=self.args.verbose, + tol=self.args.tol, + lookbehind=self.args.lookbehind, + lr_decay_factor=self.args.lr_decay_factor, + group=True, + epsilon=self.args.lam_factor, + metadata=metadata, + preprocess=preprocess, tot_tries=1) + to_drop = 
np.where(self.selected_features.cpu().numpy() == 0)[0] + test_acc = selected_features["path"][-1]["metrics"]["acc_test"] + torch.set_grad_enabled(True) + return to_drop, test_acc + + +class NormalizedRepresentation(ch.nn.Module): + def __init__(self, loader, metadata, device='cuda', tol=1e-5): + super(NormalizedRepresentation, self).__init__() + + assert metadata is not None + self.device = device + self.mu = metadata['X']['mean'] + self.sigma = ch.clamp(metadata['X']['std'], tol) + + def forward(self, X): + return (X - self.mu.to(self.device)) / self.sigma.to(self.device) + + + + diff --git a/sparsification/data_helpers.py b/sparsification/data_helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..d48424564050c66238f9b731b433ca25d29d5a6b --- /dev/null +++ b/sparsification/data_helpers.py @@ -0,0 +1,16 @@ + +import torch + + +class NormalizedRepresentation(torch.nn.Module): + def __init__(self, loader, metadata, device='cuda', tol=1e-5): + super(NormalizedRepresentation, self).__init__() + + assert metadata is not None + self.device = device + self.mu = metadata['X']['mean'] + self.sigma = torch.clamp(metadata['X']['std'], tol) + + def forward(self, X): + return (X - self.mu.to(self.device)) / self.sigma.to(self.device) + diff --git a/sparsification/feature_helpers.py b/sparsification/feature_helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..8c11867077be5ab067548f498dad17fb299fa162 --- /dev/null +++ b/sparsification/feature_helpers.py @@ -0,0 +1,378 @@ +import math +import os +import sys + +import torch.cuda + +import sparsification.utils + +sys.path.append('') +import numpy as np +import torch as ch +from torch.utils.data import Subset +from tqdm import tqdm + + + +# From glm_saga +def get_features_batch(batch, model, device='cuda'): + if not torch.cuda.is_available(): + device = "cpu" + ims, targets = batch + output, latents = model(ims.to(device), with_final_features=True ) + return latents, targets + + +def compute_features(loader, model, dataset_type, pooled_output, + batch_size, num_workers, + shuffle=False, device='cpu', n_epoch=1, + filename=None, chunk_threshold=20000, balance=False): + """Compute deep features for a given dataset using a modeln and returnss + them as a pytorch dataset and loader. + Args: + loader : Torch data loader + model: Torch model + dataset_type (str): One of vision or language + pooled_output (bool): Whether or not to pool outputs + (only relevant for some language models) + batch_size (int): Batch size for output loader + num_workers (int): Number of workers to use for output loader + shuffle (bool): Whether or not to shuffle output data loaoder + device (str): Device on which to keep the model + filename (str):Optional file to cache computed feature. Recommended + for large dataset_classes like ImageNet. 
+ chunk_threshold (int): Size of shard while caching + balance (bool): Whether or not to balance output data loader + (only relevant for some language models) + Returns: + feature_dataset: Torch dataset with deep features + feature_loader: Torch data loader with deep features + """ + if torch.cuda.is_available(): + device = "cuda" + print("mem_get_info before", torch.cuda.mem_get_info()) + torch.cuda.empty_cache() + print("mem_get_info after", torch.cuda.mem_get_info()) + model = model.to(device) + if filename is None or not os.path.exists(os.path.join(filename, f'0_features.npy')): + model.eval() + all_latents, all_targets, all_images = [], [], [] + Nsamples, chunk_id = 0, 0 + for idx_epoch in range(n_epoch): + for batch_idx, batch in tqdm(enumerate(loader), total=len(loader)): + with ch.no_grad(): + latents, targets = get_features_batch(batch, model, + device=device) + if batch_idx == 0: + print("Latents shape", latents.shape) + Nsamples += latents.size(0) + + all_latents.append(latents.cpu()) + if len(targets.shape) > 1: + targets = targets[:, 0] + all_targets.append(targets.cpu()) + # all_images.append(batch[0]) + if filename is not None and Nsamples > chunk_threshold: + if not os.path.exists(filename): os.makedirs(filename) + np.save(os.path.join(filename, f'{chunk_id}_features.npy'), ch.cat(all_latents).numpy()) + np.save(os.path.join(filename, f'{chunk_id}_labels.npy'), ch.cat(all_targets).numpy()) + + all_latents, all_targets, Nsamples = [], [], 0 + chunk_id += 1 + + if filename is not None and Nsamples > 0: + if not os.path.exists(filename): os.makedirs(filename) + np.save(os.path.join(filename, f'{chunk_id}_features.npy'), ch.cat(all_latents).numpy()) + np.save(os.path.join(filename, f'{chunk_id}_labels.npy'), ch.cat(all_targets).numpy()) + # np.save(os.path.join(filename, f'{chunk_id}_images.npy'), ch.cat(all_images).numpy()) + feature_dataset = load_features(filename) if filename is not None else \ + ch.utils.data.TensorDataset(ch.cat(all_latents), ch.cat(all_targets)) + if balance: + feature_dataset = balance_dataset(feature_dataset) + + feature_loader = ch.utils.data.DataLoader(feature_dataset, + num_workers=num_workers, + batch_size=batch_size, + shuffle=shuffle) + + return feature_dataset, feature_loader + + +def load_feature_loader(out_dir_feats, val_frac, batch_size, num_workers, random_seed): + feature_loaders = {} + for mode in ['train', 'test']: + print(f"For {mode} set...") + sink_path = f"{out_dir_feats}/features_{mode}" + metadata_path = f"{out_dir_feats}/metadata_{mode}.pth" + feature_ds = load_features(sink_path) + feature_loader = ch.utils.data.DataLoader(feature_ds, + num_workers=num_workers, + batch_size=batch_size) + if mode == 'train': + metadata = calculate_metadata(feature_loader, + num_classes=2048, + filename=metadata_path) + split_datasets, split_loaders = split_dataset(feature_ds, + len(feature_ds), + val_frac=val_frac, + batch_size=batch_size, + num_workers=num_workers, + random_seed=random_seed, + shuffle=True) + feature_loaders.update({mm: sparsification.utils.add_index_to_dataloader(split_loaders[mi]) + for mi, mm in enumerate(['train', 'val'])}) + + else: + feature_loaders[mode] = feature_loader + return feature_loaders, metadata + + +def balance_dataset(dataset): + """Balances a given dataset to have the same number of samples/class. 
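+    Note: as written, this assumes binary labels in {0, 1}: every positive sample
+    is kept and an equally sized subset of negatives is drawn (with ch.manual_seed(0)).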
+ Args: + dataset : Torch dataset + Returns: + Torch dataset with equal number of samples/class + """ + + print("Balancing dataset...") + n = len(dataset) + labels = ch.Tensor([dataset[i][1] for i in range(n)]).int() + n0 = sum(labels).item() + I_pos = labels == 1 + + idx = ch.arange(n) + idx_pos = idx[I_pos] + ch.manual_seed(0) + I = ch.randperm(n - n0)[:n0] + idx_neg = idx[~I_pos][I] + idx_bal = ch.cat([idx_pos, idx_neg], dim=0) + return Subset(dataset, idx_bal) + + +def load_metadata(feature_path): + return ch.load(os.path.join(feature_path, f'metadata_train.pth')) + + +def get_mean_std(feature_path): + metadata = load_metadata(feature_path) + return metadata["X"]["mean"], metadata["X"]["std"] + + +def load_features_dataset_mode(feature_path, mode='test', + num_workers=10, batch_size=128): + """Loads precomputed deep features corresponding to the + train/test set along with normalization statitic. + Args: + feature_path (str): Path to precomputed deep features + mode (str): One of train or tesst + num_workers (int): Number of workers to use for output loader + batch_size (int): Batch size for output loader + + Returns: + features (np.array): Recovered deep features + feature_mean: Mean of deep features + feature_std: Standard deviation of deep features + """ + feature_dataset = load_features(os.path.join(feature_path, f'features_{mode}')) + feature_loader = ch.utils.data.DataLoader(feature_dataset, + num_workers=num_workers, + batch_size=batch_size, + shuffle=False) + feature_metadata = ch.load(os.path.join(feature_path, f'metadata_train.pth')) + feature_mean, feature_std = feature_metadata['X']['mean'], feature_metadata['X']['std'] + return feature_loader, feature_mean, feature_std + + +def load_joint_dataset(feature_path, mode='test', + num_workers=10, batch_size=128): + feature_dataset = load_features(os.path.join(feature_path, f'features_{mode}')) + feature_loader = ch.utils.data.DataLoader(feature_dataset, + num_workers=num_workers, + batch_size=batch_size, + shuffle=False) + features = [] + labels = [] + for _, (feature, label) in tqdm(enumerate(feature_loader), total=len(feature_loader)): + features.append(feature) + labels.append(label) + features = np.concatenate(features) + labels = np.concatenate(labels) + dataset = ch.utils.data.TensorDataset(torch.tensor(features), torch.tensor(labels)) + return dataset + + +def load_features_mode(feature_path, mode='test', + num_workers=10, batch_size=128): + """Loads precomputed deep features corresponding to the + train/test set along with normalization statitic. 
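+    The normalization statistics are read from metadata_train.pth, i.e. they always
+    come from the training split regardless of `mode`.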
+ Args: + feature_path (str): Path to precomputed deep features + mode (str): One of train or tesst + num_workers (int): Number of workers to use for output loader + batch_size (int): Batch size for output loader + + Returns: + features (np.array): Recovered deep features + feature_mean: Mean of deep features + feature_std: Standard deviation of deep features + """ + feature_dataset = load_features(os.path.join(feature_path, f'features_{mode}')) + feature_loader = ch.utils.data.DataLoader(feature_dataset, + num_workers=num_workers, + batch_size=batch_size, + shuffle=False) + + feature_metadata = ch.load(os.path.join(feature_path, f'metadata_train.pth')) + feature_mean, feature_std = feature_metadata['X']['mean'], feature_metadata['X']['std'] + + features = [] + + for _, (feature, _) in tqdm(enumerate(feature_loader), total=len(feature_loader)): + features.append(feature) + + features = ch.cat(features).numpy() + return features, feature_mean, feature_std + + +def load_features(feature_path): + """Loads precomputed deep features. + Args: + feature_path (str): Path to precomputed deep features + + Returns: + Torch dataset with recovered deep features. + """ + if not os.path.exists(os.path.join(feature_path, f"0_features.npy")): + raise ValueError(f"The provided location {feature_path} does not contain any representation files") + + ds_list, chunk_id = [], 0 + while os.path.exists(os.path.join(feature_path, f"{chunk_id}_features.npy")): + features = ch.from_numpy(np.load(os.path.join(feature_path, f"{chunk_id}_features.npy"))).float() + labels = ch.from_numpy(np.load(os.path.join(feature_path, f"{chunk_id}_labels.npy"))).long() + ds_list.append(ch.utils.data.TensorDataset(features, labels)) + chunk_id += 1 + + print(f"==> loaded {chunk_id} files of representations...") + return ch.utils.data.ConcatDataset(ds_list) + + +def calculate_metadata(loader, num_classes=None, filename=None): + """Calculates mean and standard deviation of the deep features over + a given set of images. + Args: + loader : torch data loader + num_classes (int): Number of classes in the dataset + filename (str): Optional filepath to cache metadata. Recommended + for large dataset_classes like ImageNet. + + Returns: + metadata (dict): Dictionary with desired statistics. 
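+        The returned dictionary is organised as built below:
+
+            {"X": {"mean", "std", "num_features", "num_examples"},
+             "y": {"mean", "std", "num_classes"},
+             "max_reg": {"group", "nongrouped"}}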
+ """ + + if filename is not None and os.path.exists(filename): + print("loading Metadata from ", filename) + return ch.load(filename) + + # Calculate number of classes if not given + if num_classes is None: + num_classes = 1 + for batch in loader: + y = batch[1] + print(y) + num_classes = max(num_classes, y.max().item() + 1) + + eye = ch.eye(num_classes) + + X_bar, y_bar, y_max, n = 0, 0, 0, 0 + + # calculate means and maximum + print("Calculating means") + for ans in tqdm(loader, total=len(loader)): + X, y = ans[:2] + X_bar += X.sum(0) + y_bar += eye[y].sum(0) + y_max = max(y_max, y.max()) + n += y.size(0) + X_bar = X_bar.float() / n + y_bar = y_bar.float() / n + + # calculate std + X_std, y_std = 0, 0 + print("Calculating standard deviations") + for ans in tqdm(loader, total=len(loader)): + X, y = ans[:2] + X_std += ((X - X_bar) ** 2).sum(0) + y_std += ((eye[y] - y_bar) ** 2).sum(0) + X_std = ch.sqrt(X_std.float() / n) + y_std = ch.sqrt(y_std.float() / n) + + # calculate maximum regularization + inner_products = 0 + print("Calculating maximum lambda") + for ans in tqdm(loader, total=len(loader)): + X, y = ans[:2] + y_map = (eye[y] - y_bar) / y_std + inner_products += X.t().mm(y_map) * y_std + + inner_products_group = inner_products.norm(p=2, dim=1) + + metadata = { + "X": { + "mean": X_bar, + "std": X_std, + "num_features": X.size()[1:], + "num_examples": n + }, + "y": { + "mean": y_bar, + "std": y_std, + "num_classes": y_max + 1 + }, + "max_reg": { + "group": inner_products_group.abs().max().item() / n, + "nongrouped": inner_products.abs().max().item() / n + } + } + + if filename is not None: + ch.save(metadata, filename) + + return metadata + + +def split_dataset(dataset, Ntotal, val_frac, + batch_size, num_workers, + random_seed=0, shuffle=True, balance=False): + """Splits a given dataset into train and validation + Args: + dataset : Torch dataset + Ntotal: Total number of dataset samples + val_frac: Fraction to reserve for validation + batch_size (int): Batch size for output loader + num_workers (int): Number of workers to use for output loader + random_seed (int): Random seed + shuffle (bool): Whether or not to shuffle output data loaoder + balance (bool): Whether or not to balance output data loader + (only relevant for some language models) + + Returns: + split_datasets (list): List of dataset_classes (one each for train and val) + split_loaders (list): List of loaders (one each for train and val) + """ + + Nval = math.floor(Ntotal * val_frac) + train_ds, val_ds = ch.utils.data.random_split(dataset, + [Ntotal - Nval, Nval], + generator=ch.Generator().manual_seed(random_seed)) + if balance: + val_ds = balance_dataset(val_ds) + split_datasets = [train_ds, val_ds] + + split_loaders = [] + for ds in split_datasets: + split_loaders.append(ch.utils.data.DataLoader(ds, + num_workers=num_workers, + batch_size=batch_size, + shuffle=shuffle)) + return split_datasets, split_loaders diff --git a/sparsification/glmBasedSparsification.py b/sparsification/glmBasedSparsification.py new file mode 100644 index 0000000000000000000000000000000000000000..4a681147b4394281069c3a6bf0596baf435ecae0 --- /dev/null +++ b/sparsification/glmBasedSparsification.py @@ -0,0 +1,130 @@ +import logging +import os +import shutil + +import numpy as np +import pandas as pd +import torch +from glm_saga.elasticnet import glm_saga +from torch import nn + +from sparsification.FeatureSelection import FeatureSelectionFitting +from sparsification import data_helpers +from sparsification.utils import get_default_args, 
compute_features_and_metadata, select_in_loader, get_feature_loaders + + +def get_glm_selection(feature_loaders, metadata, args, num_classes, device, n_features_to_select, folder): + num_features = metadata["X"]["num_features"][0] + fittingClass = FeatureSelectionFitting(num_features, num_classes, args, 0.8, + n_features_to_select, + 0.1,folder, + lookback=3, tol=1e-4, + epsilon=1,) + to_drop, test_acc = fittingClass.fit(feature_loaders, metadata, device) + selected_features = torch.tensor([i for i in range(num_features) if i not in to_drop]) + return selected_features + + +def compute_feature_selection_and_assignment(model, train_loader, test_loader, log_folder,num_classes, seed, select_features = 50): + feature_loaders, metadata, device,args = get_feature_loaders(seed, log_folder,train_loader, test_loader, model, num_classes, ) + + if os.path.exists(log_folder / f"SlDD_Selection_{select_features}.pt"): + feature_selection = torch.load(log_folder / f"SlDD_Selection_{select_features}.pt") + else: + used_features = model.linear.weight.shape[1] + if used_features != select_features: + selection_folder = log_folder / "sldd_selection" # overwrite with None to prevent saving + feature_selection = get_glm_selection(feature_loaders, metadata, args, + num_classes, + device,select_features, selection_folder + ) + else: + feature_selection = model.linear.selection + torch.save(feature_selection, log_folder / f"SlDD_Selection_{select_features}.pt") + feature_loaders = select_in_loader(feature_loaders, feature_selection) + mean, std = metadata["X"]["mean"], metadata["X"]["std"] + mean_to_pass_in = mean + std_to_pass_in = std + if len(mean) != feature_selection.shape[0]: + mean_to_pass_in = mean[feature_selection] + std_to_pass_in = std[feature_selection] + + sparse_matrices, biases = fit_glm(log_folder, mean_to_pass_in, std_to_pass_in, feature_loaders, num_classes, select_features) + + return feature_selection, sparse_matrices, biases, mean, std + + +def fit_glm(log_dir,mean, std , feature_loaders, num_classes, select_features = 50): + output_folder = log_dir / "glm_path" + if not output_folder.exists() or len(list(output_folder.iterdir())) != 102: + shutil.rmtree(output_folder, ignore_errors=True) + output_folder.mkdir(exist_ok=True, parents=True) + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + linear = nn.Linear(select_features, num_classes).to(device) + for p in [linear.weight, linear.bias]: + p.data.zero_() + print("Preparing normalization preprocess and indexed dataloader") + metadata = {"X": {"mean": mean, "std": std},} + preprocess = data_helpers.NormalizedRepresentation(feature_loaders['train'], + metadata=metadata, + device=linear.weight.device) + + print("Calculating the regularization path") + mpl_logger = logging.getLogger("matplotlib") + mpl_logger.setLevel(logging.WARNING) + params = glm_saga(linear, + feature_loaders['train'], + 0.1, + 2000, + 0.99, k=100, + val_loader=feature_loaders['val'], + test_loader=feature_loaders['test'], + n_classes=num_classes, + checkpoint=str(output_folder), + verbose=200, + tol=1e-4, # Change for ImageNet + lookbehind=5, + lr_decay_factor=1, + group=False, + epsilon=0.001, + metadata=None, # To let it be recomputed + preprocess=preprocess, ) + results = load_glm(output_folder) + sparse_matrices = results["weights"] + biases = results["biases"] + + return sparse_matrices, biases + +def load_glm(result_dir): + Nlambda = max([int(f.split('params')[1].split('.pth')[0]) + for f in os.listdir(result_dir) if 'params' in f]) + 1 + + 
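+    # Each params{i}.pth checkpoint written along the regularization path stores
+    # 'lam', 'weight', 'bias' and 'metrics'; Nlambda is inferred from the highest
+    # checkpoint index present in result_dir.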
print(f"Loading regularization path of length {Nlambda}") + + params_dict = {i: torch.load(os.path.join(result_dir, f"params{i}.pth"), + map_location=torch.device('cpu')) for i in range(Nlambda)} + + regularization_strengths = [params_dict[i]['lam'].item() for i in range(Nlambda)] + weights = [params_dict[i]['weight'] for i in range(Nlambda)] + biases = [params_dict[i]['bias'] for i in range(Nlambda)] + + metrics = {'acc_tr': [], 'acc_val': [], 'acc_test': []} + + for k in metrics.keys(): + for i in range(Nlambda): + metrics[k].append(params_dict[i]['metrics'][k]) + metrics[k] = 100 * np.stack(metrics[k]) + metrics = pd.DataFrame(metrics) + metrics = metrics.rename(columns={'acc_tr': 'acc_train'}) + + # weights_stacked = ch.stack(weights) + # sparsity = ch.sum(weights_stacked != 0, dim=2).numpy() + sparsity = np.array([torch.sum(w != 0, dim=1).numpy() for w in weights]) + + return {'metrics': metrics, + 'regularization_strengths': regularization_strengths, + 'weights': weights, + 'biases': biases, + 'sparsity': sparsity, + 'weight_dense': weights[-1], + 'bias_dense': biases[-1]} diff --git a/sparsification/qsenn.py b/sparsification/qsenn.py new file mode 100644 index 0000000000000000000000000000000000000000..45eb1bde64846d26996962f7a1c3b8d8e0ffa6ab --- /dev/null +++ b/sparsification/qsenn.py @@ -0,0 +1,63 @@ +import numpy as np +import torch + +from sparsification.glmBasedSparsification import compute_feature_selection_and_assignment + + +def compute_qsenn_feature_selection_and_assignment(model, train_loader, test_loader, log_folder, num_classes, seed,n_features, per_class = 5): + feature_sel, sparse_matrices, biases, mean, std = compute_feature_selection_and_assignment(model, train_loader, + test_loader, + log_folder, num_classes, seed, n_features) + weight_sparse, bias_sparse = get_sparsified_weights_for_factor(sparse_matrices[:-1], biases[:-1], per_class) # Last one in regularisation path has no regularisation + print(f"Number of nonzeros in weight matrix: {torch.sum(weight_sparse != 0)}") + return feature_sel, weight_sparse, bias_sparse, mean, std +def get_sparsified_weights_for_factor(weights, biases, factor,): + no_reg_result_mat, no_reg_result_bias = weights[-1], biases[-1] + goal_nonzeros = factor * no_reg_result_mat.shape[0] + values = no_reg_result_mat.flatten() + values = values[values != 0] + values = -(torch.sort(-torch.abs(values))[0]) + if goal_nonzeros < len(values): + threshold = (values[int(goal_nonzeros) - 1] + values[int(goal_nonzeros)]) / 2 + else: + threshold = values[-1] + max_val = torch.max(torch.abs(values)) + weight_sparse = discretize_2_bins_to_threshold(no_reg_result_mat, threshold, max_val) + sel_idx = len(weights) - 1 + positive_weights_per_class = np.array(torch.sum(weight_sparse > 0, dim=1)) + negative_weights_per_class = np.array(torch.sum(weight_sparse < 0, dim=1)) + total_weight_count_per_class = positive_weights_per_class - negative_weights_per_class + max_bias = torch.max(torch.abs(biases[sel_idx])) + bias_sparse = torch.ones_like(biases[sel_idx]) * max_bias + diff_n_weight = total_weight_count_per_class - np.min(total_weight_count_per_class) + steps = np.max(diff_n_weight) + single_step = 2 * max_bias / steps + bias_sparse = bias_sparse - torch.tensor(diff_n_weight) * single_step + bias_sparse = torch.clamp(bias_sparse, -max_bias, max_bias) + return weight_sparse, bias_sparse + + +def discretize_2_bins_to_threshold(data, treshold, max): + boundaries = torch.tensor([-max, -treshold, treshold, max], device=data.device) + bucketized_tensor = 
torch.bucketize(data, boundaries) + means = torch.tensor([-max, 0, max], device=data.device) + for i in range(len(means)): + if means[i] == 0: + break + positive_index = int(len(means) / 2 + 1) + i + positive_bucket = data[bucketized_tensor == positive_index + 1] + negative_bucket = data[bucketized_tensor == i + 1] + sum = 0 + total = 0 + for bucket in [positive_bucket, negative_bucket]: + if len(bucket) == 0: + continue + sum += torch.sum(torch.abs(bucket)) + total += len(bucket) + if total == 0: + continue + avg = sum / total + means[i] = -avg + means[positive_index] = avg + discretized_tensor = means.cpu()[bucketized_tensor.cpu() - 1].to(bucketized_tensor.device) + return discretized_tensor \ No newline at end of file diff --git a/sparsification/sldd.py b/sparsification/sldd.py new file mode 100644 index 0000000000000000000000000000000000000000..3eeb3733797950107c6a0987fe242a0bfe2e732a --- /dev/null +++ b/sparsification/sldd.py @@ -0,0 +1,44 @@ +import numpy as np +import torch + +from sparsification.glmBasedSparsification import compute_feature_selection_and_assignment + + +def compute_sldd_feature_selection_and_assignment(model, train_loader, test_loader, log_folder, num_classes, seed, + per_class=5, select_features=50): + feature_sel, sparse_matrices, biases, mean, std = compute_feature_selection_and_assignment(model, train_loader, + test_loader, + log_folder, num_classes, + seed, select_features=select_features) + weight_sparse, bias_sparse = get_sparsified_weights_for_factor(sparse_matrices,biases, + per_class) # Last one in regularisation path has none + return feature_sel, weight_sparse, bias_sparse, mean, std + +def get_sparsified_weights_for_factor(sparse_layer,biases,keep_per_class, drop_rate=0.5): + nonzero_entries = [torch.sum(torch.count_nonzero(sparse_layer[i])) for i in range(len(sparse_layer))] + mean_sparsity = np.array([nonzero_entries[i] / sparse_layer[i].shape[0] for i in range(len(sparse_layer))]) + factor =keep_per_class / drop_rate + # Get layer with desired sparsity + sparse_enough = mean_sparsity <= factor + sel_idx = np.argmax(sparse_enough * mean_sparsity) + if sel_idx == 0 and np.sum(mean_sparsity) > 1: # sometimes first one is odd + sparse_enough[0] = False + sel_idx = np.argmax(sparse_enough * mean_sparsity) + selected_weight = sparse_layer[sel_idx] + selected_bias = biases[sel_idx] + # only keep 5 per class on average + weight_5_per_matrix = set_lowest_percent_to_zero(selected_weight,5) + + return weight_5_per_matrix,selected_bias + + +def set_lowest_percent_to_zero(matrix, keep_per): + nonzero_indices = torch.nonzero(matrix) + values = torch.tensor([matrix[x[0], x[1]] for x in nonzero_indices]) + sorted_indices = torch.argsort(torch.abs(values)) + total_allowed = int(matrix.shape[0] * keep_per) + sorted_indices = sorted_indices[:-total_allowed] + nonzero_indices_to_zero = [nonzero_indices[x] for x in sorted_indices] + for to_zero in nonzero_indices_to_zero: + matrix[to_zero[0], to_zero[1]] = 0 + return matrix \ No newline at end of file diff --git a/sparsification/utils.py b/sparsification/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..1e960e5c4131032242e41bf7ff5a8a02b571fc89 --- /dev/null +++ b/sparsification/utils.py @@ -0,0 +1,159 @@ +from argparse import ArgumentParser + +import torch + +#from sparsification.glm_saga import glm_saga +from sparsification import feature_helpers + + +def safe_zip(*args): + for iterable in args[1:]: + if len(iterable) != len(args[0]): + print("Unequally sized iterables to zip, printing 
lengths") + for i, entry in enumerate(args): + print(i, len(entry)) + raise ValueError("Unequally sized iterables to zip") + return zip(*args) + + +def compute_features_and_metadata(args, train_loader, test_loader, model, out_dir_feats, num_classes, + ): + print("Computing/loading deep features...") + + Ntotal = len(train_loader.dataset) + feature_loaders = {} + # Compute Features for not augmented train and test set + train_loader_transforms = train_loader.dataset.transform + test_loader_transforms = test_loader.dataset.transform + train_loader.dataset.transform = test_loader_transforms + for mode, loader in zip(['train', 'test', ], [train_loader, test_loader, ]): # + print(f"For {mode} set...") + + sink_path = f"{out_dir_feats}/features_{mode}" + metadata_path = f"{out_dir_feats}/metadata_{mode}.pth" + + feature_ds, feature_loader = feature_helpers.compute_features(loader, + model, + dataset_type=args.dataset_type, + pooled_output=None, + batch_size=args.batch_size, + num_workers=0, # args.num_workers, + shuffle=(mode == 'test'), + device=args.device, + filename=sink_path, n_epoch=1, + balance=False, + ) # args.balance if mode == 'test' else False) + + if mode == 'train': + metadata = feature_helpers.calculate_metadata(feature_loader, + num_classes=num_classes, + filename=metadata_path) + if metadata["max_reg"]["group"] == 0.0: + return None, False + split_datasets, split_loaders = feature_helpers.split_dataset(feature_ds, + Ntotal, + val_frac=args.val_frac, + batch_size=args.batch_size, + num_workers=args.num_workers, + random_seed=args.random_seed, + shuffle=True, + balance=False) + feature_loaders.update({mm: add_index_to_dataloader(split_loaders[mi]) + for mi, mm in enumerate(['train', 'val'])}) + + else: + feature_loaders[mode] = feature_loader + train_loader.dataset.transform = train_loader_transforms + return feature_loaders, metadata + +def get_feature_loaders(seed, log_folder,train_loader, test_loader, model, num_classes, ): + args = get_default_args() + args.random_seed = seed + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + feature_folder = log_folder / "features" + feature_loaders, metadata, = compute_features_and_metadata(args, train_loader, test_loader, model, + feature_folder + , + num_classes, + ) + return feature_loaders, metadata, device,args +def add_index_to_dataloader(loader, sample_weight=None,): + return torch.utils.data.DataLoader( + IndexedDataset(loader.dataset, sample_weight=sample_weight), + batch_size=loader.batch_size, + sampler=loader.sampler, + num_workers=loader.num_workers, + collate_fn=loader.collate_fn, + pin_memory=loader.pin_memory, + drop_last=loader.drop_last, + timeout=loader.timeout, + worker_init_fn=loader.worker_init_fn, + multiprocessing_context=loader.multiprocessing_context + ) + + +class IndexedDataset(torch.utils.data.Dataset): + def __init__(self, ds, sample_weight=None): + super(torch.utils.data.Dataset, self).__init__() + self.dataset = ds + self.sample_weight = sample_weight + + def __getitem__(self, index): + val = self.dataset[index] + if self.sample_weight is None: + return val + (index,) + else: + weight = self.sample_weight[index] + return val + (weight, index) + + def __len__(self): + return len(self.dataset) + + +def get_default_args(): + # Default args from glm_saga, https://github.com/MadryLab/glm_saga + parser = ArgumentParser() + parser.add_argument('--dataset', type=str, help='dataset name') + parser.add_argument('--dataset-type', type=str, help='One of ["language", "vision"]') + 
parser.add_argument('--dataset-path', type=str, help='path to dataset') + parser.add_argument('--model-path', type=str, help='path to model checkpoint') + parser.add_argument('--arch', type=str, help='model architecture type') + parser.add_argument('--out-path', help='location for saving results') + parser.add_argument('--cache', action='store_true', help='cache deep features') + parser.add_argument('--balance', action='store_true', help='balance classes for evaluation') + + parser.add_argument('--device', default='cuda') + parser.add_argument('--random-seed', default=0) + parser.add_argument('--num-workers', type=int, default=2) + parser.add_argument('--batch-size', type=int, default=256) + parser.add_argument('--val-frac', type=float, default=0.1) + parser.add_argument('--lr-decay-factor', type=float, default=1) + parser.add_argument('--lr', type=float, default=0.1) + parser.add_argument('--alpha', type=float, default=0.99) + parser.add_argument('--max-epochs', type=int, default=2000) + parser.add_argument('--verbose', type=int, default=200) + parser.add_argument('--tol', type=float, default=1e-4) + parser.add_argument('--lookbehind', type=int, default=3) + parser.add_argument('--lam-factor', type=float, default=0.001) + parser.add_argument('--group', action='store_true') + args = parser.parse_args() + + args = parser.parse_args() + return args + + +def select_in_loader(feature_loaders, feature_selection): + for dataset in feature_loaders["train"].dataset.dataset.dataset.datasets: # Val is indexed via the same dataset as train + tensors = list(dataset.tensors) + if tensors[0].shape[1] == len(feature_selection): + continue + tensors[0] = tensors[0][:, feature_selection] + dataset.tensors = tensors + for dataset in feature_loaders["test"].dataset.datasets: + tensors = list(dataset.tensors) + if tensors[0].shape[1] == len(feature_selection): + continue + tensors[0] = tensors[0][:, feature_selection] + dataset.tensors = tensors + return feature_loaders + diff --git a/tmp/Datasets/CUB200/CUB_200_2011/README b/tmp/Datasets/CUB200/CUB_200_2011/README new file mode 100644 index 0000000000000000000000000000000000000000..4cf4b8f6a8e963af922b4c320df3c9700af95914 --- /dev/null +++ b/tmp/Datasets/CUB200/CUB_200_2011/README @@ -0,0 +1,140 @@ +=========================================== +The Caltech-UCSD Birds-200-2011 Dataset +=========================================== + +For more information about the dataset, visit the project website: + + http://www.vision.caltech.edu/visipedia + +If you use the dataset in a publication, please cite the dataset in +the style described on the dataset website (see url above). + +Directory Information +--------------------- + +- images/ + The images organized in subdirectories based on species. See + IMAGES AND CLASS LABELS section below for more info. +- parts/ + 15 part locations per image. See PART LOCATIONS section below + for more info. +- attributes/ + 322 binary attribute labels from MTurk workers. See ATTRIBUTE LABELS + section below for more info. 
+ + + +========================= +IMAGES AND CLASS LABELS: +========================= +Images are contained in the directory images/, with 200 subdirectories (one for each bird species) + +------- List of image files (images.txt) ------ +The list of image file names is contained in the file images.txt, with each line corresponding to one image: + + +------------------------------------------ + + +------- Train/test split (train_test_split.txt) ------ +The suggested train/test split is contained in the file train_test_split.txt, with each line corresponding to one image: + + + +where corresponds to the ID in images.txt, and a value of 1 or 0 for denotes that the file is in the training or test set, respectively. +------------------------------------------------------ + + +------- List of class names (classes.txt) ------ +The list of class names (bird species) is contained in the file classes.txt, with each line corresponding to one class: + + +-------------------------------------------- + + +------- Image class labels (image_class_labels.txt) ------ +The ground truth class labels (bird species labels) for each image are contained in the file image_class_labels.txt, with each line corresponding to one image: + + + +where and correspond to the IDs in images.txt and classes.txt, respectively. +--------------------------------------------------------- + + + + + +========================= +BOUNDING BOXES: +========================= + +Each image contains a single bounding box label. Bounding box labels are contained in the file bounding_boxes.txt, with each line corresponding to one image: + + + +where corresponds to the ID in images.txt, and , , , and are all measured in pixels + + + + +========================= +PART LOCATIONS: +========================= + +------- List of part names (parts/parts.txt) ------ +The list of all part names is contained in the file parts/parts.txt, with each line corresponding to one part: + + +------------------------------------------ + + +------- Part locations (parts/part_locs.txt) ------ +The set of all ground truth part locations is contained in the file parts/part_locs.txt, with each line corresponding to the annotation of a particular part in a particular image: + + + +where and correspond to the IDs in images.txt and parts/parts.txt, respectively. and denote the pixel location of the center of the part. is 0 if the part is not visible in the image and 1 otherwise. +---------------------------------------------------------- + + +------- MTurk part locations (parts/part_click_locs.txt) ------ +A set of multiple part locations for each image and part, as perceived by multiple MTurk users is contained in parts/part_click_locs.txt, with each line corresponding to the annotation of a particular part in a particular image by a different MTurk worker: + +