relative_path | section | filename | text
---|---|---|---|
PyTorch/LanguageModeling/BERT/scripts/configs | configs | pretrain_config | #!/usr/bin/env bash
# Copyright (c) 2020-2021 NVIDIA CORPORATION. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
dgxa100-80g_8gpu_fp16 ()
{
train_batch_size="8192"
learning_rate="6e-3"
precision="fp16"
num_gpus=8
warmup_proportion="0.2843"
train_steps=7038
save_checkpoint_steps=200
resume_training="false"
create_logfile="true"
accumulate_gradients="true"
gradient_accumulation_steps=32
seed=42
job_name="bert_lamb_pretraining"
allreduce_post_accumulation="true"
allreduce_post_accumulation_fp16="true"
train_batch_size_phase2=4096
learning_rate_phase2="4e-3"
warmup_proportion_phase2="0.128"
train_steps_phase2=1563
gradient_accumulation_steps_phase2=128
DATASET=pretrain/phase1/unbinned/parquet # change this for other datasets
DATA_DIR_PHASE1="$BERT_PREP_WORKING_DIR/${DATASET}/"
BERT_CONFIG=bert_configs/large.json
CODEDIR="/workspace/bert"
init_checkpoint="None"
DATASET2=pretrain/phase2/bin_size_64/parquet # change this for other datasets
DATA_DIR_PHASE2="$BERT_PREP_WORKING_DIR/${DATASET2}/"
wikipedia_source=$BERT_PREP_WORKING_DIR/wikipedia/source/
num_dask_workers=128
num_shards_per_worker=128
num_workers=4
sample_ratio="0.9"
phase2_bin_size=64
masking=static
echo $train_batch_size $learning_rate $precision $num_gpus \
$warmup_proportion $train_steps $save_checkpoint_steps \
$resume_training $create_logfile $accumulate_gradients \
$gradient_accumulation_steps $seed $job_name $allreduce_post_accumulation \
$allreduce_post_accumulation_fp16 $train_batch_size_phase2 $learning_rate_phase2 \
$warmup_proportion_phase2 $train_steps_phase2 $gradient_accumulation_steps_phase2 \
$DATA_DIR_PHASE1 $DATA_DIR_PHASE2 $CODEDIR $init_checkpoint \
$wikipedia_source $num_dask_workers $num_shards_per_worker $num_workers \
$sample_ratio $phase2_bin_size $masking \
$BERT_CONFIG
}
dgxa100-80g_8gpu_tf32 ()
{
train_batch_size="8192"
learning_rate="6e-3"
precision="tf32"
num_gpus=8
warmup_proportion="0.2843"
train_steps=7038
save_checkpoint_steps=200
resume_training="false"
create_logfile="true"
accumulate_gradients="true"
gradient_accumulation_steps=64
seed=42
job_name="bert_lamb_pretraining"
allreduce_post_accumulation="true"
allreduce_post_accumulation_fp16="false"
train_batch_size_phase2=4096
learning_rate_phase2="4e-3"
warmup_proportion_phase2="0.128"
train_steps_phase2=1563
gradient_accumulation_steps_phase2=256
DATASET=pretrain/phase1/unbinned/parquet # change this for other datasets
DATA_DIR_PHASE1="$BERT_PREP_WORKING_DIR/${DATASET}/"
BERT_CONFIG=bert_configs/large.json
CODEDIR="/workspace/bert"
init_checkpoint="None"
DATASET2=pretrain/phase2/bin_size_64/parquet # change this for other datasets
DATA_DIR_PHASE2="$BERT_PREP_WORKING_DIR/${DATASET2}/"
wikipedia_source=$BERT_PREP_WORKING_DIR/wikipedia/source/
num_dask_workers=128
num_shards_per_worker=128
num_workers=4
sample_ratio="0.9"
phase2_bin_size=64
masking=static
echo $train_batch_size $learning_rate $precision $num_gpus \
$warmup_proportion $train_steps $save_checkpoint_steps \
$resume_training $create_logfile $accumulate_gradients \
$gradient_accumulation_steps $seed $job_name $allreduce_post_accumulation \
$allreduce_post_accumulation_fp16 $train_batch_size_phase2 $learning_rate_phase2 \
$warmup_proportion_phase2 $train_steps_phase2 $gradient_accumulation_steps_phase2 \
$DATA_DIR_PHASE1 $DATA_DIR_PHASE2 $CODEDIR $init_checkpoint \
$wikipedia_source $num_dask_workers $num_shards_per_worker $num_workers \
$sample_ratio $phase2_bin_size $masking \
$BERT_CONFIG
}
dgx1-32g_8gpu_fp16 ()
{
train_batch_size="8192"
learning_rate="6e-3"
precision="fp16"
num_gpus=8
warmup_proportion="0.2843"
train_steps=7038
save_checkpoint_steps=200
resume_training="false"
create_logfile="true"
accumulate_gradients="true"
gradient_accumulation_steps=64
seed=42
job_name="bert_lamb_pretraining"
allreduce_post_accumulation="true"
allreduce_post_accumulation_fp16="true"
train_batch_size_phase2=4096
learning_rate_phase2="4e-3"
warmup_proportion_phase2="0.128"
train_steps_phase2=1563
gradient_accumulation_steps_phase2=256
DATASET=pretrain/phase1/unbinned/parquet # change this for other datasets
DATA_DIR_PHASE1="$BERT_PREP_WORKING_DIR/${DATASET}/"
BERT_CONFIG=bert_configs/large.json
CODEDIR="/workspace/bert"
init_checkpoint="None"
DATASET2=pretrain/phase2/bin_size_64/parquet # change this for other datasets
DATA_DIR_PHASE2="$BERT_PREP_WORKING_DIR/${DATASET2}/"
wikipedia_source=$BERT_PREP_WORKING_DIR/wikipedia/source/
num_dask_workers=128
num_shards_per_worker=128
num_workers=4
sample_ratio="0.9"
phase2_bin_size=64
masking=static
echo $train_batch_size $learning_rate $precision $num_gpus \
$warmup_proportion $train_steps $save_checkpoint_steps \
$resume_training $create_logfile $accumulate_gradients \
$gradient_accumulation_steps $seed $job_name $allreduce_post_accumulation \
$allreduce_post_accumulation_fp16 $train_batch_size_phase2 $learning_rate_phase2 \
$warmup_proportion_phase2 $train_steps_phase2 $gradient_accumulation_steps_phase2 \
$DATA_DIR_PHASE1 $DATA_DIR_PHASE2 $CODEDIR $init_checkpoint \
$wikipedia_source $num_dask_workers $num_shards_per_worker $num_workers \
$sample_ratio $phase2_bin_size $masking \
$BERT_CONFIG
}
dgx1-32g_8gpu_fp32 ()
{
train_batch_size="8192"
learning_rate="6e-3"
precision="fp32"
num_gpus=8
warmup_proportion="0.2843"
train_steps=7038
save_checkpoint_steps=200
resume_training="false"
create_logfile="true"
accumulate_gradients="true"
gradient_accumulation_steps=128
seed=42
job_name="bert_lamb_pretraining"
allreduce_post_accumulation="true"
allreduce_post_accumulation_fp16="false"
train_batch_size_phase2=4096
learning_rate_phase2="4e-3"
warmup_proportion_phase2="0.128"
train_steps_phase2=1563
gradient_accumulation_steps_phase2=512
DATASET=pretrain/phase1/unbinned/parquet # change this for other datasets
DATA_DIR_PHASE1="$BERT_PREP_WORKING_DIR/${DATASET}/"
BERT_CONFIG=bert_configs/large.json
CODEDIR="/workspace/bert"
init_checkpoint="None"
DATASET2=pretrain/phase2/bin_size_64/parquet # change this for other datasets
DATA_DIR_PHASE2="$BERT_PREP_WORKING_DIR/${DATASET2}/"
wikipedia_source=$BERT_PREP_WORKING_DIR/wikipedia/source/
num_dask_workers=128
num_shards_per_worker=128
num_workers=4
sample_ratio="0.9"
phase2_bin_size=64
masking=static
echo $train_batch_size $learning_rate $precision $num_gpus \
$warmup_proportion $train_steps $save_checkpoint_steps \
$resume_training $create_logfile $accumulate_gradients \
$gradient_accumulation_steps $seed $job_name $allreduce_post_accumulation \
$allreduce_post_accumulation_fp16 $train_batch_size_phase2 $learning_rate_phase2 \
$warmup_proportion_phase2 $train_steps_phase2 $gradient_accumulation_steps_phase2 \
$DATA_DIR_PHASE1 $DATA_DIR_PHASE2 $CODEDIR $init_checkpoint \
$wikipedia_source $num_dask_workers $num_shards_per_worker $num_workers \
$sample_ratio $phase2_bin_size $masking \
$BERT_CONFIG
}
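# Usage sketch (assumption: the accompanying launcher consumes one of these
# functions as positional arguments):
#   bash scripts/run_pretraining.sh \
#     $(source scripts/configs/pretrain_config.sh && dgxa100-80g_8gpu_fp16)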
|
PyTorch/Forecasting/TFT/triton/runner | runner | __init__ | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
Tools/PyTorch/TimeSeriesPredictionPlatform/conf | conf | preproc_config | # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defaults:
- dataset@_here_: ???
_target_: data.data_utils.Preprocessor
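# Notes (added): in Hydra, `???` marks a mandatory value that must be supplied at
# launch via a command-line override, and `_target_` names the class that
# hydra.utils.instantiate will construct.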
|
PyTorch/LanguageModeling/BERT/triton/large/runner | runner | pipeline_impl | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
if __name__ == "__main__" and __package__ is None:
__package__ = pathlib.Path(__file__).parent.name
from ...runner.pipeline import Pipeline
pipeline = Pipeline()
pipeline.model_export(
commands=(
r"""
if [[ "${EXPORT_FORMAT}" == "ts-trace" || "${EXPORT_FORMAT}" == "ts-script" ]]; then
export FORMAT_SUFFIX="pt"
else
export FORMAT_SUFFIX="${EXPORT_FORMAT}"
fi
if [[ "${EXPORT_FORMAT}" == "trt" ]]; then
export FLAG="--fixed-batch-dim"
else
export FLAG=""
fi
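# Note (added): TensorRT export fixes the batch dimension via --fixed-batch-dim;
# consistently, the deploy stage below sets max-batch-size to 0 for the trt format.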
python3 triton/export_model.py \
--input-path triton/model.py \
--input-type pyt \
--output-path ${SHARED_DIR}/exported_model.${FORMAT_SUFFIX} \
--output-type ${EXPORT_FORMAT} \
--dataloader triton/dataloader.py \
--ignore-unknown-parameters \
--onnx-opset 13 \
${FLAG} \
\
--config-file bert_configs/large.json \
--checkpoint ${CHECKPOINT_DIR}/bert_large_qa.pt \
--precision ${EXPORT_PRECISION} \
\
--vocab-file ${DATASETS_DIR}/data/google_pretrained_weights/uncased_L-24_H-1024_A-16/vocab.txt \
--max-seq-length ${MAX_SEQ_LENGTH} \
--predict-file ${DATASETS_DIR}/data/squad/v1.1/dev-v1.1.json \
--batch-size ${MAX_BATCH_SIZE}
""",
)
)
pipeline.model_conversion(
commands=(
r"""
if [[ "${EXPORT_FORMAT}" == "ts-trace" || "${EXPORT_FORMAT}" == "ts-script" ]]; then
export FORMAT_SUFFIX="pt"
else
export FORMAT_SUFFIX="${EXPORT_FORMAT}"
fi
if [ "${EXPORT_FORMAT}" != "${FORMAT}" ]; then
model-navigator convert \
--model-name ${MODEL_NAME} \
--model-path ${SHARED_DIR}/exported_model.${FORMAT_SUFFIX} \
--output-path ${SHARED_DIR}/converted_model \
--target-formats ${FORMAT} \
--target-precisions ${PRECISION} \
--launch-mode local \
--override-workspace \
--verbose \
\
--onnx-opsets 13 \
--inputs input__0:${MAX_BATCH_SIZE},${MAX_SEQ_LENGTH}:int32 \
--inputs input__1:${MAX_BATCH_SIZE},${MAX_SEQ_LENGTH}:int32 \
--inputs input__2:${MAX_BATCH_SIZE},${MAX_SEQ_LENGTH}:int32 \
--min-shapes input__0=${MAX_BATCH_SIZE},${MAX_SEQ_LENGTH} \
input__1=${MAX_BATCH_SIZE},${MAX_SEQ_LENGTH} \
input__2=${MAX_BATCH_SIZE},${MAX_SEQ_LENGTH} \
--max-shapes input__0=${MAX_BATCH_SIZE},${MAX_SEQ_LENGTH} \
input__1=${MAX_BATCH_SIZE},${MAX_SEQ_LENGTH} \
input__2=${MAX_BATCH_SIZE},${MAX_SEQ_LENGTH} \
--opt-shapes input__0=${MAX_BATCH_SIZE},${MAX_SEQ_LENGTH} \
input__1=${MAX_BATCH_SIZE},${MAX_SEQ_LENGTH} \
input__2=${MAX_BATCH_SIZE},${MAX_SEQ_LENGTH} \
--max-batch-size ${MAX_BATCH_SIZE} \
--tensorrt-max-workspace-size 8589934592 \
--atol 2 output__0=5.0 \
output__1=5.0 \
--rtol 1 output__0=5.0 \
output__1=5.0 \
| grep -v "broadcasting input1 to make tensors conform"
else
mv ${SHARED_DIR}/exported_model.${FORMAT_SUFFIX} ${SHARED_DIR}/converted_model
mv ${SHARED_DIR}/exported_model.${FORMAT_SUFFIX}.yaml ${SHARED_DIR}/converted_model.yaml 2>/dev/null || true
fi
""",
)
)
pipeline.model_deploy(
commands=(
r"""
if [[ "${FORMAT}" == "ts-trace" || "${FORMAT}" == "ts-script" ]]; then
export CONFIG_FORMAT="torchscript"
else
export CONFIG_FORMAT="${FORMAT}"
fi
if [[ "${FORMAT}" == "trt" ]]; then
export MBS="0"
else
export MBS="${MAX_BATCH_SIZE}"
fi
model-navigator triton-config-model \
--model-repository ${MODEL_REPOSITORY_PATH} \
--model-name ${MODEL_NAME} \
--model-version 1 \
--model-path ${SHARED_DIR}/converted_model \
--model-format ${CONFIG_FORMAT} \
--model-control-mode ${TRITON_LOAD_MODEL_METHOD} \
--verbose \
--load-model \
--load-model-timeout-s 100 \
\
--backend-accelerator ${ACCELERATOR} \
--tensorrt-precision ${ACCELERATOR_PRECISION} \
--max-batch-size ${MBS} \
--preferred-batch-sizes ${TRITON_PREFERRED_BATCH_SIZES} \
--max-queue-delay-us ${TRITON_MAX_QUEUE_DELAY} \
--engine-count-per-device gpu=${TRITON_GPU_ENGINE_COUNT}
""",
)
)
pipeline.triton_prepare_performance_profiling_data(
commands=(
r"""
mkdir -p ${SHARED_DIR}/input_data
""",
r"""
python triton/prepare_input_data.py \
--dataloader triton/dataloader.py \
--input-data-dir ${SHARED_DIR}/input_data \
\
--batch-size ${MAX_BATCH_SIZE} \
--max-seq-length ${MAX_SEQ_LENGTH} \
--predict-file ${DATASETS_DIR}/data/squad/v1.1/dev-v1.1.json \
--vocab-file ${DATASETS_DIR}/data/google_pretrained_weights/uncased_L-24_H-1024_A-16/vocab.txt
""",
)
)
pipeline.triton_performance_offline_tests(
commands=(
r"""
python triton/run_performance_on_triton.py \
--model-repository ${MODEL_REPOSITORY_PATH} \
--model-name ${MODEL_NAME} \
--input-data ${SHARED_DIR}/input_data/data.json \
--input-shapes input__0:${MAX_SEQ_LENGTH} \
--input-shapes input__1:${MAX_SEQ_LENGTH} \
--input-shapes input__2:${MAX_SEQ_LENGTH} \
--batch-sizes ${BATCH_SIZE} \
--number-of-triton-instances ${TRITON_INSTANCES} \
--number-of-model-instances ${TRITON_GPU_ENGINE_COUNT} \
--batching-mode static \
--evaluation-mode offline \
--performance-tool perf_analyzer \
--result-path ${SHARED_DIR}/triton_performance_offline.csv
""",
),
result_path="${SHARED_DIR}/triton_performance_offline.csv",
)
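# Note (assumption about the surrounding runner framework): this module only
# declares the stages; the runner imports `pipeline` and executes each stage's
# shell commands in order, with variables such as ${FORMAT}, ${SHARED_DIR} and
# ${MODEL_NAME} supplied by the experiment configuration.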
|
Tools/PyTorch/TimeSeriesPredictionPlatform | TimeSeriesPredictionPlatform | requirements | # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
pmdarima==1.8.0
wget==3.2
hydra-core==1.1.1
pyunpack==0.2.2
tensorboard
optuna
optuna-dashboard
hydra-optuna-sweeper==1.1.2
hydra-joblib-launcher==1.1.5
pandas==1.1.4
dgl-cu111
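# Note (added): dgl-cu111 is the DGL build for CUDA 11.1 and should match the
# CUDA version of the container in which this platform runs.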
|
PyTorch/SpeechSynthesis/Tacotron2/trtis_cpp/src/trt/plugins/taco2ProjectionPlugin | taco2ProjectionPlugin | taco2ProjectionLayerPlugin | /*
* Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the NVIDIA CORPORATION nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "taco2ProjectionLayerPlugin.h"
#include "taco2ProjectionKernel.h"
#include "taco2Utils.h"
#include <cassert>
#include <cstdlib>
#include <cstring>
#include <cuda_runtime.h> // cudaError_t
#include <iostream>
#include <new> // placement new used in the move-assignment operator
#include <sstream>
#include <stdexcept>
#include <string>
using namespace nvinfer1;
namespace nvinfer1
{
namespace plugin
{
using value_type = Taco2ProjectionLayerPlugin::value_type;
/******************************************************************************
* CONSTANTS ******************************************************************
*****************************************************************************/
namespace
{
constexpr const char* const PLUGIN_NAME = "Taco2Projection";
constexpr const char* const PLUGIN_VERSION = "0.1.0";
constexpr const int NUM_INPUTS = 2;
} // namespace
const float Taco2ProjectionLayerPlugin::ONE = 1.0f;
const float Taco2ProjectionLayerPlugin::ZERO = 0.0f;
/******************************************************************************
* HELPER FUNCTIONS ***********************************************************
*****************************************************************************/
namespace
{
std::vector<value_type> toVector(const Weights& weights)
{
if (weights.type != DataType::kFLOAT)
{
throw std::runtime_error(
"Invalid data type for Taco2Projection weights: " + std::to_string(static_cast<int>(weights.type)));
}
const value_type* const valuesBegin = static_cast<const value_type*>(weights.values);
const value_type* const valuesEnd = valuesBegin + weights.count;
return std::vector<value_type>(valuesBegin, valuesEnd);
}
const void* offset(const void* ptr, const size_t offset)
{
return reinterpret_cast<const void*>(static_cast<const uint8_t*>(ptr) + offset);
}
} // namespace
/******************************************************************************
* STATIC METHODS *************************************************************
*****************************************************************************/
const char* Taco2ProjectionLayerPlugin::getName()
{
return PLUGIN_NAME;
}
const char* Taco2ProjectionLayerPlugin::getVersion()
{
return PLUGIN_VERSION;
}
Taco2ProjectionLayerPlugin Taco2ProjectionLayerPlugin::deserialize(const void* const data, const size_t length)
{
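// Serialized layout (as read below): four int32 values
// [hiddenInputLength, contextInputLength, numChannelDimension, numGateDimension],
// followed by float data for weightsChannel, weightsGate, biasChannel and biasGate.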
if (length < 4 * sizeof(int32_t))
{
throw std::runtime_error("Invalid serialized size: " + std::to_string(length));
}
const int hiddenInputLength = static_cast<const int32_t*>(data)[0];
const int contextInputLength = static_cast<const int32_t*>(data)[1];
const int numChannelDimension = static_cast<const int32_t*>(data)[2];
const int numGateDimension = static_cast<const int32_t*>(data)[3];
const int inputLength = hiddenInputLength + contextInputLength;
const int numDimensions = numChannelDimension + numGateDimension;
const size_t reqSize = 4 * sizeof(int32_t) + sizeof(value_type) * ((inputLength * numDimensions) + numDimensions);
if (reqSize != length)
{
throw std::runtime_error(
"Invalid serialized size: " + std::to_string(length) + " / " + std::to_string(reqSize));
}
const Weights weightsChannel{
DataType::kFLOAT, offset(data, 4 * sizeof(int32_t)), numChannelDimension * inputLength};
const Weights weightsGate{DataType::kFLOAT,
offset(weightsChannel.values, sizeof(value_type) * weightsChannel.count), numGateDimension * inputLength};
const Weights biasChannel{
DataType::kFLOAT, offset(weightsGate.values, sizeof(value_type) * weightsGate.count), numChannelDimension};
const Weights biasGate{
DataType::kFLOAT, offset(biasChannel.values, sizeof(value_type) * biasChannel.count), numGateDimension};
Taco2ProjectionLayerPlugin layer(weightsChannel, weightsGate, biasChannel, biasGate, hiddenInputLength,
contextInputLength, numChannelDimension, numGateDimension);
return layer;
}
/******************************************************************************
* CONSTRUCTORS / DESTRUCTOR **************************************************
*****************************************************************************/
Taco2ProjectionLayerPlugin::Taco2ProjectionLayerPlugin(const nvinfer1::Weights& weightsChannel,
const nvinfer1::Weights& weightsGate, const nvinfer1::Weights& biasChannel, const nvinfer1::Weights& biasGate,
const int hiddenInputLength, const int contextInputLength, const int numChannelDimension,
const int numGateDimension)
: mHiddenInputLength(hiddenInputLength)
, mContextInputLength(contextInputLength)
, mNumChannelDimension(numChannelDimension)
, mNumGateDimension(numGateDimension)
, mWeightsChannel(toVector(weightsChannel))
, mWeightsGate(toVector(weightsGate))
, mBiasChannel(toVector(biasChannel))
, mBiasGate(toVector(biasGate))
, mKernel()
, mNamespace()
{
const size_t expectedWeightsChannel = getTotalInputLength() * mNumChannelDimension;
if (mWeightsChannel.size() != expectedWeightsChannel)
{
throw std::runtime_error("Taco2Projection expected " + std::to_string(expectedWeightsChannel)
+ " channel weights but given " + std::to_string(mWeightsChannel.size()));
}
const size_t expectedWeightsGate = getTotalInputLength() * mNumGateDimension;
if (mWeightsGate.size() != expectedWeightsGate)
{
throw std::runtime_error("Taco2Projection expected " + std::to_string(expectedWeightsGate)
+ " gate weights but given " + std::to_string(mWeightsGate.size()));
}
const size_t expectedBiasChannel = mNumChannelDimension;
if (mBiasChannel.size() != expectedBiasChannel)
{
throw std::runtime_error("Taco2Projection expected " + std::to_string(expectedBiasChannel)
+ " channel bias but given " + std::to_string(mBiasChannel.size()));
}
const size_t expectedBiasGate = mNumGateDimension;
if (mBiasGate.size() != expectedBiasGate)
{
throw std::runtime_error("Taco2Projection expected " + std::to_string(expectedBiasGate)
+ " gate bias but given " + std::to_string(mBiasGate.size()));
}
}
Taco2ProjectionLayerPlugin::Taco2ProjectionLayerPlugin(Taco2ProjectionLayerPlugin&& other)
: mHiddenInputLength(other.mHiddenInputLength)
, mContextInputLength(other.mContextInputLength)
, mNumChannelDimension(other.mNumChannelDimension)
, mNumGateDimension(other.mNumGateDimension)
, mWeightsChannel(std::move(other.mWeightsChannel))
, mWeightsGate(std::move(other.mWeightsGate))
, mBiasChannel(std::move(other.mBiasChannel))
, mBiasGate(std::move(other.mBiasGate))
, mKernel(std::move(other.mKernel))
, mNamespace(std::move(other.mNamespace))
{
other.mHiddenInputLength = 0;
other.mContextInputLength = 0;
other.mNumChannelDimension = 0;
other.mNumGateDimension = 0;
}
Taco2ProjectionLayerPlugin::~Taco2ProjectionLayerPlugin()
{
destroy();
}
/******************************************************************************
* PUBLIC METHODS *************************************************************
*****************************************************************************/
Taco2ProjectionLayerPlugin& Taco2ProjectionLayerPlugin::operator=(Taco2ProjectionLayerPlugin&& other)
{
// defer to the move constructor by rebuilding this object in place
// (assigning a temporary here would recurse into this same operator)
if (this != &other)
{
this->~Taco2ProjectionLayerPlugin();
new (this) Taco2ProjectionLayerPlugin(std::move(other));
}
return *this;
}
DataType Taco2ProjectionLayerPlugin::getOutputDataType(
const int /* index */, const DataType* const /* inputTypes */, const int /* nbInputs */) const
{
return DataType::kFLOAT;
}
const char* Taco2ProjectionLayerPlugin::getPluginType() const
{
return getName();
}
const char* Taco2ProjectionLayerPlugin::getPluginVersion() const
{
return getVersion();
}
int Taco2ProjectionLayerPlugin::getNbOutputs() const
{
return 1;
}
DimsExprs Taco2ProjectionLayerPlugin::getOutputDimensions(
const int outputIndex, const DimsExprs* inputs, const int nbInputs, IExprBuilder& exprBuilder)
{
if (outputIndex >= getNbOutputs())
{
throw std::runtime_error("Only has one output.");
}
if (nbInputs != NUM_INPUTS)
{
throw std::runtime_error(
"Can only handle " + std::to_string(NUM_INPUTS) + " input tensors: " + std::to_string(nbInputs));
}
return DimsExprs{3, {inputs[0].d[0], exprBuilder.constant(1), exprBuilder.constant(getTotalDimensions())}};
}
bool Taco2ProjectionLayerPlugin::supportsFormatCombination(
const int pos, const PluginTensorDesc* const inOut, const int /* nbInputs */, const int /* nbOutputs */)
{
return inOut[pos].format == TensorFormat::kLINEAR && inOut[pos].type == DataType::kFLOAT;
}
void Taco2ProjectionLayerPlugin::configurePlugin(const DynamicPluginTensorDesc* const in, const int nbInputs,
const DynamicPluginTensorDesc* const out, const int nbOutputs)
{
if (nbInputs != NUM_INPUTS)
{
throw std::runtime_error(
"Can only handle " + std::to_string(NUM_INPUTS) + " input tensors: " + std::to_string(nbInputs));
}
for (int i = 0; i < nbInputs; ++i)
{
if (in[i].desc.type != DataType::kFLOAT)
{
throw std::runtime_error("Only FLOAT supported as input " + std::to_string(i) + " : "
+ std::to_string(static_cast<int>(in[i].desc.type)));
}
}
// assert dimensions
{
bool foundDim = false;
const Dims dims = in[0].desc.dims;
for (int d = 1; d < dims.nbDims; ++d)
{
if (dims.d[d] != 1)
{
if (foundDim || dims.d[d] != mHiddenInputLength)
{
throw std::runtime_error(
"First projection input must be 1 x hiddenInputLength"
" : "
+ taco2::Taco2Utils::dimsToString(dims));
}
foundDim = true;
}
}
if (!foundDim)
{
throw std::runtime_error(
"First projection input must be 1 x hiddenInputLength"
" : "
+ taco2::Taco2Utils::dimsToString(dims));
}
}
{
bool foundDim = false;
const Dims dims = in[1].desc.dims;
for (int d = 1; d < dims.nbDims; ++d)
{
if (dims.d[d] != 1)
{
if (foundDim || dims.d[d] != mContextInputLength)
{
throw std::runtime_error(
"Second projection input must be 1 x contextInputLength"
" : "
+ taco2::Taco2Utils::dimsToString(dims));
}
foundDim = true;
}
}
if (!foundDim)
{
throw std::runtime_error(
"Second projection input must be 1 x contextInputLength"
" : "
+ taco2::Taco2Utils::dimsToString(dims));
}
}
for (int i = 0; i < nbOutputs; ++i)
{
if (out[i].desc.type != DataType::kFLOAT)
{
throw std::runtime_error("Only FLOAT supported as output: " + std::to_string(i) + " : "
+ std::to_string(static_cast<int>(out[i].desc.type)));
}
}
}
int Taco2ProjectionLayerPlugin::initialize()
{
try
{
// concat projection and gate FC layers
std::vector<float> hostWeightCat;
hostWeightCat.insert(hostWeightCat.end(), mWeightsChannel.begin(), mWeightsChannel.end());
hostWeightCat.insert(hostWeightCat.end(), mWeightsGate.begin(), mWeightsGate.end());
std::vector<float> hostBiasCat;
hostBiasCat.insert(hostBiasCat.end(), mBiasChannel.begin(), mBiasChannel.end());
hostBiasCat.insert(hostBiasCat.end(), mBiasGate.begin(), mBiasGate.end());
mKernel.reset(new Taco2ProjectionKernel(hostWeightCat, hostBiasCat, mHiddenInputLength, mContextInputLength,
mNumChannelDimension + mNumGateDimension));
}
catch (const std::exception& e)
{
std::cerr << "Taco2ProjectionLayerPlugin initialization failed: " << e.what() << std::endl;
return 1;
}
return 0;
}
void Taco2ProjectionLayerPlugin::terminate()
{
mKernel.reset();
}
size_t Taco2ProjectionLayerPlugin::getWorkspaceSize(
const PluginTensorDesc* const /* in */, const int /* nbInputs */, const PluginTensorDesc* const /* out */, const int /* nbOutputs */) const
{
return 0;
}
int Taco2ProjectionLayerPlugin::enqueue(const PluginTensorDesc* const inputDesc, const PluginTensorDesc* /* outputDesc */,
const void* const* const inputs, void* const* const outputs, void* const /* workspace */, cudaStream_t stream)
{
const int batchSize = inputDesc[0].dims.d[0];
if (batchSize != 1)
{
// we only support batch size of 1 right now
std::cerr << "Taco2ProjectionLayerPlugin plugin does not support batch size other "
"than 1: got "
<< batchSize << std::endl;
std::cerr << "Recompile without plugins to use a larger batch size." << std::endl;
return 1;
}
else if (!mKernel)
{
std::cerr << "Taco2ProjectionLayerPlugin is not initialized properly." << std::endl;
return 1;
}
// name inputs and outputs
const value_type* const hiddenDevice = static_cast<const value_type*>(inputs[0]);
const value_type* const contextDevice = static_cast<const value_type*>(inputs[1]);
value_type* const outputDevice = static_cast<value_type*>(outputs[0]);
mKernel->execute(hiddenDevice, contextDevice, outputDevice, stream);
return 0;
}
size_t Taco2ProjectionLayerPlugin::getSerializationSize() const
{
return 4 * sizeof(int32_t)
+ sizeof(value_type) * (getTotalInputLength() * getTotalDimensions() + getTotalDimensions());
}
void Taco2ProjectionLayerPlugin::serialize(void* const buffer) const
{
static_cast<int32_t*>(buffer)[0] = mHiddenInputLength;
static_cast<int32_t*>(buffer)[1] = mContextInputLength;
static_cast<int32_t*>(buffer)[2] = mNumChannelDimension;
static_cast<int32_t*>(buffer)[3] = mNumGateDimension;
float* const weightsChannel = reinterpret_cast<float*>(static_cast<int32_t*>(buffer) + 4);
float* const weightsGate = weightsChannel + (getTotalInputLength() * mNumChannelDimension);
float* const biasChannel = weightsGate + (getTotalInputLength() * mNumGateDimension);
float* const biasGate = biasChannel + mNumChannelDimension;
memcpy(weightsChannel, mWeightsChannel.data(), sizeof(value_type) * mWeightsChannel.size());
memcpy(weightsGate, mWeightsGate.data(), sizeof(value_type) * mWeightsGate.size());
memcpy(biasChannel, mBiasChannel.data(), sizeof(value_type) * mBiasChannel.size());
memcpy(biasGate, mBiasGate.data(), sizeof(value_type) * mBiasGate.size());
}
void Taco2ProjectionLayerPlugin::destroy()
{
terminate();
}
IPluginV2DynamicExt* Taco2ProjectionLayerPlugin::clone() const
{
// call the constructor, which copies the weight data
Taco2ProjectionLayerPlugin clone(
Weights{DataType::kFLOAT, mWeightsChannel.data(), static_cast<int64_t>(mWeightsChannel.size())},
Weights{DataType::kFLOAT, mWeightsGate.data(), static_cast<int64_t>(mWeightsGate.size())},
Weights{DataType::kFLOAT, mBiasChannel.data(), static_cast<int64_t>(mBiasChannel.size())},
Weights{DataType::kFLOAT, mBiasGate.data(), static_cast<int64_t>(mBiasGate.size())}, mHiddenInputLength,
mContextInputLength, mNumChannelDimension, mNumGateDimension);
if (mKernel)
{
// initialize the clone too
clone.initialize();
}
// move it to the heap last to avoid exceptions causing memory leaks
return new Taco2ProjectionLayerPlugin(std::move(clone));
}
void Taco2ProjectionLayerPlugin::setPluginNamespace(const char* pluginNamespace)
{
mNamespace = pluginNamespace;
}
const char* Taco2ProjectionLayerPlugin::getPluginNamespace() const
{
return mNamespace.c_str();
}
/******************************************************************************
* PRIVATE METHODS ************************************************************
*****************************************************************************/
int Taco2ProjectionLayerPlugin::getTotalDimensions() const
{
return mNumChannelDimension + mNumGateDimension;
}
int Taco2ProjectionLayerPlugin::getTotalInputLength() const
{
return mHiddenInputLength + mContextInputLength;
}
} // namespace plugin
} // namespace nvinfer1
|
PyTorch/SpeechRecognition/Jasper/triton/model_repo_configs/fp32/jasper-ts-trace | jasper-ts-trace | config | name: "jasper-ts-trace"
platform: "pytorch_libtorch"
default_model_filename: "model.pt"
max_batch_size: 8#MAX_BATCH
input [
{
name: "input__0"
data_type: TYPE_FP32
dims: [64, -1]
}
]
output [
{
name: "output__0"
data_type: TYPE_FP32
dims: [-1, 29]
}
]
instance_group {
count: 1#NUM_ENGINES
gpus: 0
kind: KIND_GPU
}
#db#dynamic_batching {
#db# preferred_batch_size: 8#MAX_BATCH
#db# max_queue_delay_microseconds: #MAX_QUEUE
#db#}
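# Note (added): tokens such as 8#MAX_BATCH and 1#NUM_ENGINES, and the #db# prefix,
# appear to be placeholders that deployment scripts substitute or uncomment
# (e.g. to enable the dynamic_batching block).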
|
TensorFlow/Detection/SSD/models/research/object_detection/samples/configs | configs | faster_rcnn_resnet101_kitti | # Faster R-CNN with Resnet-101 (v1)
# Trained on KITTI dataset (cars and pedestrian), initialized from COCO
# detection checkpoint.
# Users should configure the fine_tune_checkpoint field in the train config as
# well as the label_map_path and input_path fields in the train_input_reader and
# eval_input_reader. Search for "PATH_TO_BE_CONFIGURED" to find the fields that
# should be configured.
model {
faster_rcnn {
num_classes: 2
image_resizer {
keep_aspect_ratio_resizer {
# Raw KITTI images have a resolution of 1242x375; if we resize
# them to a height of 600, their width should be
# 1242/(375/600) = 1987.2
min_dimension: 600
max_dimension: 1987
}
}
feature_extractor {
type: 'faster_rcnn_resnet101'
first_stage_features_stride: 16
}
first_stage_anchor_generator {
grid_anchor_generator {
scales: [0.25, 0.5, 1.0, 2.0]
aspect_ratios: [0.5, 1.0, 2.0]
height_stride: 16
width_stride: 16
}
}
first_stage_box_predictor_conv_hyperparams {
op: CONV
regularizer {
l2_regularizer {
weight: 0.0
}
}
initializer {
truncated_normal_initializer {
stddev: 0.01
}
}
}
first_stage_nms_score_threshold: 0.0
first_stage_nms_iou_threshold: 0.7
first_stage_max_proposals: 300
first_stage_localization_loss_weight: 2.0
first_stage_objectness_loss_weight: 1.0
initial_crop_size: 14
maxpool_kernel_size: 2
maxpool_stride: 2
second_stage_box_predictor {
mask_rcnn_box_predictor {
use_dropout: false
dropout_keep_probability: 1.0
fc_hyperparams {
op: FC
regularizer {
l2_regularizer {
weight: 0.0
}
}
initializer {
variance_scaling_initializer {
factor: 1.0
uniform: true
mode: FAN_AVG
}
}
}
}
}
second_stage_post_processing {
batch_non_max_suppression {
score_threshold: 0.0
iou_threshold: 0.6
max_detections_per_class: 100
max_total_detections: 300
}
score_converter: SOFTMAX
}
second_stage_localization_loss_weight: 2.0
second_stage_classification_loss_weight: 1.0
}
}
train_config: {
batch_size: 1
optimizer {
momentum_optimizer: {
learning_rate: {
manual_step_learning_rate {
initial_learning_rate: 0.0001
schedule {
step: 500000
learning_rate: .00001
}
schedule {
step: 700000
learning_rate: .000001
}
}
}
momentum_optimizer_value: 0.9
}
use_moving_average: false
}
gradient_clipping_by_norm: 10.0
fine_tune_checkpoint: "PATH_TO_BE_CONFIGURED/model.ckpt"
from_detection_checkpoint: true
num_steps: 800000
data_augmentation_options {
random_horizontal_flip {
}
}
}
train_input_reader: {
label_map_path: "PATH_TO_BE_CONFIGURED/kitti_label_map.pbtxt"
tf_record_input_reader: {
input_path: "PATH_TO_BE_CONFIGURED/kitti_train.tfrecord"
}
}
eval_config: {
use_moving_averages: false
num_examples: 500
}
eval_input_reader: {
label_map_path: "PATH_TO_BE_CONFIGURED/kitti_label_map.pbtxt"
tf_record_input_reader: {
input_path: "PATH_TO_BE_CONFIGURED/kitti_val.tfrecord"
}
}
|
PyTorch/Segmentation/MaskRCNN/pytorch/maskrcnn_benchmark/modeling | modeling | registry | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from maskrcnn_benchmark.utils.registry import Registry
BACKBONES = Registry()
ROI_BOX_FEATURE_EXTRACTORS = Registry()
RPN_HEADS = Registry()
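# Usage sketch (assuming the decorator-style register() of
# maskrcnn_benchmark.utils.registry.Registry):
#
#   @registry.BACKBONES.register("R-50-FPN")  # hypothetical key
#   def build_resnet_fpn_backbone(cfg):
#       ...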
|
TensorFlow/Segmentation/UNet_3D_Medical/scripts | scripts | unet3d_train_benchmark_TF-AMP | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script launches a 3D-UNet training benchmark with TF-AMP.
# Usage:
# bash examples/unet3d_train_benchmark.sh <number/of/gpus> <path/to/dataset> <path/to/results/directory> <batch/size>
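# Example (hypothetical arguments):
# bash examples/unet3d_train_benchmark.sh 8 /data/msd /results 2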
horovodrun -np $1 python main.py --data_dir $2 --model_dir $3 --exec_mode train --max_steps 80 --benchmark --fold 0 --batch_size $4 --amp --xla --augment |
PyTorch/SpeechSynthesis/Tacotron2/trtis_cpp/src/trt/layers | layers | lstm | /*
* Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the NVIDIA CORPORATION nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "lstm.h"
#include "NvInfer.h"
using namespace nvinfer1;
namespace tts
{
/******************************************************************************
* PUBLIC STATIC METHODS ******************************************************
*****************************************************************************/
ILayer* LSTM::addPaddedBidirectional(INetworkDefinition* const network, ITensor* const input,
ITensor* const inputLength, const int numDimensions, const LayerData& lstmData)
{
// build LSTM
const int hiddenSize = numDimensions / 2;
IRNNv2Layer* lstm = network->addRNNv2(*input, 1, hiddenSize, input->getDimensions().d[1], RNNOperation::kLSTM);
lstm->setDirection(RNNDirection::kBIDIRECTION);
lstm->setSequenceLengths(*inputLength);
{
const int64_t inputBlockSize = numDimensions * hiddenSize;
// pytorch weights are stored in "weight_ih_l0" = {W_ii|W_if|W_ig|W_io}
const float* inputWeights = (const float*) lstmData.get("weight_ih_l0").values;
Weights wii{DataType::kFLOAT, (void*) (inputWeights), inputBlockSize};
Weights wif{DataType::kFLOAT, (void*) (inputWeights + inputBlockSize), inputBlockSize};
Weights wig{DataType::kFLOAT, (void*) (inputWeights + 2 * inputBlockSize), inputBlockSize};
Weights wio{DataType::kFLOAT, (void*) (inputWeights + 3 * inputBlockSize), inputBlockSize};
lstm->setWeightsForGate(0, RNNGateType::kINPUT, true, wii);
lstm->setWeightsForGate(0, RNNGateType::kCELL, true, wig);
lstm->setWeightsForGate(0, RNNGateType::kFORGET, true, wif);
lstm->setWeightsForGate(0, RNNGateType::kOUTPUT, true, wio);
const float* inputBias = (const float*) lstmData.get("bias_ih_l0").values;
Weights bii{DataType::kFLOAT, (void*) (inputBias), hiddenSize};
Weights bif{DataType::kFLOAT, (void*) (inputBias + hiddenSize), hiddenSize};
Weights big{DataType::kFLOAT, (void*) (inputBias + 2 * hiddenSize), hiddenSize};
Weights bio{DataType::kFLOAT, (void*) (inputBias + 3 * hiddenSize), hiddenSize};
lstm->setBiasForGate(0, RNNGateType::kINPUT, true, bii);
lstm->setBiasForGate(0, RNNGateType::kCELL, true, big);
lstm->setBiasForGate(0, RNNGateType::kFORGET, true, bif);
lstm->setBiasForGate(0, RNNGateType::kOUTPUT, true, bio);
const int64_t hiddenBlockSize = hiddenSize * hiddenSize;
// pytorch weights are stored in "weight_hh_l0" = {W_hi|W_hf|W_hg|W_ho}
const float* hiddenWeights = (const float*) lstmData.get("weight_hh_l0").values;
Weights whi{DataType::kFLOAT, (void*) (hiddenWeights), hiddenBlockSize};
Weights whf{DataType::kFLOAT, (void*) (hiddenWeights + hiddenBlockSize), hiddenBlockSize};
Weights whg{DataType::kFLOAT, (void*) (hiddenWeights + 2 * hiddenBlockSize), hiddenBlockSize};
Weights who{DataType::kFLOAT, (void*) (hiddenWeights + 3 * hiddenBlockSize), hiddenBlockSize};
lstm->setWeightsForGate(0, RNNGateType::kINPUT, false, whi);
lstm->setWeightsForGate(0, RNNGateType::kCELL, false, whg);
lstm->setWeightsForGate(0, RNNGateType::kFORGET, false, whf);
lstm->setWeightsForGate(0, RNNGateType::kOUTPUT, false, who);
const float* hiddenBias = (const float*) lstmData.get("bias_hh_l0").values;
Weights bhi{DataType::kFLOAT, (void*) (hiddenBias), hiddenSize};
Weights bhf{DataType::kFLOAT, (void*) (hiddenBias + hiddenSize), hiddenSize};
Weights bhg{DataType::kFLOAT, (void*) (hiddenBias + 2 * hiddenSize), hiddenSize};
Weights bho{DataType::kFLOAT, (void*) (hiddenBias + 3 * hiddenSize), hiddenSize};
lstm->setBiasForGate(0, RNNGateType::kINPUT, false, bhi);
lstm->setBiasForGate(0, RNNGateType::kCELL, false, bhg);
lstm->setBiasForGate(0, RNNGateType::kFORGET, false, bhf);
lstm->setBiasForGate(0, RNNGateType::kOUTPUT, false, bho);
}
{
const int64_t inputBlockSize = numDimensions * hiddenSize;
// pytorch weights are stored in "weight_ih_l0" = {W_ii|W_if|W_ig|W_io}
const float* inputWeights = (const float*) lstmData.get("weight_ih_l0_reverse").values;
Weights wii{DataType::kFLOAT, (void*) (inputWeights), inputBlockSize};
Weights wif{DataType::kFLOAT, (void*) (inputWeights + inputBlockSize), inputBlockSize};
Weights wig{DataType::kFLOAT, (void*) (inputWeights + 2 * inputBlockSize), inputBlockSize};
Weights wio{DataType::kFLOAT, (void*) (inputWeights + 3 * inputBlockSize), inputBlockSize};
lstm->setWeightsForGate(1, RNNGateType::kINPUT, true, wii);
lstm->setWeightsForGate(1, RNNGateType::kCELL, true, wig);
lstm->setWeightsForGate(1, RNNGateType::kFORGET, true, wif);
lstm->setWeightsForGate(1, RNNGateType::kOUTPUT, true, wio);
const float* inputBias = (const float*) lstmData.get("bias_ih_l0_reverse").values;
Weights bii{DataType::kFLOAT, (void*) (inputBias), hiddenSize};
Weights bif{DataType::kFLOAT, (void*) (inputBias + hiddenSize), hiddenSize};
Weights big{DataType::kFLOAT, (void*) (inputBias + 2 * hiddenSize), hiddenSize};
Weights bio{DataType::kFLOAT, (void*) (inputBias + 3 * hiddenSize), hiddenSize};
lstm->setBiasForGate(1, RNNGateType::kINPUT, true, bii);
lstm->setBiasForGate(1, RNNGateType::kCELL, true, big);
lstm->setBiasForGate(1, RNNGateType::kFORGET, true, bif);
lstm->setBiasForGate(1, RNNGateType::kOUTPUT, true, bio);
const int64_t hiddenBlockSize = hiddenSize * hiddenSize;
// pytorch weights are stored in "weight_hh_l0" = {W_hi|W_hf|W_hg|W_ho}
const float* hiddenWeights = (const float*) lstmData.get("weight_hh_l0_reverse").values;
Weights whi{DataType::kFLOAT, (void*) (hiddenWeights), hiddenBlockSize};
Weights whf{DataType::kFLOAT, (void*) (hiddenWeights + hiddenBlockSize), hiddenBlockSize};
Weights whg{DataType::kFLOAT, (void*) (hiddenWeights + 2 * hiddenBlockSize), hiddenBlockSize};
Weights who{DataType::kFLOAT, (void*) (hiddenWeights + 3 * hiddenBlockSize), hiddenBlockSize};
lstm->setWeightsForGate(1, RNNGateType::kINPUT, false, whi);
lstm->setWeightsForGate(1, RNNGateType::kCELL, false, whg);
lstm->setWeightsForGate(1, RNNGateType::kFORGET, false, whf);
lstm->setWeightsForGate(1, RNNGateType::kOUTPUT, false, who);
const float* hiddenBias = (const float*) lstmData.get("bias_hh_l0_reverse").values;
Weights bhi{DataType::kFLOAT, (void*) (hiddenBias), hiddenSize};
Weights bhf{DataType::kFLOAT, (void*) (hiddenBias + hiddenSize), hiddenSize};
Weights bhg{DataType::kFLOAT, (void*) (hiddenBias + 2 * hiddenSize), hiddenSize};
Weights bho{DataType::kFLOAT, (void*) (hiddenBias + 3 * hiddenSize), hiddenSize};
lstm->setBiasForGate(1, RNNGateType::kINPUT, false, bhi);
lstm->setBiasForGate(1, RNNGateType::kCELL, false, bhg);
lstm->setBiasForGate(1, RNNGateType::kFORGET, false, bhf);
lstm->setBiasForGate(1, RNNGateType::kOUTPUT, false, bho);
}
return lstm;
}
ILayer* LSTM::addUnidirectionalCell(INetworkDefinition* const network, ITensor* const input,
ITensor* const hiddenStatesIn, ITensor* const cellStatesIn, const int numDimensions, const LayerData& lstmData)
{
// build LSTM
const int hiddenSize = numDimensions;
const int inputLength = input->getDimensions().d[2];
IRNNv2Layer* lstm = network->addRNNv2(*input, 1, hiddenSize, input->getDimensions().d[1], RNNOperation::kLSTM);
lstm->setDirection(RNNDirection::kUNIDIRECTION);
const int64_t inputBlockSize = inputLength * hiddenSize;
// pytorch weights are stored in "weight_ih" = {W_ii|W_if|W_ig|W_io}
const float* inputWeights = (const float*) lstmData.get("weight_ih").values;
Weights wii{DataType::kFLOAT, (void*) (inputWeights), inputBlockSize};
Weights wif{DataType::kFLOAT, (void*) (inputWeights + inputBlockSize), inputBlockSize};
Weights wig{DataType::kFLOAT, (void*) (inputWeights + 2 * inputBlockSize), inputBlockSize};
Weights wio{DataType::kFLOAT, (void*) (inputWeights + 3 * inputBlockSize), inputBlockSize};
lstm->setWeightsForGate(0, RNNGateType::kINPUT, true, wii);
lstm->setWeightsForGate(0, RNNGateType::kCELL, true, wig);
lstm->setWeightsForGate(0, RNNGateType::kFORGET, true, wif);
lstm->setWeightsForGate(0, RNNGateType::kOUTPUT, true, wio);
const float* inputBias = (const float*) lstmData.get("bias_ih").values;
Weights bii{DataType::kFLOAT, (void*) (inputBias), hiddenSize};
Weights bif{DataType::kFLOAT, (void*) (inputBias + hiddenSize), hiddenSize};
Weights big{DataType::kFLOAT, (void*) (inputBias + 2 * hiddenSize), hiddenSize};
Weights bio{DataType::kFLOAT, (void*) (inputBias + 3 * hiddenSize), hiddenSize};
lstm->setBiasForGate(0, RNNGateType::kINPUT, true, bii);
lstm->setBiasForGate(0, RNNGateType::kCELL, true, big);
lstm->setBiasForGate(0, RNNGateType::kFORGET, true, bif);
lstm->setBiasForGate(0, RNNGateType::kOUTPUT, true, bio);
const int64_t hiddenBlockSize = hiddenSize * hiddenSize;
// pytorch weights are stored in "weight_hh" = {W_hi|W_hf|W_hg|W_ho}
const float* hiddenWeights = (const float*) lstmData.get("weight_hh").values;
Weights whi{DataType::kFLOAT, (void*) (hiddenWeights), hiddenBlockSize};
Weights whf{DataType::kFLOAT, (void*) (hiddenWeights + hiddenBlockSize), hiddenBlockSize};
Weights whg{DataType::kFLOAT, (void*) (hiddenWeights + 2 * hiddenBlockSize), hiddenBlockSize};
Weights who{DataType::kFLOAT, (void*) (hiddenWeights + 3 * hiddenBlockSize), hiddenBlockSize};
lstm->setWeightsForGate(0, RNNGateType::kINPUT, false, whi);
lstm->setWeightsForGate(0, RNNGateType::kCELL, false, whg);
lstm->setWeightsForGate(0, RNNGateType::kFORGET, false, whf);
lstm->setWeightsForGate(0, RNNGateType::kOUTPUT, false, who);
const float* hiddenBias = (const float*) lstmData.get("bias_hh").values;
Weights bhi{DataType::kFLOAT, (void*) (hiddenBias), hiddenSize};
Weights bhf{DataType::kFLOAT, (void*) (hiddenBias + hiddenSize), hiddenSize};
Weights bhg{DataType::kFLOAT, (void*) (hiddenBias + 2 * hiddenSize), hiddenSize};
Weights bho{DataType::kFLOAT, (void*) (hiddenBias + 3 * hiddenSize), hiddenSize};
lstm->setBiasForGate(0, RNNGateType::kINPUT, false, bhi);
lstm->setBiasForGate(0, RNNGateType::kCELL, false, bhg);
lstm->setBiasForGate(0, RNNGateType::kFORGET, false, bhf);
lstm->setBiasForGate(0, RNNGateType::kOUTPUT, false, bho);
lstm->setHiddenState(*hiddenStatesIn);
lstm->setCellState(*cellStatesIn);
return lstm;
}
} // namespace tts
|
TensorFlow/Classification/ConvNets/utils/hooks | hooks | prefill_hook | #! /usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import numpy as np
import tensorflow as tf
__all__ = ['PrefillStagingAreasHook']
class PrefillStagingAreasHook(tf.estimator.SessionRunHook):
def after_create_session(self, session, coord):
# TODO: This assumes TF collections are ordered; is this safe?
enqueue_ops = tf.get_collection('STAGING_AREA_PUTS')
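# Each iteration i runs the first i+1 put ops together, so staging areas
# earlier in the pipeline end up filled one element deeper than later ones.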
for i in range(len(enqueue_ops)):
session.run(enqueue_ops[:i + 1])
|
PyTorch/SpeechRecognition/Jasper/triton/model_repo_configs/fp16/jasper-tensorrt | jasper-tensorrt | config | # Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of NVIDIA CORPORATION nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
name: "jasper-tensorrt"
platform: "tensorrt_plan"
default_model_filename: "model.plan"
max_batch_size: 8#MAX_BATCH
input [
{
name: "input__0"
data_type: TYPE_FP16
dims: [64, -1]
}
]
output [
{
name: "output__0"
data_type: TYPE_FP16
dims: [-1, 29 ]
}
]
instance_group {
count: 1#NUM_ENGINES
gpus: 0
kind: KIND_GPU
}
#db#dynamic_batching {
#db# preferred_batch_size: 8#MAX_BATCH
#db# max_queue_delay_microseconds: #MAX_QUEUE
#db#}
|
PyTorch/SpeechSynthesis/Tacotron2/trtis_cpp/src/trt/layers | layers | attentionLayerCreator | /*
* Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the NVIDIA CORPORATION nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "attentionLayerCreator.h"
#include "dims5.h"
#include "layerData.h"
#include "NvInfer.h"
using namespace nvinfer1;
namespace tts
{
/******************************************************************************
* PUBLIC METHODS *************************************************************
*****************************************************************************/
ILayer* AttentionLayerCreator::addLocation(INetworkDefinition& network, ITensor* const input, const int attentionDim,
const int numFilters, const int kernelSize, const LayerData& convData, const LayerData& linearData,
const std::string& name)
{
// conv layer
const int padding = (kernelSize - 1) / 2;
#if NV_TENSORRT_MAJOR < 7
IConvolutionLayer* const convLayer = network.addConvolution(
*input, numFilters, DimsHW{kernelSize, 1}, convData.get("weight"), {DataType::kFLOAT, nullptr, 0});
convLayer->setPadding({padding, 0});
#else
IConvolutionLayer* const convLayer = network.addConvolutionNd(
*input, numFilters, Dims2(kernelSize, 1), convData.get("weight"), {DataType::kFLOAT, nullptr, 0});
convLayer->setPaddingNd(Dims2(padding, 0));
#endif
convLayer->setName((name + ".conv_layer").c_str());
// need to transpose
IShuffleLayer* const transLayer = network.addShuffle(*convLayer->getOutput(0));
transLayer->setFirstTranspose({0, 2, 1, 3});
transLayer->setReshapeDimensions(Dims5{1, convLayer->getOutput(0)->getDimensions().d[2],
convLayer->getOutput(0)->getDimensions().d[1], 1, convLayer->getOutput(0)->getDimensions().d[3]});
transLayer->setName((name + ".transpose").c_str());
// fully connected layer
ILayer* const linearLayer = network.addFullyConnected(
*transLayer->getOutput(0), attentionDim, linearData.get("weight"), Weights{DataType::kFLOAT, 0, 0});
linearLayer->setName((name + ".linear_layer").c_str());
return linearLayer;
}
ILayer* AttentionLayerCreator::addEnergy(INetworkDefinition& network, ITensor* const input1, ITensor* const input2,
ITensor* const input3, const LayerData& linearData, const std::string& name)
{
// summation
ILayer* const add1Layer = network.addElementWise(*input1, *input2, ElementWiseOperation::kSUM);
add1Layer->setName((name + ".0.elementwise_sum").c_str());
ILayer* const add2Layer = network.addElementWise(*add1Layer->getOutput(0), *input3, ElementWiseOperation::kSUM);
add2Layer->setName((name + ".1.elementwise_sum").c_str());
// activation
ILayer* const actLayer = network.addActivation(*add2Layer->getOutput(0), ActivationType::kTANH);
actLayer->setName((name + ".tanh").c_str());
// fully connected layer
ILayer* const linearLayer = network.addFullyConnected(
*actLayer->getOutput(0), 1, linearData.get("weight"), Weights{DataType::kFLOAT, 0, 0});
linearLayer->setName((name + ".linear_layer").c_str());
return linearLayer;
}
ILayer* AttentionLayerCreator::addPaddedSoftMax(INetworkDefinition& network, ITensor* const input,
ITensor* const inputMask, ITensor* const inputSegments, const std::string& name)
{
// make our inputs 2 dimensional
IShuffleLayer* const maskShuffleLayer = network.addShuffle(*inputMask);
maskShuffleLayer->setReshapeDimensions(Dims2{1, -1});
maskShuffleLayer->setName((name + ".mask_reshape").c_str());
IShuffleLayer* const inputShuffleLayer = network.addShuffle(*input);
inputShuffleLayer->setReshapeDimensions(Dims2{1, -1});
inputShuffleLayer->setName((name + ".input_reshape").c_str());
// perform softmax over non-padding elements
ILayer* const softMaxLayer = network.addRaggedSoftMax(*inputShuffleLayer->getOutput(0), *inputSegments);
softMaxLayer->setName((name + ".ragged_softmax").c_str());
// zero padding
ILayer* const maskLayer = network.addElementWise(
*softMaxLayer->getOutput(0), *maskShuffleLayer->getOutput(0), ElementWiseOperation::kPROD);
maskLayer->setName((name + ".mask").c_str());
// return three dimensional output
IShuffleLayer* const outShuffle = network.addShuffle(*maskLayer->getOutput(0));
outShuffle->setReshapeDimensions(Dims3{-1, 1, 1});
outShuffle->setName((name + ".transpose").c_str());
return outShuffle;
}
} // namespace tts
|
PyTorch/Recommendation/DLRM/preproc | preproc | spark_data_utils | # Copyright (c) 2021 NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import sys
from argparse import ArgumentParser
from collections import OrderedDict
from contextlib import contextmanager
from operator import itemgetter
from time import time
from pyspark import broadcast
from pyspark.sql import Row, SparkSession, Window
from pyspark.sql.functions import *
from pyspark.sql.types import *
LABEL_COL = 0
INT_COLS = list(range(1, 14))
CAT_COLS = list(range(14, 40))
def get_column_counts_with_frequency_limit(df, frequency_limit = None):
cols = ['_c%d' % i for i in CAT_COLS]
df = (df
.select(posexplode(array(*cols)))
.withColumnRenamed('pos', 'column_id')
.withColumnRenamed('col', 'data')
.filter('data is not null')
.groupBy('column_id', 'data')
.count())
if frequency_limit:
frequency_limit = frequency_limit.split(",")
exclude = []
default_limit = None
for fl in frequency_limit:
frequency_pair = fl.split(":")
if len(frequency_pair) == 1:
default_limit = int(frequency_pair[0])
elif len(frequency_pair) == 2:
df = df.filter((col('column_id') != int(frequency_pair[0]) - CAT_COLS[0]) | (col('count') >= int(frequency_pair[1])))
exclude.append(int(frequency_pair[0]))
if default_limit:
remain = [x - CAT_COLS[0] for x in CAT_COLS if x not in exclude]
df = df.filter((~col('column_id').isin(remain)) | (col('count') >= default_limit))
# alternative kept for comparison: a separate filter per remaining column instead of isin
# for i in remain:
# df = df.filter((col('column_id') != i - CAT_COLS[0]) | (col('count') >= default_limit))
return df
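# A quick illustration of the frequency_limit grammar parsed above (numbers are
# hypothetical): "15" floors every categorical column at 15 occurrences, while
# "20:100,15" keeps values of raw column _c20 only when they occur at least 100
# times and applies the default floor of 15 to all remaining columns:
#   counts = get_column_counts_with_frequency_limit(df, "20:100,15")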
def assign_id_with_window(df):
windowed = Window.partitionBy('column_id').orderBy(desc('count'))
return (df
.withColumn('id', row_number().over(windowed))
.withColumnRenamed('count', 'model_count'))
def assign_low_mem_partial_ids(df):
# To avoid scaling issues with a simple window operation, we use a more complex
# method to compute the same thing in a more distributed, Spark-specific way
df = df.orderBy(asc('column_id'), desc('count'))
# monotonically_increasing_id() stores the partition id in the top 31 bits and
# an increasing count of the rows within that partition in the rest. So we split
# it into two parts: the partition id part_id and the count mono_id
df = df.withColumn('part_id', spark_partition_id())
return df.withColumn('mono_id', monotonically_increasing_id() - shiftLeft(col('part_id'), 33))
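# Worked example of the id arithmetic above (values hypothetical): rows in
# partition 3 receive monotonically_increasing_id() values of 3 << 33,
# (3 << 33) + 1, ..., so subtracting part_id << 33 recovers the plain
# per-partition row counts 0, 1, ... that become mono_id.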
def assign_low_mem_final_ids(df):
# Now we can find the minimum and maximum mono_ids within a given column/partition pair
sub_model = df.groupBy('column_id', 'part_id').agg(max('mono_id').alias('top'), min('mono_id').alias('bottom'))
sub_model = sub_model.withColumn('diff', col('top') - col('bottom') + 1)
sub_model = sub_model.drop('top')
# This window function runs over the aggregated column/partition pair table. It
# computes a running sum of the rows within each column
windowed = Window.partitionBy('column_id').orderBy('part_id').rowsBetween(Window.unboundedPreceding, -1)
sub_model = sub_model.withColumn('running_sum', sum('diff').over(windowed)).na.fill(0, ["running_sum"])
joined = df.withColumnRenamed('column_id', 'i_column_id')
joined = joined.withColumnRenamed('part_id', 'i_part_id')
joined = joined.withColumnRenamed('count', 'model_count')
# Then we can join the original input with the pair it is a part of
joined = joined.join(sub_model, (col('i_column_id') == col('column_id')) & (col('part_id') == col('i_part_id')))
# So with all that we can subtract bottom from mono_id, making it start at 0 for each partition,
# and then add in the running_sum so the id is contiguous and unique for the entire column. The + 1
# makes it match the 1-based indexing of row_number
ret = joined.select(col('column_id'),
col('data'),
(col('mono_id') - col('bottom') + col('running_sum') + 1).cast(IntegerType()).alias('id'),
col('model_count'))
return ret
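# Tiny numeric sketch (hypothetical counts): if one column spans two partitions
# holding 4 and 3 rows, they get (bottom, running_sum) of (0, 0) and (0, 4),
# so their rows map to ids 1..4 and 5..7: contiguous, unique within the column,
# and 1-based to match the row_number path in assign_id_with_window.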
def get_column_models(combined_model):
for i in CAT_COLS:
model = (combined_model
.filter('column_id == %d' % (i - CAT_COLS[0]))
.drop('column_id'))
yield i, model
def col_of_rand_long():
return (rand() * (1 << 52)).cast(LongType())
def skewed_join(df, model, col_name, cutoff):
# Most versions of Spark don't have a good way
# to deal with a skewed join out of the box;
# some do, and if you want to replace this with
# one of those, that would be great.
# Because we have statistics about the skew
# that we can use, we divide the model up into two parts:
# one part holds the highly skewed values, and we do a
# broadcast join for that part, but keep the result in
# a separate column
b_model = broadcast(model.filter(col('model_count') >= cutoff)
.withColumnRenamed('data', col_name)
.drop('model_count'))
df = (df
.join(b_model, col_name, how='left')
.withColumnRenamed('id', 'id_tmp'))
# We also need to spread the skewed data that matched
# evenly. We use a source of randomness for this,
# but a -1 for anything that still needs to be matched
if 'ordinal' in df.columns:
rand_column = col('ordinal')
else:
rand_column = col_of_rand_long()
df = df.withColumn('join_rand',
# null values are not in the model (they are filtered out)
# but can be a source of skew, so include them in
# the even distribution
when(col('id_tmp').isNotNull() | col(col_name).isNull(), rand_column)
.otherwise(lit(-1)))
# Null out the string data that already matched to save memory
df = df.withColumn(col_name,
when(col('id_tmp').isNotNull(), None)
.otherwise(col(col_name)))
# Now do the second join, which will be a non broadcast join.
# Sadly spark is too smart for its own good and will optimize out
# joining on a column it knows will always be a constant value.
# So we have to make a convoluted version of assigning a -1 to the
# randomness column for the model itself to work around that.
nb_model = (model
.withColumn('join_rand', when(col('model_count') < cutoff, lit(-1)).otherwise(lit(-2)))
.filter(col('model_count') < cutoff)
.withColumnRenamed('data', col_name)
.drop('model_count'))
df = (df
.join(nb_model, ['join_rand', col_name], how='left')
.drop(col_name, 'join_rand')
# Pick either join result as an answer
.withColumn(col_name, coalesce(col('id'), col('id_tmp')))
.drop('id', 'id_tmp'))
return df
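# Sketch of how apply_models below drives this function (numbers hypothetical):
# with 1,000,000 total input occurrences for a column and skew_broadcast_pct=1.0,
# any dictionary entry whose model_count reaches 1% of that total takes the
# broadcast path:
#   cutoff = int(1_000_000 * 1.0 / 100.0)  # 10000
#   df = skewed_join(df, model, '_c14', cutoff)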
def apply_models(df, models, broadcast_model = False, skew_broadcast_pct = 1.0):
# Sort the models so broadcast joins come first. This is
# so we reduce the amount of shuffle data sooner rather than later.
# If we parsed the string hex values to ints early on, this would
# not make a difference.
models = sorted(models, key=itemgetter(3), reverse=True)
for i, model, original_rows, would_broadcast in models:
col_name = '_c%d' % i
if not (would_broadcast or broadcast_model):
# The data is highly skewed so we need to offset that
cutoff = int(original_rows * skew_broadcast_pct/100.0)
df = skewed_join(df, model, col_name, cutoff)
else:
# broadcast joins can handle skewed data so no need to
# do anything special
model = (model.drop('model_count')
.withColumnRenamed('data', col_name))
model = broadcast(model) if broadcast_model else model
df = (df
.join(model, col_name, how='left')
.drop(col_name)
.withColumnRenamed('id', col_name))
return df.fillna(0, ['_c%d' % i for i in CAT_COLS])
def transform_log(df, enable_log_transform=False):
cols = ['_c%d' % i for i in INT_COLS]
if enable_log_transform:
for col_name in cols:
df = df.withColumn(col_name, log(df[col_name] + 3))
return df.fillna(0, cols)
def would_broadcast(spark, str_path):
sc = spark.sparkContext
config = sc._jsc.hadoopConfiguration()
path = sc._jvm.org.apache.hadoop.fs.Path(str_path)
fs = sc._jvm.org.apache.hadoop.fs.FileSystem.get(config)
stat = fs.listFiles(path, True)
total = 0
while stat.hasNext():
total = total + stat.next().getLen()
sql_conf = sc._jvm.org.apache.spark.sql.internal.SQLConf()
cutoff = sql_conf.autoBroadcastJoinThreshold() * sql_conf.fileCompressionFactor()
return total <= cutoff
def delete_data_source(spark, path):
sc = spark.sparkContext
config = sc._jsc.hadoopConfiguration()
path = sc._jvm.org.apache.hadoop.fs.Path(path)
sc._jvm.org.apache.hadoop.fs.FileSystem.get(config).delete(path, True)
def load_raw(spark, folder, day_range):
label_fields = [StructField('_c%d' % LABEL_COL, IntegerType())]
int_fields = [StructField('_c%d' % i, IntegerType()) for i in INT_COLS]
str_fields = [StructField('_c%d' % i, StringType()) for i in CAT_COLS]
schema = StructType(label_fields + int_fields + str_fields)
paths = [os.path.join(folder, 'day_%d' % i) for i in day_range]
return (spark
.read
.schema(schema)
.option('sep', '\t')
.csv(paths))
def rand_ordinal(df):
# create a random long from the double precision float.
# The fraction part of a double is 52 bits, so we try to capture as much
# of that as possible
return df.withColumn('ordinal', col_of_rand_long())
def day_from_ordinal(df, num_days):
return df.withColumn('day', (col('ordinal') % num_days).cast(IntegerType()))
def day_from_input_file(df):
return df.withColumn('day', substring_index(input_file_name(), '_', -1).cast(IntegerType()))
def pseudo_sort_by_day_plus(spark, df, num_days):
# Sort is very expensive because it needs to calculate the partitions
# which in our case may involve rereading all of the data. In some cases
# we can avoid this by repartitioning the data and sorting within a single partition
shuffle_parts = int(spark.conf.get('spark.sql.shuffle.partitions'))
extra_parts = int(shuffle_parts/num_days)
if extra_parts <= 0:
df = df.repartition('day')
else:
# We want to spread out the computation to about the same amount as shuffle_parts
divided = (col('ordinal') / num_days).cast(LongType())
extra_ident = divided % extra_parts
df = df.repartition(col('day'), extra_ident)
return df.sortWithinPartitions('day', 'ordinal')
def load_combined_model(spark, model_folder):
path = os.path.join(model_folder, 'combined.parquet')
return spark.read.parquet(path)
def save_combined_model(df, model_folder, mode=None):
path = os.path.join(model_folder, 'combined.parquet')
df.write.parquet(path, mode=mode)
def delete_combined_model(spark, model_folder):
path = os.path.join(model_folder, 'combined.parquet')
delete_data_source(spark, path)
def load_low_mem_partial_ids(spark, model_folder):
path = os.path.join(model_folder, 'partial_ids.parquet')
return spark.read.parquet(path)
def save_low_mem_partial_ids(df, model_folder, mode=None):
path = os.path.join(model_folder, 'partial_ids.parquet')
df.write.parquet(path, mode=mode)
def delete_low_mem_partial_ids(spark, model_folder):
path = os.path.join(model_folder, 'partial_ids.parquet')
delete_data_source(spark, path)
def load_column_models(spark, model_folder, count_required):
for i in CAT_COLS:
path = os.path.join(model_folder, '%d.parquet' % i)
df = spark.read.parquet(path)
if count_required:
values = df.agg(sum('model_count').alias('sum'), count('*').alias('size')).collect()
else:
values = df.agg(sum('model_count').alias('sum')).collect()
yield i, df, values[0], would_broadcast(spark, path)
def save_column_models(column_models, model_folder, mode=None):
for i, model in column_models:
path = os.path.join(model_folder, '%d.parquet' % i)
model.write.parquet(path, mode=mode)
def save_model_size(model_size, path, write_mode):
if os.path.exists(path) and write_mode == 'errorifexists':
print('Error: model size file %s exists' % path)
sys.exit(1)
os.makedirs(os.path.dirname(os.path.abspath(path)), exist_ok=True)
with open(path, 'w') as fp:
json.dump(model_size, fp, indent=4)
_benchmark = {}
@contextmanager
def _timed(step):
start = time()
yield
end = time()
_benchmark[step] = end - start
def _parse_args():
parser = ArgumentParser()
parser.add_argument(
'--mode',
required=True,
choices=['generate_models', 'transform'])
parser.add_argument('--days', required=True)
parser.add_argument('--input_folder', required=True)
parser.add_argument('--output_folder')
parser.add_argument('--model_size_file')
parser.add_argument('--model_folder', required=True)
parser.add_argument(
'--write_mode',
choices=['overwrite', 'errorifexists'],
default='errorifexists')
parser.add_argument('--frequency_limit')
parser.add_argument('--no_numeric_log_col', action='store_true')
# Support for running in a lower-memory environment
parser.add_argument('--low_mem', action='store_true')
parser.add_argument(
'--output_ordering',
choices=['total_random', 'day_random', 'any', 'input'],
default='total_random')
parser.add_argument(
'--output_partitioning',
choices=['day', 'none'],
default='none')
parser.add_argument('--dict_build_shuffle_parallel_per_day', type=int, default=2)
parser.add_argument('--apply_shuffle_parallel_per_day', type=int, default=25)
parser.add_argument('--skew_broadcast_pct', type=float, default=1.0)
parser.add_argument('--debug_mode', action='store_true')
args = parser.parse_args()
start, end = args.days.split('-')
args.day_range = list(range(int(start), int(end) + 1))
args.days = len(args.day_range)
return args
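# Example invocations (paths and the day range are illustrative):
#   spark-submit spark_data_utils.py --mode generate_models \
#       --days 0-23 --input_folder /data/criteo --model_folder /data/models \
#       --frequency_limit 15 --write_mode overwrite
#   spark-submit spark_data_utils.py --mode transform \
#       --days 0-23 --input_folder /data/criteo --output_folder /data/output \
#       --model_folder /data/models --output_ordering total_random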
def _main():
args = _parse_args()
spark = SparkSession.builder.getOrCreate()
df = load_raw(spark, args.input_folder, args.day_range)
if args.mode == 'generate_models':
spark.conf.set('spark.sql.shuffle.partitions', args.days * args.dict_build_shuffle_parallel_per_day)
with _timed('generate models'):
col_counts = get_column_counts_with_frequency_limit(df, args.frequency_limit)
if args.low_mem:
# In low memory mode we have to save an intermediate result,
# because if we try to do it in one query Spark ends up assigning the
# partial ids in two different locations that are not guaranteed to line up.
# This prevents that from happening by assigning the partial ids
# and then writing them out.
save_low_mem_partial_ids(
assign_low_mem_partial_ids(col_counts),
args.model_folder,
args.write_mode)
save_combined_model(
assign_low_mem_final_ids(load_low_mem_partial_ids(spark, args.model_folder)),
args.model_folder,
args.write_mode)
if not args.debug_mode:
delete_low_mem_partial_ids(spark, args.model_folder)
else:
save_combined_model(
assign_id_with_window(col_counts),
args.model_folder,
args.write_mode)
save_column_models(
get_column_models(load_combined_model(spark, args.model_folder)),
args.model_folder,
args.write_mode)
if not args.debug_mode:
delete_combined_model(spark, args.model_folder)
if args.mode == 'transform':
with _timed('transform'):
if args.output_ordering == 'total_random':
df = rand_ordinal(df)
if args.output_partitioning == 'day':
df = day_from_ordinal(df, args.days)
elif args.output_ordering == 'day_random':
df = rand_ordinal(df)
df = day_from_input_file(df)
elif args.output_ordering == 'input':
df = df.withColumn('ordinal', monotonically_increasing_id())
if args.output_partitioning == 'day':
df = day_from_input_file(df)
else: # any ordering
if args.output_partitioning == 'day':
df = day_from_input_file(df)
models = list(load_column_models(spark, args.model_folder, bool(args.model_size_file)))
if args.model_size_file:
save_model_size(
OrderedDict(('_c%d' % i, agg.size) for i, _, agg, _ in models),
args.model_size_file,
args.write_mode)
models = [(i, df, agg.sum, flag) for i, df, agg, flag in models]
df = apply_models(
df,
models,
not args.low_mem,
args.skew_broadcast_pct)
df = transform_log(df, not args.no_numeric_log_col)
if args.output_partitioning == 'day':
partitionBy = 'day'
else:
partitionBy = None
if args.output_ordering == 'total_random':
if args.output_partitioning == 'day':
df = pseudo_sort_by_day_plus(spark, df, args.days)
else: # none
# Don't do a full sort, it is expensive. The target order is random anyway, so
# just shuffle it randomly
df = df.repartition('ordinal').sortWithinPartitions('ordinal')
df = df.drop('ordinal')
elif args.output_ordering == 'day_random':
df = pseudo_sort_by_day_plus(spark, df, args.days)
df = df.drop('ordinal')
if args.output_partitioning != 'day':
df = df.drop('day')
elif args.output_ordering == 'input':
if args.low_mem:
# This is the slowest option. We totally messed up the order so we have to put
# it back in the correct order
df = df.orderBy('ordinal')
else:
# Applying the dictionary happened within a single task so we are already really
# close to the correct order, just need to sort within the partition
df = df.sortWithinPartitions('ordinal')
df = df.drop('ordinal')
if args.output_partitioning != 'day':
df = df.drop('day')
# else: any ordering so do nothing the ordering does not matter
df.write.parquet(
args.output_folder,
mode=args.write_mode,
partitionBy=partitionBy)
print('=' * 100)
print(_benchmark)
if __name__ == '__main__':
_main()
|
TensorFlow2/Recommendation/WideAndDeep/triton/deployment_toolkit/triton_inference_runner | triton_inference_runner | http | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
from pathlib import Path
from typing import Optional
# pytype: disable=import-error
try:
from tritonclient import utils as client_utils # noqa: F401
except ImportError:
import tritonclientutils as client_utils # noqa: F401
try:
import tritonclient.http as http_client
except (ImportError, RuntimeError):
import tritonhttpclient as http_client
# pytype: enable=import-error
# method from PEP-366 to support relative import in executed modules
if __package__ is None:
__package__ = Path(__file__).parent.name
from .base import BaseRunner
LOGGER = logging.getLogger("triton_inference_runner.http")
class HTTPInferenceRunner(BaseRunner):
def _parse_content(self, response):
return json.dumps(response, indent=4)
class SyncInferenceRunner(HTTPInferenceRunner):
def __iter__(self):
LOGGER.debug(f"Connecting to {self._server_url}")
client = http_client.InferenceServerClient(
url=self._server_url,
verbose=self._verbose,
connection_timeout=self._response_wait_t,
network_timeout=self._response_wait_t,
)
error = self._verify_triton_state(client)
if error:
raise RuntimeError(f"Could not communicate to Triton Server: {error}")
LOGGER.debug(
f"Triton server {self._server_url} and model {self._model_name}:{self._model_version} " f"are up and ready!"
)
model_config = client.get_model_config(self._model_name, self._model_version)
model_metadata = client.get_model_metadata(self._model_name, self._model_version)
LOGGER.info(f"Model config {self._parse_content(model_config)}")
LOGGER.info(f"Model metadata {self._parse_content(model_metadata)}")
inputs = {tm["name"]: tm for tm in model_metadata["inputs"]}
outputs = {tm["name"]: tm for tm in model_metadata["outputs"]}
output_names = list(outputs)
outputs_req = [http_client.InferRequestedOutput(name) for name in outputs]
for ids, x, y_real in self._dataloader:
infer_inputs = []
for name in inputs:
data = x[name]
datatype = inputs[name]["datatype"]
infer_input = http_client.InferInput(name, data.shape, datatype)
target_np_dtype = client_utils.triton_to_np_dtype(datatype)
data = data.astype(target_np_dtype)
infer_input.set_data_from_numpy(data)
infer_inputs.append(infer_input)
results = client.infer(
model_name=self._model_name,
model_version=self._model_version,
inputs=infer_inputs,
outputs=outputs_req,
timeout=self._response_wait_t_ms,
)
y_pred = {name: results.as_numpy(name) for name in output_names}
yield ids, x, y_pred, y_real
class AsyncInferenceRunner(HTTPInferenceRunner):
DEFAULT_MAX_UNRESP_REQS = 128
def __init__(
self,
server_url: str,
model_name: str,
model_version: str,
*,
dataloader,
verbose=False,
response_wait_time: Optional[float] = None,
max_unresponded_requests: Optional[int] = None,
):
super().__init__(
server_url,
model_name,
model_version,
dataloader=dataloader,
verbose=verbose,
response_wait_time=response_wait_time,
)
self._max_unresp_reqs = (
self.DEFAULT_MAX_UNRESP_REQS if max_unresponded_requests is None else max_unresponded_requests
)
def __iter__(self):
client = http_client.InferenceServerClient(
url=self._server_url,
verbose=self._verbose,
concurrency=self._max_unresp_reqs,
connection_timeout=self._response_wait_t,
network_timeout=self._response_wait_t,
)
self._errors = self._verify_triton_state(client)
if self._errors:
return
LOGGER.debug(
f"Triton server {self._server_url} and model {self._model_name}:{self._model_version} " f"are up and ready!"
)
model_config = client.get_model_config(self._model_name, self._model_version)
model_metadata = client.get_model_metadata(self._model_name, self._model_version)
LOGGER.info(f"Model config {self._parse_content(model_config)}")
LOGGER.info(f"Model metadata {self._parse_content(model_metadata)}")
inputs = {tm["name"]: tm for tm in model_metadata["inputs"]}
outputs = {tm["name"]: tm for tm in model_metadata["outputs"]}
output_names = list(outputs)
async_requests = []
for ids, x, y_real in self._dataloader:
infer_inputs = []
for name in inputs:
data = x[name]
datatype = inputs[name]["datatype"]
infer_input = http_client.InferInput(name, data.shape, datatype)
target_np_dtype = client_utils.triton_to_np_dtype(datatype)
data = data.astype(target_np_dtype)
infer_input.set_data_from_numpy(data)
infer_inputs.append(infer_input)
outputs_req = [http_client.InferRequestedOutput(name) for name in outputs]
request_id = str(ids[0])
async_request = client.async_infer(
model_name=self._model_name,
model_version=self._model_version,
inputs=infer_inputs,
outputs=outputs_req,
request_id=request_id,
timeout=self._response_wait_t_ms,
)
async_requests.append((ids, x, y_real, async_request))
if len(async_requests) >= self._max_unresp_reqs:
yield from self._yield_response(async_requests, output_names)
async_requests = []
yield from self._yield_response(async_requests, output_names)
LOGGER.debug("Finished request thread")
def _yield_response(self, async_requests, output_names):
for ids, x, y_real, async_response in async_requests:
result = async_response.get_result()
y_pred = {name: result.as_numpy(name) for name in output_names}
yield ids, x, y_pred, y_real
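# Minimal usage sketch. The URL, model identifiers, and dataloader are
# placeholders; as the loops above assume, the dataloader must yield
# (ids, x, y_real) tuples, and the constructor signature is taken from the
# Async variant's call to BaseRunner:
#   runner = SyncInferenceRunner("localhost:8000", "model", "1",
#                                dataloader=my_dataloader, verbose=False,
#                                response_wait_time=120)
#   for ids, x, y_pred, y_real in runner:
#       ...  # y_pred maps output names to numpy arrays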
|
PyTorch/SpeechRecognition/Jasper/common/text | text | symbols | # Copyright (c) 2017 Keith Ito
""" from https://github.com/keithito/tacotron """
'''
Defines the set of symbols used in text input to the model.
The default is a set of ASCII characters that works well for English or text
that has been run through Unidecode. For other data, you can modify _characters.
See TRAINING_DATA.md for details.
'''
from . import cmudict
_pad = '_'
_punctuation = '!\'(),.:;? '
_special = '-'
_letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
# Prepend "@" to ARPAbet symbols to ensure uniqueness (some are the same as uppercase letters):
_arpabet = ['@' + s for s in cmudict.valid_symbols]
# Export all symbols:
symbols = [_pad] + list(_special) + list(_punctuation) + list(_letters) + _arpabet
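# A typical lookup built from this list elsewhere in the original Tacotron
# code (shown only as an illustration):
#   _symbol_to_id = {s: i for i, s in enumerate(symbols)}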
|
PyTorch/Recommendation/NCF | NCF | transcode | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from argparse import ArgumentParser
import os
import torch
import pandas as pd
from feature_spec import FeatureSpec
from neumf_constants import USER_CHANNEL_NAME, ITEM_CHANNEL_NAME, LABEL_CHANNEL_NAME
def parse_args():
parser = ArgumentParser()
parser.add_argument('--path', type=str, default='',
help='Path to input data directory')
parser.add_argument('--feature_spec_in', type=str, default='feature_spec.yaml',
help='Name of the input feature specification file, or path relative to data directory.')
parser.add_argument('--output', type=str, default='/data',
help='Path to output data directory')
parser.add_argument('--feature_spec_out', type=str, default='feature_spec.yaml',
help='Name of the output feature specification file, or path relative to data directory.')
return parser.parse_args()
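# Example invocation (paths are illustrative):
#   python transcode.py --path /data/ncf_csv --output /data/ncf_pt \
#       --feature_spec_in feature_spec.yaml --feature_spec_out feature_spec.yaml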
def main():
args = parse_args()
args_output = args.output
args_path = args.path
args_feature_spec_in = args.feature_spec_in
args_feature_spec_out = args.feature_spec_out
feature_spec_path = os.path.join(args_path, args_feature_spec_in)
feature_spec = FeatureSpec.from_yaml(feature_spec_path)
# Only three features are transcoded - this is NCF specific
user_feature_name = feature_spec.channel_spec[USER_CHANNEL_NAME][0]
item_feature_name = feature_spec.channel_spec[ITEM_CHANNEL_NAME][0]
label_feature_name = feature_spec.channel_spec[LABEL_CHANNEL_NAME][0]
categorical_features = [user_feature_name, item_feature_name]
found_cardinalities = {f: 0 for f in categorical_features}
new_source_spec = {}
for mapping_name, mapping in feature_spec.source_spec.items():
# Load all chunks and link into one df
chunk_dfs = []
for chunk in mapping:
assert chunk['type'] == 'csv', "Only csv files supported in this transcoder"
file_dfs = []
for file in chunk['files']:
path_to_load = os.path.join(feature_spec.base_directory, file)
file_dfs.append(pd.read_csv(path_to_load, header=None))
chunk_df = pd.concat(file_dfs, ignore_index=True)
chunk_df.columns = chunk['features']
chunk_df.reset_index(drop=True, inplace=True)
chunk_dfs.append(chunk_df)
mapping_df = pd.concat(chunk_dfs, axis=1) # This takes care of making sure feature names are unique
for feature in categorical_features:
mapping_cardinality = mapping_df[feature].max() + 1
previous_cardinality = found_cardinalities[feature]
found_cardinalities[feature] = max(previous_cardinality, mapping_cardinality)
# We group together users and items, while separating labels. This is because of the target dtypes: ids are int,
# while labels are float to compute loss.
ints_tensor = torch.from_numpy(mapping_df[[user_feature_name, item_feature_name]].values).long()
ints_file = f"{mapping_name}_data_0.pt"
ints_chunk = {"type": "torch_tensor",
"features": [user_feature_name, item_feature_name],
"files": [ints_file]}
torch.save(ints_tensor, os.path.join(args_output, ints_file))
floats_tensor = torch.from_numpy(mapping_df[[label_feature_name]].values).float()
floats_file = f"{mapping_name}_data_1.pt"
floats_chunk = {"type": "torch_tensor",
"features": [label_feature_name],
"files": [floats_file]}
torch.save(floats_tensor, os.path.join(args_output, floats_file))
new_source_spec[mapping_name] = [ints_chunk, floats_chunk]
for feature in categorical_features:
found_cardinality = found_cardinalities[feature]
declared_cardinality = feature_spec.feature_spec[feature].get('cardinality', 'auto')
if declared_cardinality != "auto":
declared = int(declared_cardinality)
assert declared >= found_cardinality, "Specified cardinality conflicts with the data"
found_cardinalities[feature] = declared
new_inner_feature_spec = {
user_feature_name: {
"dtype": "torch.int64",
"cardinality": int(found_cardinalities[user_feature_name])
},
item_feature_name: {
"dtype": "torch.int64",
"cardinality": int(found_cardinalities[item_feature_name])
},
label_feature_name: {
"dtype": "torch.float32"
}
}
new_feature_spec = FeatureSpec(feature_spec=new_inner_feature_spec,
source_spec=new_source_spec,
channel_spec=feature_spec.channel_spec,
metadata=feature_spec.metadata,
base_directory="")
feature_spec_save_path = os.path.join(args_output, args_feature_spec_out)
new_feature_spec.to_yaml(output_path=feature_spec_save_path)
if __name__ == '__main__':
main()
|
PyTorch/LanguageModeling/BERT/triton | triton | metrics | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import os
import subprocess
import sys
from argparse import Namespace
from typing import Any, Dict, List, Optional
import numpy as np
from run_squad import RawResult, convert_examples_to_features, get_answers, read_squad_examples
from tokenization import BertTokenizer
#
from triton.deployment_toolkit.core import BaseMetricsCalculator
class MetricsCalculator(BaseMetricsCalculator):
def __init__(
self,
eval_script: str = "data/squad/v1.1/evaluate-v1.1.py",
predict_file: str = "",
output_dir: str = "./",
n_best_size: int = 20,
max_answer_length: int = 30,
version_2_with_negative: bool = False,
max_seq_length: int = 384,
doc_stride: int = 128,
max_query_length: int = 64,
vocab_file: str = "",
do_lower_case: bool = True,
max_len: int = 512,
):
tokenizer = BertTokenizer(vocab_file, do_lower_case=do_lower_case, max_len=max_len) # for bert large
self.eval_examples = read_squad_examples(
input_file=predict_file, is_training=False, version_2_with_negative=version_2_with_negative
)
self.eval_features = convert_examples_to_features(
examples=self.eval_examples,
tokenizer=tokenizer,
max_seq_length=max_seq_length,
doc_stride=doc_stride,
max_query_length=max_query_length,
is_training=False,
)
self.output_dir = output_dir
self.eval_script = eval_script
self.predict_file = predict_file
args = Namespace(
version_2_with_negative=version_2_with_negative,
n_best_size=n_best_size,
max_answer_length=max_answer_length,
verbose_logging=False,
do_lower_case=do_lower_case,
)
self.args = args
self.all_results: List[RawResult] = []
def _calc(self) -> Dict[str, float]:
dataset_size = len(self.eval_features)
self.all_results = self.all_results[:dataset_size]
output_prediction_file = os.path.join(self.output_dir, "predictions.json")
answers, _ = get_answers(self.eval_examples, self.eval_features, self.all_results, self.args)
with open(output_prediction_file, "w") as f:
f.write(json.dumps(answers, indent=4) + "\n")
eval_out = subprocess.check_output(
[sys.executable, self.eval_script, self.predict_file, output_prediction_file]
)
scores = str(eval_out).strip()
# exact_match = float(scores.split(":")[1].split(",")[0])
f1 = float(scores.split(":")[2].split("}")[0])
return {"f1": f1}
def update(
self,
ids: List[Any],
y_pred: Dict[str, np.ndarray],
x: Optional[Dict[str, np.ndarray]],
y_real: Optional[Dict[str, np.ndarray]],
):
start_logits = y_pred["output__0"]
end_logits = y_pred["output__1"]
for unique_id, start_logit, end_logit in zip(ids, start_logits, end_logits):
start_logit = start_logit.tolist()
end_logit = end_logit.tolist()
raw_result = RawResult(unique_id=unique_id, start_logits=start_logit, end_logits=end_logit)
self.all_results.append(raw_result)
@property
def metrics(self) -> Dict[str, float]:
return self._calc()
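# Rough usage sketch (paths are placeholders; update() is normally driven by
# the deployment toolkit's evaluation loop):
#   calc = MetricsCalculator(predict_file="data/squad/v1.1/dev-v1.1.json",
#                            vocab_file="vocab.txt")
#   for ids, x, y_pred, y_real in batches:
#       calc.update(ids, y_pred, x, y_real)
#   print(calc.metrics)  # e.g. {"f1": 91.0}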
|
Tools/PyTorch/TimeSeriesPredictionPlatform/triton | triton | export_model | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import logging
import os
from pathlib import Path
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"
os.environ["TF_ENABLE_DEPRECATION_WARNINGS"] = "1"
# method from PEP-366 to support relative import in executed modules
if __name__ == "__main__" and __package__ is None:
__package__ = Path(__file__).parent.name
from .deployment_toolkit.args import ArgParserGenerator # noqa: E402 module level import not at top of file
from .deployment_toolkit.core import ( # noqa: E402 module level import not at top of file
DATALOADER_FN_NAME,
BaseLoader,
BaseSaver,
Format,
load_from_file,
)
from .deployment_toolkit.extensions import loaders, savers # noqa: E402 module level import not at top of file
LOGGER = logging.getLogger("export_model")
INPUT_MODEL_TYPES = [Format.TF_ESTIMATOR, Format.TF_KERAS, Format.PYT]
OUTPUT_MODEL_TYPES = [Format.TF_SAVEDMODEL, Format.TS_TRACE, Format.TS_SCRIPT, Format.ONNX]
def _get_args():
parser = argparse.ArgumentParser(
description="Script for exporting models from supported frameworks.", allow_abbrev=False
)
parser.add_argument("--input-path", help="Path to input python module", required=True)
parser.add_argument(
"--input-type", help="Input model type", choices=[f.value for f in INPUT_MODEL_TYPES], required=True
)
parser.add_argument("--output-path", help="Path to output model file", required=True)
parser.add_argument(
"--output-type", help="Output model type", choices=[f.value for f in OUTPUT_MODEL_TYPES], required=True
)
parser.add_argument("--dataloader", help="Path to python module containing data loader")
parser.add_argument("-v", "--verbose", help="Verbose logs", action="store_true", default=False)
parser.add_argument(
"--ignore-unknown-parameters",
help="Ignore unknown parameters (argument often used in CI where set of arguments is constant)",
action="store_true",
default=False,
)
args, unparsed_args = parser.parse_known_args()
Loader: BaseLoader = loaders.get(args.input_type)
ArgParserGenerator(Loader, module_path=args.input_path).update_argparser(parser)
if args.input_type == Format.PYT.value and args.output_type == Format.ONNX.value:
saver_type = f"{Format.PYT.value}--{Format.ONNX.value}"
else:
saver_type = args.output_type
Saver: BaseSaver = savers.get(saver_type)
ArgParserGenerator(Saver).update_argparser(parser)
if args.dataloader is not None:
get_dataloader_fn = load_from_file(args.dataloader, label="dataloader", target=DATALOADER_FN_NAME)
ArgParserGenerator(get_dataloader_fn).update_argparser(parser)
if args.ignore_unknown_parameters:
args, unknown_args = parser.parse_known_args()
LOGGER.warning(f"Got additional args {unknown_args}")
else:
args = parser.parse_args()
return args
def main():
args = _get_args()
log_level = logging.INFO if not args.verbose else logging.DEBUG
log_format = "%(asctime)s %(levelname)s %(name)s %(message)s"
logging.basicConfig(level=log_level, format=log_format)
LOGGER.info("args:")
for key, value in vars(args).items():
LOGGER.info(f" {key} = {value}")
dataloader_fn = None
if args.dataloader is not None:
get_dataloader_fn = load_from_file(args.dataloader, label="dataloader", target=DATALOADER_FN_NAME)
dataloader_fn = ArgParserGenerator(get_dataloader_fn).from_args(args)
Loader: BaseLoader = loaders.get(args.input_type)
loader = ArgParserGenerator(Loader, module_path=args.input_path).from_args(args)
model = loader.load(args.input_path, dataloader_fn=dataloader_fn, output_type=args.output_type)
LOGGER.info("inputs: %s", model.inputs)
LOGGER.info("outputs: %s", model.outputs)
if args.input_type == Format.PYT.value and args.output_type == Format.ONNX.value:
saver_type = f"{Format.PYT.value}--{Format.ONNX.value}"
else:
saver_type = args.output_type
Saver: BaseSaver = savers.get(saver_type)
saver = ArgParserGenerator(Saver).from_args(args)
saver.save(model, args.output_path, dataloader_fn)
if __name__ == "__main__":
main()
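# Example invocation (paths and format names are illustrative; valid values
# come from the Format enum above):
#   python -m triton.export_model --input-path triton/model.py --input-type pyt \
#       --output-path /tmp/model.onnx --output-type onnx \
#       --dataloader triton/dataloader.py --ignore-unknown-parameters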
|
Tools/PyTorch/TimeSeriesPredictionPlatform | TimeSeriesPredictionPlatform | launch_training | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import warnings
import os
import hydra
from omegaconf import OmegaConf
import torch
import conf.conf_utils
from distributed_utils import is_main_process, init_distributed, init_parallel
from training.utils import set_seed, get_optimization_objectives
from loggers.log_helper import log_parameters
warnings.filterwarnings("ignore")
@hydra.main(config_path="conf", config_name="train_config")
def main(config):
trainer_type = config.trainer._target_
set_seed(config.get("seed", None))
model = hydra.utils.instantiate(config.model)
train, valid, test = hydra.utils.call(config.dataset)
evaluator = hydra.utils.instantiate(config.evaluator, test_data=test)
if 'CTLTrainer' in trainer_type:
init_parallel()
init_distributed()
model = model.to(device=config.model.config.device)
trainer = hydra.utils.instantiate(
config.trainer,
optimizer={'params': model.parameters()},
model=model,
train_dataset=train,
valid_dataset=valid,
)
log_parameters(trainer.logger, config)
trainer.train()
if is_main_process():
checkpoint = torch.load("best_checkpoint.zip", map_location=evaluator.device)
model.load_state_dict(checkpoint["model_state_dict"])
preds, labels, ids, weights = evaluator.predict(model)
eval_metrics = evaluator.evaluate(preds, labels, ids, weights)
trainer.logger.log(step=[], data=eval_metrics, verbosity=0)
trainer.logger.flush()
del train, valid, test, model, trainer
torch.cuda.empty_cache()
objectives = get_optimization_objectives(config, eval_metrics)
return objectives
elif 'XGBTrainer' in trainer_type or "StatTrainer" in trainer_type:
del config.trainer.criterion
trainer = hydra.utils.instantiate(
config.trainer,
model=model,
train_dataset=train,
valid_dataset=valid,
)
trainer.train()
preds, labels, ids, weights = evaluator.predict(model)
eval_metrics = evaluator.evaluate(preds, labels, ids, weights)
trainer.logger.log(step=[], data=eval_metrics, verbosity=0)
objectives = get_optimization_objectives(config, eval_metrics)
return objectives
else:
raise AttributeError(f"Unsupported trainer provided: {trainer_type}")
if __name__ == "__main__":
main()
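# Example invocation (config group names depend on the contents of conf/ and
# are illustrative):
#   python launch_training.py model=tft dataset=electricity seed=1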
|
PyTorch/SpeechSynthesis/FastPitch/common/text/zh | zh | pinyin_dict | NIN N IN
FA F A
BAI B AI
YIN Y IN
DE D E
SHEN SH EN
TAN T AN
PAO P AO
WENG W ENG
LAN L AN
CHUAN CH U AN
SEI S EI
DANG D ANG
XUE X VE
YUAN Y V AN
HU H U
CUAN C U AN
BO B O
SHAI SH AI
CHUI CH UI
SHOU SH OU
QIU Q IU
SONG S ONG
KAI K AI
LING L ING
SUO S U O
ZHUAI ZH U AI
ZHEN ZH EN
GENG G ENG
YAN Y AN
CU C U
ZHUA ZH U A
MA M A
SOU S OU
GOU G OU
PU P U
GUA G U A
RONG R ONG
JIAN J I AN
FOU F OU
FO F O
ZHUAN ZH U AN
DIU D IU
TIAN T I AN
QUN Q VN
NE N E
LIN L IN
QIE Q IE
LANG L ANG
CAO C AO
PANG P ANG
GAN G AN
KUI K UI
ROU R OU
NING N ING
NOU N OU
CUI C UI
NA N A
MING M ING
JUAN J V AN
NIAN N I AN
JIONG J I ONG
LE L E
GEN G EN
CHUO CH U O
SANG S ANG
MANG M ANG
GANG G ANG
SHENG SH ENG
KENG K ENG
ANG ^ ANG
ZHONG ZH ONG
PEI P EI
LO L O
BEN B EN
SAN S AN
WAI W AI
BA B A
ZEI Z EI
BANG B ANG
MENG M ENG
HA H A
SHAO SH AO
RENG R ENG
XUAN X V AN
GUAI G U AI
QUAN Q V AN
DIE D IE
CEN C EN
QIONG Q I ONG
QIAO Q I AO
NAN N AN
CANG C ANG
NANG N ANG
LA L A
KU K U
KAO K AO
XI X I
MO M O
CHAN CH AN
DUO D U O
DIAO D I AO
HUN H UN
LOU L OU
HANG H ANG
CENG C ENG
ZHI ZH I
RUAN R U AN
QIANG Q I ANG
MIU M IU
WO W O
GEI G EI
EI ^ EI
CHAI CH AI
ZHUI ZH UI
CHU CH U
YONG Y ONG
SHUO SH U O
DING D ING
CHE CH E
YO Y O
PENG P ENG
RANG R ANG
BU B U
NIU N IU
KE K E
MI M I
GUAN G U AN
RE R E
NI N I
TI T I
DIA D I A
NUO N U O
WANG W ANG
QIAN Q I AN
LUO L U O
YA Y A
CI C I
GUN G UN
GAO G AO
DOU D OU
DAI D AI
BAO B AO
BIN B IN
NAI N AI
SE S E
PA P A
ZAO Z AO
AO ^ AO
NIE N IE
BENG B ENG
ZHU ZH U
JU J V
XIU X IU
XIAN X I AN
RUI R UI
SAI S AI
SHUANG SH U ANG
SHUAI SH U AI
HEN H EN
OU ^ OU
HUA H U A
LONG L ONG
ZI Z I
SHE SH E
JUN J VN
YE Y E
TUI T UI
GUANG G U ANG
MAN M AN
LAI L AI
ZHUN ZH UN
CHUANG CH U ANG
ZUI Z UI
SU S U
TE T E
TAO T AO
CONG C ONG
TONG T ONG
HENG H ENG
ZUO Z U O
LU L U
BAN B AN
PIAO P I AO
XIANG X I ANG
LIANG L I ANG
ZU Z U
NIANG N I ANG
LIU L IU
BIE B IE
CHA CH A
YANG Y ANG
LVE L VE
LENG L ENG
KOU K OU
AN ^ AN
CHUN CH UN
ZAI Z AI
DONG D ONG
SHI SH I
CHAO CH AO
ZHAI ZH AI
RI R I
HUAI H U AI
TOU T OU
SENG S ENG
GUO G U O
NENG N ENG
ZUN Z UN
XIONG X I ONG
ZEN Z EN
TANG T ANG
BIAN B I AN
QU Q V
QI Q I
ZHAN ZH AN
JIAO J I AO
CHENG CH ENG
CHONG CH ONG
KEI K EI
MEI M EI
LV L V
SHUA SH U A
CA C A
DENG D ENG
TING T ING
YAO Y AO
TIAO T I AO
ME M E
CE C E
ZUAN Z U AN
SEN S EN
O ^ O
ZENG Z ENG
RAO R AO
WEI W EI
KUAN K U AN
PING P ING
MAI M AI
HUAN H U AN
DEN D EN
BING B ING
QING Q ING
PIN P IN
GAI G AI
LI L I
ZHENG ZH ENG
ZAN Z AN
BEI B EI
SHU SH U
MU M U
KUO K U O
JIE J IE
CHUAI CH U AI
FAN F AN
PI P I
SHUI SH UI
YING Y ING
QIN Q IN
SHA SH A
KANG K ANG
CHEN CH EN
JIANG J I ANG
RAN R AN
LUAN L U AN
HEI H EI
XING X ING
WAN W AN
TA T A
XU X V
TENG T ENG
ZA Z A
KEN K EN
DAN D AN
TU T U
KUANG K U ANG
JING J ING
REN R EN
CHOU CH OU
KUA K U A
HE H E
DAO D AO
NEI N EI
KUAI K U AI
HAO H AO
MIAO M I AO
YI Y I
ZHAO ZH AO
TUO T U O
ZHEI ZH EI
FU F U
FEN F EN
JIA J I A
WA W A
CUO C U O
WU W U
MEN M EN
XUN X VN
MOU M OU
SHAN SH AN
PAI P AI
GONG G ONG
NONG N ONG
COU C OU
KONG K ONG
HUO H U O
HUANG H U ANG
JIU J IU
HONG H ONG
MIE M IE
HUI H UI
WEN W EN
ZHUO ZH U O
MIAN M I AN
BI B I
ZE Z E
YUN Y VN
GA G A
SUAN S U AN
SUN S UN
MAO M AO
XIA X I A
KA K A
NAO N AO
TIE T IE
GE G E
GUI G UI
LAO L AO
ZOU Z OU
SAO S AO
PO P O
JIN J IN
DUAN D U AN
DU D U
RUN R UN
YUE Y VE
DUN D UN
A ^ A
PIE P IE
SHANG SH ANG
XIN X IN
CAN C AN
PAN P AN
LIE L IE
QIA Q I A
GU G U
ZHE ZH E
ZONG Z ONG
DIAN D I AN
LIA L I A
FENG F ENG
JUE J VE
LIAO L I AO
SA S A
TAI T AI
LEI L EI
SHUN SH UN
HAI H AI
NEN N EN
MIN M IN
PIAN P I AN
CHI CH I
CHANG CH ANG
NIAO N I AO
JI J I
TEI T EI
FANG F ANG
POU P OU
QUE Q VE
ZHOU ZH OU
NV N V
ER ^ ER
YU Y V
XIE X IE
FAI F AI
EN ^ EN
NVE N VE
KAN K AN
LUN L UN
ZHUANG ZH U ANG
HAN H AN
NG N EN
DI D I
SHEI SH EI
RUO R U O
KUN K UN
DUI D UI
TUAN T U AN
ZANG Z ANG
CUN C UN
YOU Y OU
SUI S UI
DEI D EI
RU R U
NU N U
ZHANG ZH ANG
BIAO B I AO
NUAN N U AN
SHUAN SH U AN
XIAO X I AO
TUN T UN
E ^ E
SI S I
HOU H OU
FEI F EI
ZHA ZH A
CAI C AI
KIU K IU
DA D A
PEN P EN
LIAN L I AN
AI ^ AI
|
CUDA-Optimized/FastSpeech/tacotron2 | tacotron2 | utils | # BSD 3-Clause License
# Copyright (c) 2018-2020, NVIDIA Corporation
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""https://github.com/NVIDIA/tacotron2"""
import numpy as np
from scipy.io.wavfile import read
import torch
def get_mask_from_lengths(lengths):
max_len = torch.max(lengths).item()
ids = torch.arange(0, max_len, out=torch.cuda.LongTensor(max_len))
mask = (ids < lengths.unsqueeze(1)).bool()
return mask
def load_wav_to_torch(full_path):
sampling_rate, data = read(full_path)
return torch.FloatTensor(data.astype(np.float32)), sampling_rate
def load_filepaths_and_text(filename, split="|"):
with open(filename, encoding='utf-8') as f:
filepaths_and_text = [line.strip().split(split) for line in f]
return filepaths_and_text
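# The expected layout is one "<audio path><split><text>" record per line, e.g.
# an LJSpeech-style filelist (path and transcript illustrative):
#   wavs/LJ001-0001.wav|Printing, in the only sense with which we are at present concerned...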
def to_gpu(x):
x = x.contiguous()
if torch.cuda.is_available():
x = x.cuda(non_blocking=True)
return torch.autograd.Variable(x)
|
PyTorch/LanguageModeling/BERT/distillation/BERT_6L_768D | BERT_6L_768D | config | {
"attention_probs_dropout_prob": 0.1,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"initializer_range": 0.02,
"intermediate_size": 3072,
"max_position_embeddings": 512,
"num_attention_heads": 12,
"num_hidden_layers": 6,
"type_vocab_size": 2,
"vocab_size": 30528
}
|
PyTorch/LanguageModeling/BART/bart/tokenization | tokenization | tokenization_xlnet | # coding=utf-8
# Copyright (c) 2022 NVIDIA CORPORATION. All rights reserved.
# Copyright 2018 Google AI, Google Brain and Carnegie Mellon University Authors and the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Tokenization classes for XLNet model."""
import os
import unicodedata
from shutil import copyfile
from typing import List, Optional
from bart.tokenization.tokenization_utils import PreTrainedTokenizer
from utils import logging
logger = logging.get_logger(__name__)
VOCAB_FILES_NAMES = {"vocab_file": "spiece.model"}
PRETRAINED_VOCAB_FILES_MAP = {
"vocab_file": {
"xlnet-base-cased": "https://s3.amazonaws.com/models.huggingface.co/bert/xlnet-base-cased-spiece.model",
"xlnet-large-cased": "https://s3.amazonaws.com/models.huggingface.co/bert/xlnet-large-cased-spiece.model",
}
}
PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {
"xlnet-base-cased": None,
"xlnet-large-cased": None,
}
SPIECE_UNDERLINE = "▁"
# Segments (not really needed)
SEG_ID_A = 0
SEG_ID_B = 1
SEG_ID_CLS = 2
SEG_ID_SEP = 3
SEG_ID_PAD = 4
class XLNetTokenizer(PreTrainedTokenizer):
"""
Constructs an XLNet tokenizer. Based on `SentencePiece <https://github.com/google/sentencepiece>`__
This tokenizer inherits from :class:`~transformers.PreTrainedTokenizer` which contains most of the methods. Users
should refer to the superclass for more information regarding methods.
Args:
vocab_file (:obj:`string`):
`SentencePiece <https://github.com/google/sentencepiece>`__ file (generally has a .spm extension) that
contains the vocabulary necessary to instantiate a tokenizer.
do_lower_case (:obj:`bool`, `optional`, defaults to :obj:`True`):
Whether to lowercase the input when tokenizing.
remove_space (:obj:`bool`, `optional`, defaults to :obj:`True`):
Whether to strip the text when tokenizing (removing excess spaces before and after the string).
keep_accents (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether to keep accents when tokenizing.
bos_token (:obj:`string`, `optional`, defaults to "<s>"):
The beginning of sequence token that was used during pre-training. Can be used as a sequence classifier token.
.. note::
When building a sequence using special tokens, this is not the token that is used for the beginning
of sequence. The token used is the :obj:`cls_token`.
eos_token (:obj:`string`, `optional`, defaults to "</s>"):
The end of sequence token.
.. note::
When building a sequence using special tokens, this is not the token that is used for the end
of sequence. The token used is the :obj:`sep_token`.
unk_token (:obj:`string`, `optional`, defaults to "<unk>"):
The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this
token instead.
sep_token (:obj:`string`, `optional`, defaults to "<sep>"):
The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences
for sequence classification or for a text and a question for question answering.
It is also used as the last token of a sequence built with special tokens.
pad_token (:obj:`string`, `optional`, defaults to "<pad>"):
The token used for padding, for example when batching sequences of different lengths.
cls_token (:obj:`string`, `optional`, defaults to "<cls>"):
The classifier token which is used when doing sequence classification (classification of the whole
sequence instead of per-token classification). It is the first token of the sequence when built with
special tokens.
mask_token (:obj:`string`, `optional`, defaults to "<mask>"):
The token used for masking values. This is the token used when training this model with masked language
modeling. This is the token which the model will try to predict.
additional_special_tokens (:obj:`List[str]`, `optional`, defaults to :obj:`["<eop>", "<eod>"]`):
Additional special tokens used by the tokenizer.
Attributes:
sp_model (:obj:`SentencePieceProcessor`):
The `SentencePiece` processor that is used for every conversion (string, tokens and IDs).
"""
vocab_files_names = VOCAB_FILES_NAMES
pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
padding_side = "left"
def __init__(
self,
vocab_file,
do_lower_case=False,
remove_space=True,
keep_accents=False,
bos_token="<s>",
eos_token="</s>",
unk_token="<unk>",
sep_token="<sep>",
pad_token="<pad>",
cls_token="<cls>",
mask_token="<mask>",
additional_special_tokens=["<eop>", "<eod>"],
**kwargs
):
super().__init__(
bos_token=bos_token,
eos_token=eos_token,
unk_token=unk_token,
sep_token=sep_token,
pad_token=pad_token,
cls_token=cls_token,
mask_token=mask_token,
additional_special_tokens=additional_special_tokens,
**kwargs,
)
self._pad_token_type_id = 3
try:
import sentencepiece as spm
except ImportError:
logger.warning(
"You need to install SentencePiece to use XLNetTokenizer: https://github.com/google/sentencepiece"
"pip install sentencepiece"
)
raise
self.do_lower_case = do_lower_case
self.remove_space = remove_space
self.keep_accents = keep_accents
self.vocab_file = vocab_file
self.sp_model = spm.SentencePieceProcessor()
self.sp_model.Load(vocab_file)
@property
def vocab_size(self):
return len(self.sp_model)
def get_vocab(self):
vocab = {self.convert_ids_to_tokens(i): i for i in range(self.vocab_size)}
vocab.update(self.added_tokens_encoder)
return vocab
def __getstate__(self):
state = self.__dict__.copy()
state["sp_model"] = None
return state
def __setstate__(self, d):
self.__dict__ = d
try:
import sentencepiece as spm
except ImportError:
logger.warning(
"You need to install SentencePiece to use XLNetTokenizer: https://github.com/google/sentencepiece"
"pip install sentencepiece"
)
raise
self.sp_model = spm.SentencePieceProcessor()
self.sp_model.Load(self.vocab_file)
def preprocess_text(self, inputs):
if self.remove_space:
outputs = " ".join(inputs.strip().split())
else:
outputs = inputs
outputs = outputs.replace("``", '"').replace("''", '"')
if not self.keep_accents:
outputs = unicodedata.normalize("NFKD", outputs)
outputs = "".join([c for c in outputs if not unicodedata.combining(c)])
if self.do_lower_case:
outputs = outputs.lower()
return outputs
def _tokenize(self, text, sample=False):
""" Tokenize a string. """
text = self.preprocess_text(text)
if not sample:
pieces = self.sp_model.EncodeAsPieces(text)
else:
pieces = self.sp_model.SampleEncodeAsPieces(text, 64, 0.1)
new_pieces = []
for piece in pieces:
if len(piece) > 1 and piece[-1] == str(",") and piece[-2].isdigit():
cur_pieces = self.sp_model.EncodeAsPieces(piece[:-1].replace(SPIECE_UNDERLINE, ""))
if piece[0] != SPIECE_UNDERLINE and cur_pieces[0][0] == SPIECE_UNDERLINE:
if len(cur_pieces[0]) == 1:
cur_pieces = cur_pieces[1:]
else:
cur_pieces[0] = cur_pieces[0][1:]
cur_pieces.append(piece[-1])
new_pieces.extend(cur_pieces)
else:
new_pieces.append(piece)
return new_pieces
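# Worked example of the digit/comma branch above (pieces are illustrative):
# a piece like "9," (trailing comma preceded by a digit) is re-encoded as the
# pieces for "9" plus a separate "," piece, so numbers do not fuse with the
# punctuation that follows them.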
def _convert_token_to_id(self, token):
""" Converts a token (str) in an id using the vocab. """
return self.sp_model.PieceToId(token)
def _convert_id_to_token(self, index):
"""Converts an index (integer) in a token (str) using the vocab."""
return self.sp_model.IdToPiece(index)
def convert_tokens_to_string(self, tokens):
"""Converts a sequence of tokens (strings for sub-words) in a single string."""
out_string = "".join(tokens).replace(SPIECE_UNDERLINE, " ").strip()
return out_string
def build_inputs_with_special_tokens(
self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
) -> List[int]:
"""
Build model inputs from a sequence or a pair of sequence for sequence classification tasks
by concatenating and adding special tokens.
An XLNet sequence has the following format:
- single sequence: ``X <sep> <cls>``
- pair of sequences: ``A <sep> B <sep> <cls>``
Args:
token_ids_0 (:obj:`List[int]`):
List of IDs to which the special tokens will be added
token_ids_1 (:obj:`List[int]`, `optional`):
Optional second list of IDs for sequence pairs.
Returns:
:obj:`List[int]`: list of `input IDs <../glossary.html#input-ids>`__ with the appropriate special tokens.
"""
sep = [self.sep_token_id]
cls = [self.cls_token_id]
if token_ids_1 is None:
return token_ids_0 + sep + cls
return token_ids_0 + sep + token_ids_1 + sep + cls
def get_special_tokens_mask(
self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False
) -> List[int]:
"""
Retrieves sequence ids from a token list that has no special tokens added. This method is called when adding
special tokens using the tokenizer ``prepare_for_model`` methods.
Args:
token_ids_0 (:obj:`List[int]`):
List of ids.
token_ids_1 (:obj:`List[int]`, `optional`):
Optional second list of IDs for sequence pairs.
already_has_special_tokens (:obj:`bool`, `optional`, defaults to :obj:`False`):
Set to True if the token list is already formatted with special tokens for the model
Returns:
:obj:`List[int]`: A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token.
"""
if already_has_special_tokens:
if token_ids_1 is not None:
raise ValueError(
"You should not supply a second sequence if the provided sequence of "
"ids is already formated with special tokens for the model."
)
return list(map(lambda x: 1 if x in [self.sep_token_id, self.cls_token_id] else 0, token_ids_0))
if token_ids_1 is not None:
return ([0] * len(token_ids_0)) + [1] + ([0] * len(token_ids_1)) + [1, 1]
return ([0] * len(token_ids_0)) + [1, 1]
def create_token_type_ids_from_sequences(
self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
) -> List[int]:
"""
Creates a mask from the two sequences passed to be used in a sequence-pair classification task.
An XLNet sequence pair mask has the following format:
0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 2
| first sequence | second sequence | CLS segment ID
if token_ids_1 is None, only returns the first portion of the mask (0's).
Args:
token_ids_0 (:obj:`List[int]`):
List of ids.
token_ids_1 (:obj:`List[int]`, `optional`):
Optional second list of IDs for sequence pairs.
Returns:
:obj:`List[int]`: List of `token type IDs <../glossary.html#token-type-ids>`_ according to the given
sequence(s).
"""
sep = [self.sep_token_id]
cls_segment_id = [2]
if token_ids_1 is None:
return len(token_ids_0 + sep) * [0] + cls_segment_id
return len(token_ids_0 + sep) * [0] + len(token_ids_1 + sep) * [1] + cls_segment_id
def save_vocabulary(self, save_directory):
"""
Save the sentencepiece vocabulary (copy original file) and special tokens file to a directory.
Args:
save_directory (:obj:`str`):
The directory in which to save the vocabulary.
Returns:
:obj:`Tuple(str)`: Paths to the files saved.
"""
if not os.path.isdir(save_directory):
logger.error("Vocabulary path ({}) should be a directory".format(save_directory))
return
out_vocab_file = os.path.join(save_directory, VOCAB_FILES_NAMES["vocab_file"])
if os.path.abspath(self.vocab_file) != os.path.abspath(out_vocab_file):
copyfile(self.vocab_file, out_vocab_file)
return (out_vocab_file,)
TensorFlow/Classification/ConvNets/dataprep | dataprep | imagenet_2012_validation_synset_labels
n01751748
n09193705
n02105855
n04263257
n03125729
n01735189
n02346627
n02776631
n03794056
n02328150
n01917289
n02125311
n02484975
n04065272
n03496892
n02066245
n01914609
n01616318
n02971356
n03126707
n02346627
n02091244
n07742313
n03956157
n01616318
n04380533
n02114548
n02089973
n01729977
n04435653
n02280649
n03444034
n02077923
n09835506
n03478589
n04532106
n01644900
n02666196
n04141327
n01773797
n03125729
n04049303
n02006656
n02097209
n02111277
n03950228
n03393912
n02089973
n03930630
n02640242
n01828970
n01632777
n04372370
n03485794
n02443114
n02930766
n02112018
n13040303
n04485082
n03482405
n02963159
n02093859
n01910747
n01693334
n04371430
n02526121
n01871265
n04532106
n04482393
n04370456
n02927161
n02074367
n01608432
n02966193
n01795545
n02791270
n02087394
n02116738
n02091635
n02895154
n09193705
n02088094
n04200800
n01737021
n02974003
n03032252
n02483708
n01632458
n02992529
n01698640
n02114548
n02497673
n02480855
n04147183
n02487347
n03895866
n02325366
n02033041
n07745940
n02415577
n02951585
n02087394
n04485082
n04505470
n02097658
n04591157
n01770081
n02992211
n03691459
n03594734
n01983481
n03937543
n02105412
n03843555
n02091244
n07831146
n03710637
n03733281
n03782006
n03733131
n03933933
n02980441
n04409515
n02606052
n02226429
n02883205
n02422699
n01614925
n07697537
n02123394
n04252077
n03337140
n02117135
n02107142
n04037443
n02397096
n03187595
n02319095
n07932039
n03372029
n02088466
n02319095
n04125021
n03954731
n09421951
n04487394
n02113624
n03843555
n03485407
n09332890
n03642806
n03710193
n01677366
n01950731
n07714990
n02114855
n02119022
n04086273
n04201297
n03733281
n02100877
n03016953
n03733805
n03063599
n07714990
n03854065
n04149813
n03786901
n03467068
n02087046
n04326547
n02100735
n03775546
n02111500
n02814533
n02097047
n02027492
n02109961
n02389026
n02105855
n02445715
n03259280
n07711569
n03710637
n03670208
n02128757
n04467665
n02114855
n01873310
n03476684
n02093428
n03891251
n02859443
n04125021
n01978287
n02643566
n07697537
n01560419
n03290653
n13037406
n03891332
n02883205
n02106382
n02672831
n04330267
n02489166
n02058221
n03584829
n07565083
n03125729
n02123597
n04536866
n02965783
n09428293
n02965783
n11879895
n01560419
n01775062
n03595614
n02110958
n03709823
n03777754
n02951585
n02100877
n01629819
n02909870
n02101388
n02091244
n01667114
n03998194
n01986214
n04192698
n02128757
n02793495
n09256479
n01443537
n02089973
n01981276
n02837789
n03888605
n03201208
n02480855
n03814639
n04090263
n01986214
n02415577
n01534433
n02093256
n03134739
n03016953
n12620546
n03937543
n02815834
n03776460
n10565667
n03207743
n02992529
n01631663
n03729826
n04033995
n04462240
n01443537
n02091831
n03874293
n03874599
n04238763
n07584110
n02749479
n02110185
n09193705
n04311004
n02788148
n02445715
n06874185
n04074963
n01631663
n03803284
n01828970
n02096437
n04554684
n03599486
n03595614
n02123394
n04515003
n04591157
n04560804
n02794156
n03344393
n02687172
n04328186
n04479046
n03967562
n01440764
n04465501
n03457902
n04532670
n01688243
n01749939
n01768244
n02091831
n02321529
n02939185
n02129604
n12985857
n03485794
n02408429
n01443537
n03590841
n07697537
n04154565
n03443371
n02514041
n09468604
n03769881
n02787622
n02526121
n03888605
n01622779
n01872401
n07745940
n03085013
n02445715
n02120505
n01751748
n04141327
n02443484
n02089078
n01608432
n01514668
n03160309
n04070727
n07715103
n02110958
n03976657
n03902125
n02909870
n01740131
n04532106
n03197337
n02493509
n10148035
n02172182
n02437616
n03062245
n04286575
n03018349
n02951358
n02130308
n04277352
n02096585
n04589890
n02965783
n02978881
n02804414
n02112137
n02007558
n03670208
n02894605
n03657121
n03876231
n02165105
n01669191
n02011460
n03710193
n03796401
n02916936
n03492542
n03998194
n04552348
n01824575
n01917289
n03461385
n03874293
n03272010
n02099712
n02999410
n04179913
n07831146
n02096177
n04350905
n04507155
n03743016
n02105505
n03649909
n03680355
n01910747
n03529860
n02787622
n02012849
n02011460
n02094114
n02950826
n02105855
n09288635
n01773797
n01774750
n04409515
n02497673
n02113799
n02786058
n02443484
n02981792
n03095699
n01664065
n02092002
n07711569
n02219486
n13133613
n02114548
n03529860
n02097298
n13133613
n04355933
n01537544
n01847000
n04428191
n02666196
n02268443
n03291819
n01828970
n04099969
n02747177
n07720875
n02088094
n02113624
n03710637
n03637318
n03942813
n02093859
n03794056
n02930766
n02930766
n04525038
n03796401
n03709823
n02097047
n04604644
n03938244
n01560419
n02097298
n02091635
n04136333
n07718747
n02417914
n03355925
n02445715
n02445715
n03495258
n04447861
n02111500
n03584829
n03977966
n04116512
n04019541
n04200800
n02408429
n02085936
n03992509
n02769748
n04613696
n07716906
n02085782
n07718472
n04398044
n03920288
n01860187
n03272010
n04008634
n04090263
n02028035
n01677366
n13037406
n04067472
n02095889
n04532670
n01582220
n03476684
n02395406
n04487394
n02443484
n02510455
n04550184
n02814860
n12144580
n03126707
n02486410
n02125311
n03777754
n03924679
n04613696
n07875152
n02058221
n03188531
n02777292
n02489166
n02066245
n04579432
n01630670
n02666196
n02091635
n02114548
n02356798
n03201208
n03240683
n03590841
n03018349
n02104029
n04251144
n10148035
n02169497
n02089867
n01734418
n04476259
n02843684
n04008634
n03400231
n02119022
n02137549
n03761084
n02490219
n03840681
n04346328
n01677366
n02102318
n04458633
n04476259
n04209239
n01795545
n10565667
n02114367
n02107574
n03032252
n02104365
n03133878
n04336792
n02112137
n03000684
n04553703
n02102480
n03825788
n01695060
n03250847
n07860988
n04310018
n02071294
n01945685
n01855672
n02037110
n03868863
n04229816
n12057211
n02408429
n02481823
n07716358
n04487394
n03662601
n02979186
n02910353
n04266014
n03895866
n04443257
n02917067
n04149813
n03041632
n02364673
n02999410
n04435653
n04228054
n02814860
n01531178
n03662601
n07880968
n04487081
n07614500
n03532672
n01807496
n02011460
n02074367
n04462240
n02977058
n02281406
n03041632
n04350905
n02788148
n02137549
n04562935
n04590129
n02093991
n03995372
n02111889
n04081281
n02133161
n02006656
n02107908
n04347754
n02950826
n02504013
n04560804
n02088364
n02128385
n02860847
n04399382
n02105412
n02115641
n07753592
n07880968
n03598930
n03724870
n02066245
n02128925
n04465501
n02094258
n02086646
n04141076
n04136333
n13133613
n02342885
n02281406
n03443371
n07613480
n04008634
n04141327
n04347754
n03314780
n02165456
n03930313
n04392985
n01872401
n04204238
n07831146
n02690373
n12144580
n02776631
n02877765
n02108089
n03532672
n03126707
n01560419
n02268853
n03691459
n03404251
n02364673
n02101556
n02326432
n03954731
n07831146
n03584254
n02012849
n03804744
n02128385
n01530575
n03933933
n04409515
n02823428
n01877812
n03920288
n02510455
n02112350
n03594945
n03642806
n02395406
n03452741
n02860847
n03673027
n02102040
n04505470
n04086273
n02099849
n01990800
n03781244
n04461696
n02106166
n04141076
n07717556
n02361337
n03976657
n03832673
n03109150
n01776313
n03788195
n03884397
n04019541
n01693334
n03633091
n02325366
n03623198
n02795169
n01744401
n01955084
n02002556
n07754684
n02174001
n02793495
n02095889
n02484975
n02094433
n09229709
n03207941
n02655020
n03773504
n04367480
n03933933
n01955084
n04355933
n13040303
n02786058
n04090263
n02101006
n02124075
n03720891
n07749582
n04517823
n01534433
n04335435
n03661043
n02101556
n03785016
n03133878
n02113978
n02930766
n02783161
n03958227
n02441942
n02859443
n02096437
n02447366
n07742313
n07583066
n02110063
n03146219
n12998815
n03425413
n02123394
n03594734
n02006656
n02992211
n04442312
n03032252
n01608432
n02927161
n03485794
n07583066
n03347037
n01847000
n04557648
n03478589
n01530575
n02098105
n01755581
n03045698
n02028035
n03538406
n03956157
n01871265
n13044778
n02119789
n07875152
n02107908
n02791124
n03697007
n03207743
n02791270
n02865351
n03345487
n03976467
n03124043
n04252225
n02165105
n03314780
n04040759
n02730930
n02236044
n07873807
n02006656
n02514041
n03534580
n03179701
n04366367
n02138441
n03450230
n01943899
n07836838
n03691459
n04467665
n02115641
n01742172
n02795169
n02481823
n07583066
n02749479
n01665541
n04131690
n03769881
n02009229
n04487081
n02123159
n04542943
n07760859
n02097658
n02113799
n07932039
n02097474
n03793489
n02791124
n04591713
n01735189
n01631663
n02892767
n04458633
n02277742
n07697537
n03781244
n02791270
n03854065
n04356056
n07802026
n03733131
n01980166
n02174001
n07684084
n01981276
n03874293
n03146219
n02099267
n02018207
n04398044
n03832673
n02493509
n03478589
n06359193
n02971356
n02093754
n04487081
n03929855
n03485407
n01930112
n01592084
n02088238
n04613696
n03967562
n03814639
n04311174
n04286575
n03884397
n03534580
n03793489
n02106382
n03045698
n03661043
n03814906
n02669723
n03459775
n03785016
n04584207
n03657121
n03476991
n04243546
n04560804
n03788365
n01796340
n04019541
n03496892
n07711569
n03788195
n02133161
n04548362
n02113712
n03673027
n12144580
n02481823
n02132136
n03956157
n01532829
n04493381
n02094258
n03483316
n01770081
n02006656
n02871525
n01580077
n07730033
n02097474
n02093647
n02088466
n01795545
n07716906
n03481172
n01608432
n02097209
n01629819
n07695742
n02389026
n02977058
n04090263
n04522168
n02871525
n04258138
n02127052
n04476259
n03617480
n04273569
n03485794
n06794110
n03085013
n02974003
n02869837
n02086240
n01685808
n02088466
n03584829
n01514668
n02114367
n03447447
n04435653
n03065424
n01616318
n02841315
n02655020
n03496892
n04040759
n01496331
n02094258
n03787032
n02172182
n01693334
n02168699
n03793489
n07613480
n01824575
n01665541
n04065272
n02699494
n02526121
n01774750
n03126707
n04254777
n02325366
n01665541
n02007558
n01873310
n01734418
n03271574
n01776313
n01644373
n02486410
n02106662
n03125729
n02087394
n02094433
n07684084
n04532670
n01843383
n02835271
n12985857
n04485082
n02167151
n03394916
n01664065
n04286575
n03874293
n02699494
n01601694
n01582220
n02486261
n02268853
n03947888
n13040303
n03967562
n03602883
n01882714
n04505470
n02226429
n04522168
n02481823
n02108422
n03670208
n07718747
n01688243
n02747177
n07248320
n02328150
n02963159
n02117135
n03676483
n06596364
n01775062
n03724870
n03347037
n13133613
n02319095
n03944341
n02088238
n02110185
n01443537
n06794110
n02606052
n02113186
n02704792
n03692522
n03018349
n02095314
n04523525
n02356798
n04228054
n02108000
n04371430
n01770393
n04456115
n02110958
n01631663
n02708093
n02835271
n02807133
n02280649
n02277742
n03857828
n03452741
n03388043
n06596364
n04252225
n04458633
n01689811
n03935335
n01560419
n02500267
n02319095
n02412080
n02096437
n03814639
n03494278
n01518878
n02486261
n01629819
n04606251
n03787032
n01877812
n01773157
n02104365
n02113978
n02123394
n02966687
n01728920
n02916936
n01860187
n03255030
n02011460
n02087394
n02817516
n02085620
n02437616
n02606052
n03447721
n01773157
n02497673
n04380533
n02056570
n01917289
n12267677
n04325704
n02130308
n02730930
n03933933
n02981792
n07892512
n02112018
n02398521
n02009912
n02002724
n02086079
n02100236
n03085013
n02837789
n02018795
n02106382
n02489166
n03937543
n02910353
n07836838
n15075141
n02877765
n03602883
n02233338
n13037406
n01580077
n04069434
n04371774
n03938244
n02326432
n03085013
n02804610
n04141975
n02484975
n02930766
n03000134
n02488702
n02113023
n02088632
n02783161
n02490219
n04505470
n02123394
n04357314
n02825657
n02493509
n03720891
n03673027
n03492542
n01739381
n02105056
n03481172
n03947888
n02099601
n02105505
n01514859
n07871810
n03445924
n12267677
n04536866
n03314780
n12768682
n02028035
n01980166
n02099601
n01981276
n07730033
n02909870
n04179913
n02089973
n02111277
n12057211
n01632458
n02123394
n04350905
n03937543
n02730930
n01795545
n02091244
n01632777
n03584829
n03709823
n02086646
n01824575
n03977966
n03417042
n02892201
n01806143
n02105855
n02115913
n03902125
n01774384
n07880968
n02112137
n09428293
n04116512
n02486410
n03930630
n04090263
n01843383
n07802026
n04429376
n02317335
n02027492
n01818515
n02086646
n02018207
n04371430
n03347037
n03014705
n04125021
n03764736
n02981792
n02114367
n04192698
n04330267
n03729826
n02607072
n02504458
n03769881
n02018207
n03929855
n04591157
n03947888
n04317175
n03125729
n01749939
n04399382
n02276258
n03598930
n02606052
n03089624
n02099601
n03770439
n02655020
n07745940
n02095314
n04336792
n04033995
n02112018
n02132136
n02860847
n03100240
n02966687
n02111129
n04273569
n04149813
n02092002
n03769881
n04599235
n03825788
n04118776
n04336792
n02115641
n01622779
n02909870
n02276258
n02977058
n02326432
n01608432
n03347037
n02978881
n02787622
n02093256
n02101556
n02100735
n02085782
n02342885
n03733281
n02085782
n03706229
n02002724
n13037406
n02422106
n07614500
n02113712
n04336792
n02486261
n02356798
n02268443
n04179913
n04277352
n02346627
n03089624
n02835271
n02086240
n04579432
n03180011
n04285008
n02408429
n04392985
n02091244
n02815834
n02834397
n04009552
n02488291
n03290653
n03325584
n03637318
n02730930
n02865351
n02119789
n03929855
n03676483
n04423845
n03874293
n03908618
n03598930
n02090379
n01944390
n04152593
n09288635
n02066245
n01768244
n03272010
n01531178
n03255030
n03676483
n02002556
n02749479
n02415577
n02403003
n07565083
n02981792
n01776313
n02097474
n02667093
n02096177
n03255030
n01819313
n02791124
n02279972
n04090263
n09193705
n04335435
n03733131
n03250847
n04263257
n02096585
n03976467
n02963159
n04613696
n04310018
n02107574
n03724870
n09428293
n02101006
n04372370
n03930630
n07584110
n01735189
n04599235
n02835271
n04330267
n02108915
n02110185
n07684084
n04204347
n02672831
n03742115
n04131690
n09428293
n04487394
n03710193
n09332890
n03478589
n04486054
n02951358
n09428293
n04596742
n01872401
n04505470
n04154565
n02666196
n02437616
n03724870
n02120079
n01828970
n03141823
n01698640
n03095699
n04099969
n02123045
n04482393
n04026417
n02110806
n04033901
n04041544
n02869837
n04136333
n02112350
n03388043
n03065424
n02128757
n04330267
n02879718
n02859443
n01968897
n01847000
n01871265
n02129165
n02408429
n04263257
n13054560
n02090379
n04553703
n03929660
n01990800
n03494278
n01514859
n02804610
n01773157
n02087046
n07802026
n03777754
n07720875
n01694178
n06794110
n02795169
n07583066
n02094114
n03841143
n01985128
n03776460
n02859443
n02808304
n02092339
n02441942
n02002724
n04296562
n02086910
n02690373
n01616318
n07718472
n02086240
n04049303
n04235860
n06359193
n02110958
n01518878
n02950826
n03447721
n02111129
n04517823
n03769881
n02112350
n07693725
n07747607
n02444819
n02109047
n04485082
n10148035
n03127925
n04328186
n03347037
n02102480
n07614500
n02676566
n04599235
n03534580
n02093256
n03710721
n02167151
n04116512
n04141975
n03877472
n02092339
n03042490
n04604644
n03355925
n04009552
n03598930
n02672831
n03425413
n03649909
n02099429
n01819313
n02640242
n02978881
n03670208
n02342885
n03888257
n03729826
n02457408
n02860847
n09246464
n02097298
n03649909
n04228054
n02113624
n01978287
n03895866
n03393912
n03127925
n03720891
n01774384
n04065272
n03485407
n04033901
n02488291
n12057211
n01774750
n01798484
n01537544
n07720875
n03838899
n04120489
n02264363
n02113978
n02799071
n02114367
n04332243
n03062245
n02077923
n02398521
n04435653
n01692333
n07831146
n04523525
n02342885
n07753275
n01807496
n02098413
n01744401
n07836838
n02104029
n02092339
n02092339
n02115913
n01608432
n03325584
n02066245
n03345487
n03394916
n01773797
n02113186
n02667093
n02124075
n04118538
n02134084
n02317335
n03047690
n03938244
n02219486
n07718747
n02490219
n04326547
n02690373
n07717556
n01580077
n02443484
n04443257
n04033995
n07590611
n02403003
n07768694
n03803284
n04371774
n02802426
n06794110
n04483307
n02791270
n02028035
n03764736
n07860988
n09421951
n03773504
n04152593
n04367480
n02950826
n02168699
n04458633
n01983481
n04404412
n04252225
n04596742
n02480495
n02281787
n01795545
n02089867
n02169497
n02666196
n04311004
n02879718
n03457902
n02074367
n03297495
n02481823
n04485082
n02091244
n07718747
n02102480
n04147183
n03014705
n02814860
n04532670
n02094114
n01532829
n01664065
n04090263
n03995372
n03134739
n06596364
n03710637
n01807496
n02096294
n04026417
n02165105
n03998194
n02112706
n04366367
n02177972
n04152593
n04442312
n01697457
n03775071
n07892512
n02091831
n02101388
n01749939
n03384352
n02484975
n03868242
n01753488
n02687172
n02807133
n02231487
n02018795
n04270147
n03063599
n04591713
n03895866
n03481172
n04456115
n01755581
n02319095
n02526121
n01796340
n02094433
n01558993
n04238763
n03127925
n03017168
n02692877
n04179913
n02791124
n03494278
n06596364
n01751748
n02074367
n03249569
n04357314
n07579787
n04550184
n06596364
n03761084
n07718472
n03376595
n04428191
n01773157
n07248320
n03400231
n04447861
n03854065
n01694178
n02111500
n04111531
n02090622
n03450230
n04536866
n01817953
n02843684
n03776460
n04201297
n04204238
n02094114
n04238763
n01667114
n02116738
n03709823
n04153751
n02422699
n01796340
n07836838
n02027492
n03478589
n01689811
n02110958
n03538406
n03207743
n01669191
n06794110
n02087394
n01641577
n07873807
n03314780
n04591157
n02487347
n04277352
n07749582
n03792782
n03947888
n03792782
n01669191
n02102318
n03788365
n03899768
n04392985
n01629819
n04557648
n02640242
n02325366
n07749582
n04264628
n04487081
n02978881
n03720891
n01494475
n02951358
n01828970
n04286575
n04540053
n04332243
n04367480
n03840681
n02106662
n03376595
n02113186
n03085013
n09246464
n03127747
n04367480
n03290653
n07760859
n02102973
n03290653
n01751748
n02089973
n02086910
n02112350
n03272562
n04456115
n03785016
n02110341
n01728920
n04554684
n02417914
n01756291
n03590841
n01877812
n02113186
n02093256
n02099849
n02397096
n03642806
n02231487
n04179913
n02012849
n02279972
n04447861
n04355933
n01560419
n02445715
n03770679
n03929855
n01688243
n06596364
n07930864
n01945685
n01631663
n03216828
n03995372
n02782093
n01860187
n04443257
n04579432
n07745940
n04146614
n02177972
n04392985
n01644373
n02317335
n04553703
n02138441
n13040303
n01985128
n02134418
n01945685
n02526121
n02317335
n01820546
n04501370
n01560419
n02268443
n03796401
n03916031
n02992211
n03127747
n03180011
n02102480
n04277352
n01776313
n03017168
n02111129
n02190166
n02098413
n02090721
n01776313
n09421951
n02113023
n02672831
n03764736
n04146614
n03347037
n03868242
n02667093
n02093647
n02169497
n02089973
n07747607
n02085782
n02815834
n02105412
n02086910
n04204238
n03530642
n07583066
n04039381
n02965783
n04501370
n04086273
n04263257
n02443484
n04162706
n07613480
n04525038
n04266014
n03721384
n04467665
n04523525
n04162706
n02025239
n04146614
n01677366
n04179913
n04125021
n02917067
n04392985
n04550184
n02090721
n03796401
n03014705
n04344873
n02091635
n01608432
n03690938
n04141975
n01629819
n04523525
n01955084
n01756291
n04443257
n02927161
n07880968
n07836838
n02484975
n02091032
n07714571
n03535780
n04149813
n09468604
n02033041
n03584254
n04550184
n03887697
n03838899
n02174001
n03272010
n03297495
n04074963
n03649909
n03496892
n03467068
n02268853
n03400231
n02093256
n04367480
n02091134
n04118776
n02086646
n07753592
n02504013
n02104365
n02096177
n03961711
n04069434
n03376595
n01817953
n01955084
n02107142
n03344393
n03709823
n02974003
n02090379
n04332243
n03125729
n03935335
n02814860
n01860187
n03220513
n02094114
n03877472
n02009912
n02108000
n02229544
n03697007
n03124170
n02206856
n03841143
n04153751
n01742172
n13133613
n04525305
n01930112
n02795169
n02233338
n02417914
n03935335
n01770393
n02125311
n03482405
n04604644
n02009912
n03791053
n03223299
n03032252
n04501370
n03372029
n03485794
n02110341
n04200800
n02106166
n04592741
n02950826
n04041544
n07831146
n04116512
n01514859
n03868242
n03026506
n02443484
n02701002
n04116512
n02815834
n03929855
n03676483
n01534433
n02701002
n02113978
n04371430
n03991062
n07718472
n02268853
n04264628
n02098105
n07565083
n02112706
n02094114
n02093991
n02488291
n02093859
n03047690
n01682714
n07717410
n01883070
n04562935
n01498041
n07745940
n02109525
n01644900
n01694178
n03063689
n02894605
n01682714
n03544143
n02101556
n02966687
n03485407
n03657121
n02236044
n07860988
n01677366
n07718747
n02690373
n04099969
n03814639
n02098413
n01985128
n02093647
n02504458
n01944390
n03445924
n03866082
n03355925
n02105855
n03041632
n03791053
n03954731
n07695742
n02102040
n03956157
n03983396
n02105855
n03249569
n03976467
n03843555
n02641379
n03272562
n03658185
n03976467
n02398521
n03791053
n03065424
n03759954
n03216828
n03796401
n01980166
n09193705
n01773797
n02129604
n04009552
n02980441
n03188531
n02100735
n07860988
n03929855
n04037443
n03467068
n02094114
n03899768
n04525038
n02074367
n04033901
n02012849
n02009229
n02109961
n03804744
n02396427
n02233338
n03240683
n03393912
n03777568
n02494079
n02106662
n04033995
n02231487
n04355338
n04550184
n02699494
n04118538
n03388043
n02869837
n02097047
n03063689
n01530575
n02091032
n03042490
n03930313
n02264363
n02442845
n02325366
n01883070
n01614925
n03447721
n03444034
n02979186
n02815834
n02123394
n03250847
n02883205
n04554684
n03047690
n01773157
n02172182
n03249569
n04613696
n03692522
n04044716
n12985857
n02342885
n03425413
n02895154
n01704323
n01560419
n02974003
n07695742
n03016953
n03729826
n03250847
n02927161
n02091635
n01990800
n02980441
n02676566
n02114548
n02422699
n04208210
n02109961
n04332243
n04127249
n03871628
n02391049
n01537544
n02124075
n02422106
n01775062
n03188531
n02443114
n01694178
n03063689
n02088364
n04476259
n04442312
n03792972
n07831146
n02483708
n04346328
n04591713
n03794056
n04153751
n03782006
n02058221
n04162706
n04522168
n03673027
n04483307
n03691459
n03478589
n02102318
n07749582
n07730033
n01829413
n01729977
n04501370
n09472597
n03781244
n02134084
n01742172
n03782006
n04553703
n09835506
n03804744
n02088238
n04067472
n03764736
n02992529
n03874599
n03124043
n04065272
n02782093
n03788195
n04389033
n03673027
n04389033
n03775071
n07753113
n12144580
n02013706
n02190166
n04275548
n03250847
n03947888
n01729977
n02138441
n04264628
n03967562
n03445924
n04355338
n02640242
n01440764
n12267677
n02489166
n02165105
n03599486
n03272010
n02018207
n02747177
n04487081
n02119789
n02666196
n02606052
n02086646
n04040759
n01984695
n12998815
n01751748
n04584207
n04149813
n01981276
n02841315
n03777754
n04376876
n02859443
n04389033
n01665541
n04208210
n04041544
n02071294
n13052670
n01616318
n03871628
n02028035
n03110669
n01819313
n04229816
n02769748
n03832673
n02095889
n01806143
n02708093
n07753113
n02804610
n02879718
n03595614
n02769748
n07802026
n04357314
n09288635
n07753592
n04525038
n04590129
n01981276
n01530575
n02006656
n03903868
n02095570
n03602883
n03476991
n04328186
n03617480
n03272562
n02328150
n04536866
n02814860
n03710193
n04263257
n02699494
n04418357
n01496331
n02086079
n03495258
n03417042
n03065424
n03041632
n04467665
n02085936
n03956157
n02110341
n07760859
n03467068
n02825657
n02669723
n07579787
n02097658
n03717622
n03590841
n02268443
n07697313
n02859443
n01622779
n02999410
n01877812
n01744401
n01669191
n04507155
n02108000
n10148035
n04009552
n09421951
n03457902
n02091032
n03759954
n01443537
n02011460
n01984695
n02791270
n03617480
n02089973
n02105641
n03595614
n03207941
n03146219
n04367480
n07695742
n03376595
n09835506
n02342885
n03393912
n04311004
n04589890
n02114367
n02104029
n01945685
n02094114
n01824575
n04380533
n02025239
n03218198
n02110627
n04026417
n02749479
n07613480
n02437312
n03347037
n02403003
n03942813
n03450230
n04252225
n02108000
n03837869
n02165105
n03000247
n04344873
n02504458
n02110185
n01498041
n04270147
n04239074
n03924679
n02086646
n09835506
n03424325
n04370456
n03777754
n03529860
n02102040
n01688243
n02110627
n02100735
n02102177
n04086273
n01883070
n04366367
n02107574
n02102480
n04008634
n02169497
n04141327
n02442845
n03662601
n01855032
n04589890
n02018795
n03271574
n02097298
n03445777
n02102040
n03617480
n02108422
n02097474
n02109525
n02097474
n11879895
n03223299
n02100583
n03840681
n02091032
n01843065
n03769881
n02091467
n02134418
n02109047
n04456115
n03866082
n04239074
n02484975
n04259630
n07760859
n09246464
n01484850
n02443114
n04251144
n03843555
n04131690
n07716906
n03584254
n04033901
n04146614
n03633091
n13037406
n04254680
n07583066
n03483316
n02056570
n02102177
n04355338
n01669191
n04039381
n01532829
n02978881
n03691459
n04118776
n02672831
n06785654
n07749582
n02536864
n02116738
n04239074
n02483708
n03124170
n07930864
n02018207
n04074963
n01514859
n02089867
n03804744
n04116512
n02802426
n03627232
n03787032
n02281406
n07613480
n02526121
n02860847
n01806143
n03706229
n03982430
n04009552
n01616318
n01828970
n03920288
n03680355
n02727426
n02963159
n02102973
n04209133
n01798484
n02190166
n02091635
n02089078
n04371774
n04515003
n02655020
n02104029
n01877812
n02794156
n02974003
n02096585
n04525305
n02672831
n02113712
n02917067
n02096437
n07745940
n02326432
n03314780
n02236044
n02102973
n02093428
n03297495
n03676483
n03775071
n04536866
n04554684
n03400231
n04346328
n01530575
n04133789
n03160309
n01930112
n03494278
n03063599
n03891332
n04476259
n02410509
n03417042
n07753113
n03498962
n03991062
n04086273
n01739381
n07753275
n03065424
n03476991
n07565083
n01608432
n04258138
n03803284
n02120079
n02454379
n01537544
n02492035
n02219486
n01735189
n03594734
n02442845
n04485082
n03599486
n02086079
n03995372
n04501370
n02113712
n02102480
n03599486
n04162706
n03868242
n04209133
n02791124
n01819313
n02116738
n02894605
n03764736
n03476684
n02123159
n02325366
n03457902
n02123597
n09399592
n02488291
n03788365
n01770081
n01498041
n02110341
n02834397
n02391049
n02113023
n02099712
n01739381
n02980441
n02027492
n03208938
n07734744
n02027492
n02108000
n03902125
n04044716
n09428293
n01981276
n02869837
n03425413
n03085013
n03804744
n02443114
n01983481
n02088466
n02077923
n01740131
n09468604
n02783161
n03888257
n02797295
n04252225
n01622779
n01669191
n03710637
n01669191
n01983481
n02108422
n04111531
n04179913
n04204238
n04389033
n02087046
n01872401
n02692877
n01632777
n02640242
n02927161
n02814860
n03792972
n04039381
n02480855
n03599486
n04326547
n03691459
n04592741
n03014705
n01582220
n13052670
n02802426
n01797886
n04263257
n04350905
n03372029
n02484975
n09428293
n03887697
n02112350
n03110669
n02910353
n02096294
n02102177
n02115913
n02804610
n04239074
n04005630
n04118538
n04067472
n02128757
n02097658
n02099849
n01882714
n02494079
n03379051
n02808440
n04392985
n02114548
n02206856
n03976657
n01729322
n07831146
n01883070
n02361337
n02128757
n02097130
n04447861
n13052670
n02096177
n03691459
n02134084
n02494079
n03642806
n04136333
n02268853
n02417914
n03891332
n09246464
n03032252
n02825657
n03498962
n03160309
n04026417
n04296562
n03534580
n03216828
n07880968
n03393912
n02948072
n04560804
n04152593
n04509417
n03884397
n02129604
n01944390
n04310018
n04086273
n07584110
n04258138
n04264628
n13040303
n02109525
n04462240
n02791270
n03384352
n04070727
n02108422
n03485407
n02093647
n03000134
n03089624
n07615774
n03956157
n02776631
n01729977
n03868242
n03899768
n01871265
n03180011
n03630383
n01968897
n02939185
n02097474
n04154565
n04462240
n02028035
n04041544
n02111129
n03026506
n04389033
n02808440
n03124170
n02129165
n02776631
n04259630
n03902125
n07760859
n01744401
n02128757
n02843684
n02091134
n02256656
n03814639
n02666196
n02497673
n13054560
n01914609
n01580077
n02089867
n03630383
n02025239
n02123597
n02807133
n03673027
n04317175
n15075141
n01795545
n03888257
n03062245
n04209133
n01531178
n02410509
n04162706
n03814639
n02102177
n04399382
n03220513
n06874185
n04152593
n07880968
n02066245
n01735189
n03271574
n01592084
n04355933
n02085936
n01978455
n04597913
n07871810
n02093859
n01773549
n03126707
n03452741
n02027492
n02408429
n01985128
n03670208
n04458633
n04273569
n03785016
n01751748
n03188531
n02917067
n02086240
n03770439
n03240683
n03920288
n03954731
n02109525
n03016953
n02107683
n01665541
n04310018
n03485407
n03187595
n03814639
n02095570
n01968897
n03874599
n02493509
n02130308
n02749479
n01945685
n02536864
n04154565
n02328150
n03908618
n01737021
n02408429
n02231487
n04131690
n03970156
n01530575
n04336792
n02951358
n02879718
n03944341
n03788195
n02895154
n03838899
n02037110
n04009552
n03141823
n02102973
n07730033
n01984695
n07693725
n04065272
n01631663
n02699494
n03095699
n02112350
n04019541
n09835506
n01484850
n07697313
n01729322
n03085013
n04041544
n02396427
n02879718
n03891332
n04590129
n03271574
n02454379
n01944390
n02099267
n02097658
n07720875
n02484975
n03733805
n02086240
n04204238
n03483316
n03201208
n02095570
n01630670
n03201208
n01755581
n02879718
n03065424
n02037110
n02108915
n02807133
n04023962
n01669191
n02098286
n04252225
n02115641
n02281787
n06794110
n02391049
n04486054
n01817953
n04041544
n04277352
n02107574
n09193705
n04371774
n04372370
n03724870
n03388183
n04371430
n02788148
n01817953
n02699494
n07730033
n09468604
n04254777
n04501370
n03637318
n02782093
n04152593
n01882714
n02916936
n03661043
n04336792
n02422699
n04019541
n01664065
n03325584
n03976657
n04423845
n04404412
n03527444
n02123045
n02094114
n01558993
n03062245
n02113712
n03662601
n03065424
n03388183
n03447721
n01667778
n03584254
n03000247
n07718747
n01737021
n02676566
n01795545
n07860988
n04086273
n04332243
n03447721
n01829413
n02236044
n02165105
n01796340
n02092339
n01443537
n04370456
n03961711
n07579787
n01753488
n02708093
n02111277
n01774750
n04286575
n02483708
n02002724
n02536864
n03400231
n03485794
n02480495
n02509815
n04111531
n07716358
n01968897
n04579145
n02892201
n02091134
n04118776
n03249569
n01601694
n04522168
n02441942
n03271574
n02692877
n03930313
n02100735
n04428191
n03706229
n02119789
n02111277
n01629819
n04476259
n03958227
n03240683
n02504458
n04461696
n09229709
n01728920
n02422106
n03450230
n02268853
n03902125
n03868863
n09428293
n04482393
n03680355
n01744401
n12620546
n02002556
n04136333
n02447366
n02226429
n03249569
n02281406
n03721384
n03874599
n02951585
n04074963
n02480495
n03929855
n03016953
n03376595
n07747607
n15075141
n02085620
n04141975
n03733805
n03670208
n02085620
n01491361
n03803284
n02415577
n07714571
n03929855
n13037406
n01740131
n01580077
n03891251
n02128925
n01664065
n02090379
n07920052
n02279972
n02490219
n02906734
n01914609
n01704323
n02105412
n03492542
n04482393
n02788148
n01985128
n03388549
n04251144
n02939185
n02114548
n07836838
n10148035
n03976467
n03447721
n02006656
n07802026
n04370456
n02417914
n01776313
n02112018
n03938244
n02536864
n07802026
n04501370
n02963159
n03759954
n02028035
n04044716
n02123394
n02823428
n01491361
n04008634
n01877812
n07615774
n09256479
n01833805
n04127249
n04507155
n03673027
n01882714
n03697007
n03637318
n04332243
n12267677
n07714571
n03485794
n04004767
n02795169
n02120505
n02086646
n02107908
n03888257
n01795545
n03272010
n07714571
n02097047
n03874293
n02391049
n01855672
n01871265
n04208210
n02487347
n02013706
n02096051
n03598930
n03873416
n02871525
n02102973
n03710637
n01773157
n03208938
n04325704
n02002724
n02137549
n02125311
n01440764
n01806567
n03345487
n04209239
n07860988
n07802026
n07714571
n12768682
n02108422
n01770393
n03124043
n04023962
n02105056
n04476259
n02871525
n03598930
n02206856
n03223299
n02259212
n02607072
n02834397
n02364673
n03131574
n02802426
n02117135
n04370456
n01829413
n04033901
n02123159
n02794156
n02132136
n02883205
n07720875
n03920288
n02892201
n04285008
n03345487
n03661043
n04423845
n02013706
n01924916
n03095699
n09428293
n04153751
n02865351
n03384352
n02786058
n02099429
n03014705
n02113712
n01833805
n03924679
n03937543
n02892767
n01819313
n02109047
n01694178
n01729322
n02808440
n04266014
n01978287
n04111531
n04540053
n02100735
n03935335
n04372370
n03930630
n02443114
n03854065
n03724870
n09193705
n02640242
n03967562
n07711569
n04147183
n03710721
n02965783
n02951585
n01582220
n03014705
n02643566
n01739381
n03814906
n01882714
n01729322
n02860847
n04350905
n01697457
n03220513
n04311004
n03877472
n04209239
n04149813
n03770679
n04548362
n07930864
n03661043
n03400231
n02930766
n04613696
n03866082
n01990800
n01534433
n03947888
n02492660
n01985128
n03793489
n03977966
n01795545
n04086273
n01688243
n02423022
n04277352
n03877472
n03208938
n04476259
n04550184
n03063599
n04523525
n02123597
n02708093
n02134418
n02086079
n11879895
n03676483
n02107574
n02113978
n03764736
n03642806
n01748264
n02167151
n04612504
n02817516
n02051845
n03724870
n02077923
n01443537
n03065424
n02105505
n02051845
n02087394
n01735189
n04310018
n01632458
n02509815
n02093859
n01669191
n03868242
n03400231
n02423022
n02090622
n03146219
n02397096
n03532672
n02013706
n01622779
n02483708
n03187595
n02114712
n03131574
n03476991
n03838899
n02105162
n04604644
n01689811
n02113624
n03691459
n15075141
n01773797
n01491361
n04209133
n04476259
n03444034
n02488291
n03485407
n01630670
n04599235
n02174001
n02834397
n02509815
n03538406
n03535780
n02105855
n04501370
n02098105
n03763968
n03095699
n04591713
n02363005
n03599486
n01491361
n02090622
n03590841
n03832673
n02013706
n06874185
n06596364
n04074963
n04389033
n02447366
n01631663
n02841315
n03733805
n03146219
n02974003
n03947888
n02095570
n02422106
n04049303
n02396427
n03891251
n02422106
n04486054
n02091831
n07760859
n03179701
n03947888
n03692522
n02097298
n03602883
n02974003
n02951585
n04141327
n04357314
n02786058
n02268853
n04596742
n03788365
n02111277
n02104365
n03584254
n04509417
n03494278
n02939185
n02363005
n03047690
n04366367
n04409515
n04380533
n03187595
n01882714
n03680355
n03124170
n01986214
n04004767
n01833805
n04141076
n02033041
n03109150
n04560804
n07930864
n02114548
n02877765
n02093754
n01737021
n02093647
n03794056
n01843383
n01978287
n01669191
n02870880
n02071294
n02098286
n04120489
n04239074
n01537544
n02504013
n03929855
n09193705
n03534580
n03018349
n04179913
n01735189
n01665541
n12768682
n02669723
n03930313
n04200800
n02363005
n04552348
n03992509
n02123159
n04505470
n01518878
n01742172
n02445715
n03584254
n02101556
n02398521
n02106166
n04372370
n04346328
n02109047
n03498962
n01980166
n07753275
n04447861
n09332890
n04417672
n07248320
n02412080
n03218198
n04428191
n04447861
n04557648
n01677366
n01774750
n09399592
n02859443
n04456115
n02018795
n03935335
n04465501
n02112706
n02799071
n07684084
n01614925
n02167151
n04606251
n04317175
n04311004
n02077923
n04326547
n02483708
n02963159
n07565083
n04557648
n02397096
n04133789
n02229544
n04317175
n07749582
n03803284
n04456115
n01828970
n02408429
n01632458
n03028079
n03291819
n01773797
n02096585
n02110341
n01669191
n01986214
n03742115
n01910747
n02966687
n02025239
n07615774
n02090721
n01855672
n02965783
n03924679
n11879895
n02113186
n04270147
n02804610
n06359193
n02965783
n03777754
n09399592
n01693334
n04033901
n02098413
n01981276
n03657121
n02096437
n03841143
n02123394
n02447366
n03345487
n02963159
n01580077
n03481172
n02483362
n02894605
n02109525
n04525038
n01917289
n03983396
n04462240
n04153751
n03992509
n02906734
n03290653
n02017213
n02808440
n04515003
n02422106
n02115913
n03720891
n10148035
n02794156
n02096294
n03220513
n02437312
n02058221
n04540053
n07753592
n02105641
n04325704
n04447861
n07695742
n03666591
n03642806
n01910747
n03733281
n01768244
n03888605
n13133613
n03590841
n03127925
n02488291
n04208210
n04592741
n04557648
n02169497
n01773549
n02672831
n03742115
n01983481
n02113978
n03494278
n02490219
n02488291
n03062245
n02167151
n02676566
n04392985
n03877472
n02168699
n02488291
n02840245
n03014705
n04044716
n02119022
n01824575
n02840245
n04023962
n03032252
n02486410
n03197337
n02974003
n04086273
n02441942
n03496892
n03721384
n03538406
n03041632
n02927161
n02408429
n03759954
n03690938
n01930112
n01744401
n02992529
n03873416
n07615774
n02012849
n03777568
n03676483
n01968897
n03866082
n04005630
n04285008
n02841315
n02106030
n02276258
n02422106
n03649909
n03017168
n02097474
n02948072
n02256656
n04179913
n09835506
n02111889
n02988304
n07836838
n02051845
n02971356
n02640242
n03065424
n04201297
n02281406
n02134418
n02500267
n02895154
n02870880
n03617480
n02415577
n03733131
n03594734
n04152593
n04258138
n04286575
n04336792
n02484975
n04041544
n04081281
n03291819
n04584207
n02100877
n03459775
n01498041
n04429376
n04252077
n04515003
n02108089
n03876231
n03838899
n07716358
n02025239
n02965783
n04033901
n03841143
n02102318
n03888605
n03777568
n04350905
n02870880
n04277352
n07720875
n02317335
n02504458
n02488291
n02137549
n02490219
n04428191
n03662601
n04532670
n02105412
n02091831
n04154565
n01531178
n07753275
n02117135
n01882714
n03272010
n03759954
n03866082
n03992509
n02137549
n01537544
n01494475
n03179701
n01694178
n04554684
n04204347
n11879895
n04366367
n04371430
n12057211
n02730930
n03461385
n01728572
n01688243
n04141975
n02174001
n04310018
n02077923
n02105505
n03250847
n01776313
n04532106
n02346627
n04493381
n07742313
n04335435
n02112018
n02097298
n04254120
n02231487
n03394916
n01806143
n04311004
n03216828
n07615774
n07614500
n07768694
n07248320
n03594734
n04008634
n02091134
n02606052
n04310018
n07714990
n01945685
n02326432
n01704323
n01944390
n01514668
n01514668
n01740131
n04356056
n03492542
n02643566
n03759954
n03854065
n03781244
n03125729
n02087394
n02093754
n02802426
n03527444
n07747607
n03394916
n01644373
n02823428
n02106550
n03954731
n01944390
n09472597
n03126707
n02102973
n03443371
n03529860
n02489166
n04606251
n04371774
n03197337
n04252225
n01986214
n03841143
n02111129
n04251144
n02782093
n03786901
n04542943
n03196217
n01735189
n03125729
n02089867
n04009552
n02860847
n02229544
n01871265
n03930313
n04296562
n03388549
n02437616
n02423022
n02190166
n04522168
n04136333
n02009229
n07716358
n01798484
n01990800
n04525038
n07754684
n01582220
n03673027
n02977058
n04317175
n03495258
n02692877
n02089973
n01843065
n03584254
n02802426
n02364673
n01807496
n02172182
n03742115
n02687172
n02769748
n07716358
n03028079
n02107142
n02749479
n02417914
n04296562
n01829413
n01698640
n03935335
n02096294
n02112706
n02692877
n01740131
n07754684
n04136333
n02112137
n02326432
n02113624
n07715103
n02484975
n03781244
n01630670
n02701002
n03776460
n01978455
n01755581
n01819313
n03838899
n04146614
n04251144
n02113023
n02483362
n04456115
n02101006
n02992211
n02037110
n03045698
n02963159
n03249569
n06359193
n03196217
n01693334
n02085936
n03697007
n02092002
n02099712
n02793495
n03710721
n02102318
n03895866
n02097209
n03127747
n01950731
n02106166
n01443537
n03372029
n04229816
n01990800
n04258138
n03637318
n03633091
n03770439
n01818515
n04069434
n02110063
n01664065
n02504458
n01641577
n04562935
n03825788
n03873416
n02484975
n01984695
n03761084
n02892201
n04392985
n04357314
n02097130
n03394916
n03124170
n03938244
n01582220
n04133789
n07871810
n02114855
n02445715
n03017168
n01729977
n02101006
n04153751
n07730033
n02802426
n02130308
n02096585
n01860187
n01980166
n02825657
n03450230
n04037443
n04090263
n02361337
n02823750
n02843684
n03372029
n01749939
n02808440
n03384352
n02129165
n02095570
n02916936
n02098105
n02093256
n03445777
n02111500
n04553703
n03871628
n03876231
n03062245
n03207941
n04428191
n02408429
n04005630
n02777292
n03877845
n04599235
n02514041
n04081281
n02111889
n03208938
n02105855
n10565667
n02493793
n02676566
n02219486
n04147183
n01531178
n04542943
n02492660
n04235860
n02321529
n01687978
n02066245
n01818515
n03461385
n03710637
n03854065
n01872401
n01847000
n03690938
n06596364
n07932039
n02102973
n01806567
n02106382
n15075141
n02109047
n02087394
n01774750
n02128385
n07871810
n02086240
n04209239
n07749582
n04392985
n02058221
n01644373
n03127925
n03690938
n04485082
n03388183
n02110627
n02165105
n03785016
n02259212
n02108915
n02099267
n04044716
n01990800
n01986214
n01632777
n01580077
n02106030
n01632458
n03337140
n01695060
n09399592
n04116512
n03443371
n02097658
n04039381
n02422699
n02105855
n03792782
n02229544
n01950731
n02256656
n03916031
n01534433
n03791053
n04200800
n03314780
n04120489
n04584207
n01820546
n04125021
n02930766
n02093647
n02910353
n03452741
n03482405
n04380533
n01622779
n07768694
n03042490
n03461385
n04285008
n04540053
n02099267
n12057211
n04118776
n04162706
n12620546
n01534433
n01675722
n02089078
n03290653
n02883205
n07697537
n03393912
n02113186
n03014705
n04435653
n03590841
n03773504
n02782093
n02980441
n04239074
n04228054
n03877845
n04023962
n04404412
n02088238
n03617480
n03670208
n09229709
n02971356
n04553703
n01748264
n02091467
n07697537
n02113186
n07615774
n02328150
n02883205
n07579787
n01514668
n03877845
n02108915
n07760859
n02125311
n03899768
n01924916
n02487347
n02979186
n03594945
n03895866
n02441942
n13040303
n03710193
n03709823
n03544143
n02843684
n02085782
n02088466
n01910747
n04599235
n01847000
n02423022
n03476991
n02690373
n07730033
n03733281
n02129604
n02027492
n04443257
n03977966
n03992509
n02108422
n07875152
n03793489
n03127925
n04579145
n02395406
n02119022
n03706229
n03902125
n03777568
n02125311
n04458633
n02672831
n01784675
n02138441
n04328186
n02120505
n01644373
n03544143
n01818515
n03877472
n04044716
n04009552
n03220513
n04067472
n02172182
n02823750
n02317335
n04467665
n02229544
n04049303
n02116738
n07584110
n02018795
n03930313
n02480495
n02172182
n09399592
n01530575
n02971356
n02105641
n01698640
n04553703
n02280649
n01807496
n02504458
n03617480
n03884397
n02011460
n02704792
n03393912
n01667114
n03598930
n01775062
n07717410
n04118776
n03218198
n03255030
n02111129
n02892201
n03444034
n03692522
n02364673
n07718747
n04418357
n04235860
n03000684
n03929660
n03670208
n01560419
n02494079
n03197337
n01737021
n07697313
n02127052
n03764736
n04270147
n02097474
n04204347
n03291819
n03134739
n02086240
n03691459
n01924916
n04550184
n02093754
n03110669
n02643566
n02108422
n02795169
n02483362
n03983396
n02093647
n02815834
n04069434
n03930313
n02326432
n02086079
n03958227
n04258138
n03498962
n03697007
n03126707
n02980441
n03530642
n02086910
n02087394
n02280649
n04285008
n02093256
n01950731
n03733131
n04277352
n02086240
n03544143
n03782006
n01632777
n02086646
n03297495
n09246464
n02123597
n02687172
n04487081
n02236044
n03710193
n02607072
n02788148
n01776313
n04376876
n02102973
n07873807
n03372029
n02104029
n02669723
n01693334
n12985857
n03785016
n02066245
n01698640
n04086273
n03047690
n04026417
n01773797
n03742115
n02018207
n01978455
n02988304
n03595614
n02965783
n02992529
n01773157
n03417042
n03376595
n04435653
n07711569
n03970156
n02877765
n04111531
n09256479
n02641379
n04179913
n02113023
n03977966
n04525038
n02190166
n04070727
n02111277
n02128757
n01784675
n02412080
n03146219
n03485794
n01773157
n02119022
n02704792
n01737021
n03697007
n03450230
n01770081
n03792782
n02089867
n02817516
n03141823
n01773157
n07860988
n02317335
n04442312
n04428191
n04049303
n12620546
n04591157
n03980874
n03314780
n02514041
n03376595
n01774384
n01774384
n04579432
n04336792
n01872401
n02483708
n03127925
n03314780
n03843555
n01770081
n02480855
n04118776
n01910747
n03126707
n02233338
n02114855
n02808304
n02107683
n03590841
n01737021
n01514859
n04346328
n02102480
n02093754
n09472597
n09332890
n03630383
n02492035
n04026417
n02110185
n03125729
n04465501
n07695742
n03775546
n02930766
n07753275
n07684084
n04486054
n01677366
n03127747
n02917067
n04347754
n02704792
n07583066
n07714990
n02111500
n03085013
n02233338
n03977966
n03876231
n07760859
n03623198
n02268853
n07730033
n02097047
n02981792
n01984695
n04584207
n01665541
n01734418
n02100877
n03109150
n02099712
n01855672
n02486410
n02099267
n03804744
n04179913
n02091032
n04200800
n04127249
n01833805
n01855672
n02909870
n04423845
n03345487
n04456115
n04517823
n07714990
n03492542
n01531178
n07892512
n01534433
n03982430
n04116512
n02097130
n04612504
n03146219
n02097130
n04517823
n07684084
n01978455
n02236044
n01798484
n04200800
n01985128
n09468604
n02268853
n02090622
n03000684
n04447861
n04154565
n02840245
n03126707
n02391049
n04532106
n01728572
n03124043
n01773549
n02480855
n07860988
n02105056
n03888605
n02116738
n02804610
n02113799
n03899768
n01729322
n07873807
n02116738
n02795169
n02256656
n07720875
n03584829
n02097209
n02092002
n07614500
n03599486
n02825657
n02966687
n04428191
n02488702
n01774384
n03908618
n03814639
n02444819
n02825657
n02325366
n03394916
n02077923
n03709823
n04579432
n03967562
n01514668
n04548280
n03899768
n02892201
n01704323
n01484850
n03535780
n03775546
n03337140
n01514859
n01580077
n01580077
n04509417
n03977966
n02115641
n07697313
n07753275
n04542943
n02910353
n02087046
n04443257
n03788365
n04429376
n01484850
n02843684
n04479046
n01990800
n09193705
n02115641
n01773549
n09246464
n03956157
n03065424
n02174001
n01824575
n02099267
n02093647
n03133878
n01580077
n01622779
n03271574
n07768694
n04376876
n01877812
n03110669
n01728920
n04141327
n04389033
n02096294
n02492035
n03876231
n07716906
n02097474
n02086240
n02708093
n02105641
n01984695
n03125729
n03944341
n03450230
n02109525
n04389033
n07760859
n01704323
n04540053
n02823428
n02115641
n03733281
n02093754
n01532829
n07802026
n09472597
n02091134
n03041632
n04372370
n01608432
n04265275
n02804414
n03109150
n04328186
n02107312
n03100240
n03250847
n03393912
n02090622
n02840245
n02870880
n04562935
n02397096
n03995372
n02106662
n02096177
n02493509
n02965783
n01981276
n01990800
n01698640
n02088238
n02107908
n09399592
n02790996
n02091134
n04252225
n02447366
n03179701
n02123394
n02974003
n03124170
n03045698
n03271574
n04067472
n01494475
n01984695
n02321529
n03062245
n07892512
n02123045
n02099849
n02672831
n03854065
n02825657
n01644900
n07745940
n04366367
n09288635
n03447447
n03124043
n12267677
n02091244
n02111277
n02088632
n12985857
n04517823
n03594945
n04049303
n03908714
n03697007
n07714571
n01986214
n03014705
n04238763
n02950826
n01755581
n02108089
n02111500
n02028035
n03425413
n02276258
n03690938
n03478589
n04579432
n04209133
n02492035
n04479046
n03131574
n04026417
n01981276
n01514668
n02643566
n03791053
n02870880
n04235860
n06596364
n04019541
n09246464
n03065424
n13054560
n04597913
n02111500
n04252077
n03857828
n02100236
n04442312
n02363005
n04040759
n03127925
n04033995
n03662601
n02966193
n03761084
n03838899
n04081281
n04243546
n04252077
n04487081
n04417672
n03662601
n03476991
n01829413
n07614500
n02701002
n07754684
n04258138
n01744401
n03259280
n02676566
n03017168
n01817953
n04049303
n01692333
n02108551
n03134739
n02410509
n03871628
n04525305
n02093754
n04461696
n04523525
n11939491
n04612504
n03706229
n02167151
n01582220
n03692522
n03595614
n02823428
n03950228
n04399382
n03877845
n04596742
n04005630
n03724870
n03445924
n07614500
n01883070
n03710637
n04120489
n03127925
n03249569
n02879718
n04562935
n03630383
n02106662
n02097474
n02114855
n09332890
n02096051
n03995372
n03016953
n03447447
n10565667
n07579787
n02102040
n02097298
n01514668
n04332243
n03770679
n02102040
n01616318
n01694178
n02817516
n02086240
n03787032
n01582220
n02097130
n03690938
n02825657
n02106662
n02490219
n02514041
n03958227
n03658185
n03187595
n02107908
n07734744
n02093859
n02011460
n04447861
n02640242
n02793495
n02514041
n01534433
n02132136
n02108422
n01768244
n04399382
n01734418
n02037110
n02444819
n03272562
n02906734
n01740131
n03325584
n03598930
n02277742
n03443371
n03447721
n02097130
n04347754
n03903868
n03529860
n06785654
n01985128
n02892767
n02074367
n02445715
n03131574
n02892201
n02114548
n02096294
n03787032
n03776460
n02870880
n04347754
n03930313
n02095889
n02124075
n01641577
n07753592
n02100583
n04591157
n02488291
n03690938
n03791053
n02860847
n04612504
n01677366
n02112350
n03062245
n02909870
n09428293
n01860187
n02999410
n13044778
n04070727
n02105855
n01950731
n04443257
n02110341
n04265275
n04273569
n03000247
n01675722
n03838899
n13040303
n03016953
n03793489
n02119022
n04366367
n03388549
n06874185
n02980441
n03676483
n04065272
n02102040
n04501370
n01740131
n04162706
n04325704
n01443537
n02672831
n02101006
n04417672
n01990800
n02133161
n02264363
n04548280
n03935335
n02906734
n01985128
n02107574
n03125729
n03208938
n02074367
n03133878
n02085782
n02607072
n03388043
n02096585
n07693725
n02786058
n01443537
n01873310
n02791124
n04325704
n03530642
n04147183
n02484975
n02091635
n03100240
n02879718
n02093991
n11879895
n01737021
n13054560
n01945685
n04356056
n02342885
n04192698
n04536866
n04435653
n01829413
n01496331
n03887697
n03770679
n12057211
n12985857
n04266014
n02916936
n04429376
n02229544
n03763968
n03595614
n02837789
n02109047
n02106030
n03180011
n02102973
n02865351
n02074367
n02169497
n02087046
n03141823
n02124075
n02437312
n07892512
n01776313
n02641379
n01644900
n03042490
n03630383
n03785016
n07730033
n03544143
n02007558
n02109047
n02910353
n02107312
n02389026
n01698640
n03633091
n04442312
n07248320
n04525038
n03459775
n03297495
n03676483
n03476991
n02097658
n03888257
n02115913
n01532829
n02085936
n01532829
n02107312
n02403003
n03933933
n02483362
n02105162
n02066245
n01518878
n01685808
n03782006
n07695742
n09835506
n04141076
n02454379
n02107683
n03874293
n02177972
n02106166
n04590129
n03388549
n04399382
n02096585
n02093256
n02319095
n04560804
n02089973
n03223299
n02091244
n02089867
n04335435
n03825788
n02056570
n01669191
n02113978
n03141823
n02640242
n02841315
n04146614
n03400231
n02490219
n03791053
n07880968
n02025239
n03873416
n02437616
n03220513
n02089973
n03045698
n02100735
n04228054
n06785654
n04554684
n03595614
n03933933
n03954731
n02110806
n02056570
n04476259
n03032252
n02445715
n03895866
n02317335
n04479046
n02782093
n02172182
n02417914
n03041632
n04507155
n02672831
n02108000
n07714990
n03532672
n02123597
n03218198
n02091134
n02825657
n02916936
n03874599
n03876231
n03160309
n04118538
n03259280
n03670208
n07745940
n03733805
n01669191
n03404251
n07718747
n07831146
n02403003
n02883205
n02415577
n01784675
n02492035
n03599486
n01877812
n01877812
n03498962
n04355338
n03617480
n03404251
n02277742
n02169497
n02113624
n04067472
n04465501
n04335435
n02444819
n09421951
n04591157
n01622779
n03425413
n02346627
n04162706
n03874293
n02138441
n04005630
n03769881
n03942813
n04285008
n02114855
n02114712
n02708093
n03124170
n01498041
n07613480
n02363005
n03355925
n13054560
n03180011
n04552348
n02423022
n04525038
n02504013
n02107312
n02091467
n02101006
n03721384
n07695742
n02823428
n04589890
n04584207
n04111531
n03160309
n01531178
n02123394
n02777292
n04208210
n01667114
n01667114
n04597913
n03529860
n03450230
n02123045
n12768682
n01924916
n02536864
n04442312
n02747177
n07831146
n02951358
n03857828
n03482405
n03028079
n04040759
n02417914
n01689811
n03188531
n04070727
n07720875
n02168699
n11939491
n01704323
n03223299
n01930112
n02747177
n03903868
n02093428
n01728572
n03459775
n04409515
n03977966
n03220513
n04355933
n03662601
n03916031
n07836838
n07714571
n03891332
n02105251
n03028079
n02117135
n02096585
n04458633
n02883205
n01818515
n01641577
n04070727
n02093428
n03494278
n03255030
n03769881
n07716358
n03877845
n07760859
n03495258
n04370456
n02091134
n03874293
n03026506
n03259280
n02097209
n03873416
n07760859
n02108422
n01872401
n01981276
n04153751
n02110185
n02095570
n01496331
n04285008
n03075370
n02815834
n09256479
n02092339
n02808304
n09428293
n02101006
n02412080
n04285008
n03954731
n04311004
n03476991
n01518878
n02687172
n02342885
n02346627
n02883205
n03457902
n02097658
n02504458
n03930313
n02087394
n02802426
n03272010
n02102318
n02091467
n02099849
n04552348
n02443114
n02276258
n03642806
n02342885
n03916031
n02125311
n02837789
n02130308
n04509417
n03207941
n03877845
n13052670
n02317335
n03444034
n03179701
n04371774
n03924679
n02950826
n02110958
n02113978
n02109961
n02363005
n02090622
n07930864
n03857828
n03763968
n07684084
n02497673
n02102480
n04275548
n04264628
n02058221
n01687978
n02877765
n01748264
n02028035
n02909870
n04332243
n09835506
n04192698
n03877845
n03832673
n04179913
n03623198
n02107908
n04548362
n01641577
n02992211
n04326547
n02783161
n03743016
n01729977
n04146614
n01695060
n03649909
n02087394
n03424325
n01688243
n03223299
n01914609
n02091032
n02095570
n07720875
n02606052
n03584829
n02110185
n03220513
n07745940
n01824575
n02099601
n11939491
n07749582
n03457902
n01784675
n02112018
n03733131
n04328186
n04037443
n03717622
n01694178
n02871525
n02808440
n04560804
n02097474
n02137549
n01981276
n02443114
n02101006
n04550184
n12985857
n02236044
n02488291
n04532106
n03895866
n03617480
n03417042
n03903868
n03584254
n02389026
n04435653
n02492035
n01796340
n03447721
n03447447
n03595614
n04579145
n02777292
n04147183
n02006656
n03843555
n02504458
n03444034
n03673027
n04417672
n10148035
n04179913
n03792972
n04552348
n02281406
n02326432
n02493509
n03314780
n03485407
n01980166
n04442312
n03602883
n01986214
n02108915
n02492660
n03384352
n04367480
n04467665
n02814860
n01728572
n03733281
n03216828
n02494079
n03733805
n02279972
n01692333
n02091635
n04487081
n03866082
n03208938
n07714990
n02906734
n02807133
n02095570
n03594945
n03492542
n02442845
n01833805
n02395406
n06874185
n02490219
n02071294
n02447366
n01537544
n02281787
n02268443
n03775546
n04429376
n03832673
n04398044
n04370456
n02128757
n04162706
n04146614
n04482393
n07860988
n02167151
n02095889
n02487347
n01632777
n02992211
n02097658
n02107683
n03980874
n07753592
n02037110
n03388183
n01695060
n04258138
n02802426
n03425413
n02403003
n03868242
n02006656
n02667093
n02607072
n02093647
n02536864
n04591713
n02669723
n03733805
n03259280
n03709823
n04483307
n03877472
n02113023
n04133789
n06359193
n03903868
n03089624
n02013706
n04266014
n02504013
n02101006
n02124075
n01774750
n02112350
n02526121
n03485407
n03496892
n02655020
n07714571
n02087394
n03160309
n02091831
n03047690
n04612504
n02859443
n04033995
n02950826
n03187595
n01592084
n07892512
n04507155
n01692333
n01981276
n02823750
n04251144
n04548362
n07565083
n04209133
n01877812
n04486054
n09421951
n02231487
n02113799
n02098413
n04081281
n02999410
n02107312
n02346627
n01675722
n02795169
n03649909
n04090263
n03871628
n01877812
n03670208
n03866082
n03496892
n07248320
n04162706
n02098413
n04069434
n03938244
n02101006
n02325366
n03388549
n03393912
n01739381
n02108089
n03000134
n03124170
n02037110
n02098105
n01986214
n03314780
n10148035
n04200800
n03457902
n02091831
n02835271
n03642806
n02101388
n02128757
n04004767
n02091635
n04311004
n04328186
n01829413
n02108000
n03877845
n03935335
n01744401
n01531178
n13044778
n02699494
n01775062
n02088364
n04239074
n03781244
n02442845
n03028079
n09421951
n12768682
n02454379
n03065424
n02113023
n01873310
n03594945
n03792782
n03529860
n02174001
n02487347
n01692333
n02837789
n04487394
n02509815
n03970156
n02445715
n02666196
n02009912
n01797886
n07583066
n02111500
n03461385
n04371774
n04296562
n02978881
n02066245
n02129604
n03761084
n09229709
n01774750
n02108915
n01797886
n04482393
n03792782
n02095314
n01693334
n04560804
n04376876
n07718747
n01532829
n03888605
n02980441
n01494475
n02093754
n07802026
n04562935
n02165456
n02356798
n03977966
n03124170
n02797295
n04201297
n04392985
n04579432
n02106550
n02782093
n04252077
n04326547
n02454379
n02437312
n01729977
n02123045
n04229816
n02077923
n03788195
n02124075
n02051845
n02087394
n02096437
n02403003
n02769748
n04392985
n02134084
n02840245
n04273569
n03125729
n03967562
n03961711
n03961711
n07579787
n04270147
n02965783
n02006656
n03995372
n03444034
n02814860
n04070727
n04208210
n04486054
n03729826
n02120079
n04591713
n02808304
n02105641
n03770439
n04228054
n02094114
n03400231
n02106166
n03868863
n02089078
n03954731
n04355338
n02669723
n04200800
n04266014
n03929855
n02107312
n04023962
n03958227
n01677366
n02791124
n03485407
n02129165
n03075370
n01558993
n02988304
n04355933
n02134418
n01675722
n07920052
n02321529
n02018795
n03992509
n03868863
n03796401
n02892767
n04254120
n03785016
n04591157
n01518878
n06794110
n01930112
n02951585
n07711569
n01496331
n02788148
n03207743
n03794056
n04332243
n04356056
n07873807
n02667093
n03271574
n02794156
n02493793
n03527444
n02951585
n03240683
n02109961
n01795545
n03599486
n04599235
n01644900
n07880968
n04317175
n02840245
n02408429
n07248320
n04285008
n02096585
n02704792
n04560804
n03785016
n02927161
n03697007
n07930864
n07248320
n02028035
n02123597
n02676566
n07583066
n02871525
n02134084
n02091032
n04462240
n02117135
n02009912
n09193705
n09472597
n02834397
n03764736
n01753488
n03895866
n02112018
n02165105
n02837789
n03457902
n04522168
n04023962
n04536866
n04005630
n02110627
n02708093
n04554684
n01514668
n02090379
n07836838
n02108089
n03095699
n04366367
n04039381
n07802026
n03100240
n03255030
n04235860
n02980441
n03218198
n01514668
n03000684
n02088094
n02815834
n03657121
n03891251
n02808440
n02916936
n03661043
n04243546
n04065272
n03666591
n04604644
n04509417
n03937543
n04509417
n02109961
n04251144
n02869837
n02113712
n02492660
n02841315
n07734744
n04456115
n02640242
n03929855
n04266014
n01644900
n02807133
n03814639
n01514859
n01784675
n04023962
n02256656
n01695060
n03532672
n04070727
n03742115
n03482405
n01773797
n03388183
n03792782
n09246464
n03394916
n13052670
n03498962
n02356798
n02966193
n01798484
n03394916
n04476259
n03854065
n03950228
n02708093
n02206856
n03026506
n04004767
n03691459
n01682714
n02095570
n02480855
n03424325
n01531178
n03868863
n02883205
n02795169
n04399382
n02840245
n02808304
n01695060
n02110063
n01601694
n04229816
n02927161
n03187595
n02454379
n04483307
n01986214
n02104029
n04485082
n02808304
n03384352
n02107574
n02927161
n03924679
n01685808
n02364673
n04389033
n07718472
n01558993
n03047690
n03595614
n02071294
n03028079
n01806143
n03814639
n02007558
n04525038
n02128385
n02391049
n04372370
n03769881
n02100877
n09288635
n03950228
n02786058
n03788365
n01667114
n02119789
n02279972
n02033041
n02086910
n01749939
n03337140
n07693725
n02492660
n02442845
n02917067
n03733281
n07920052
n02490219
n02111277
n02123394
n02128757
n02992211
n03424325
n03942813
n04399382
n04417672
n01828970
n03854065
n02325366
n02492035
n03220513
n02087046
n03602883
n01983481
n01498041
n02834397
n03791053
n04604644
n07730033
n01675722
n02105056
n04039381
n02835271
n02787622
n04591157
n02484975
n04044716
n02977058
n03000247
n03602883
n02112018
n04584207
n03733281
n04209133
n02106662
n01740131
n03983396
n04141327
n03476684
n03337140
n04311174
n02510455
n03476991
n04456115
n03141823
n04009552
n03461385
n01797886
n01734418
n02108915
n04251144
n04192698
n04525038
n03995372
n01985128
n07930864
n02514041
n02098413
n03388183
n02095889
n02992529
n07920052
n03249569
n02667093
n03393912
n03743016
n03876231
n02138441
n07875152
n02099601
n01630670
n02099429
n03706229
n03992509
n03141823
n03109150
n02504013
n02992529
n01943899
n03796401
n01675722
n04141327
n07697537
n04141327
n02871525
n04254680
n07836838
n03133878
n02346627
n03649909
n02090622
n03124170
n04458633
n04525305
n03666591
n02699494
n03680355
n01692333
n02480495
n03109150
n02342885
n02776631
n04596742
n03018349
n04525305
n01824575
n01882714
n02115641
n02788148
n04335435
n02085936
n02782093
n03095699
n03127925
n09468604
n07717410
n03417042
n12998815
n02113023
n07742313
n04296562
n07714571
n02107312
n01806143
n04033995
n02025239
n03930313
n02641379
n03804744
n07745940
n02097658
n07930864
n03089624
n02492035
n02791124
n02172182
n02865351
n01739381
n03950228
n02099429
n01644900
n02788148
n01622779
n02027492
n04254120
n03929855
n02814533
n02226429
n07715103
n03840681
n02256656
n01833805
n12267677
n01687978
n04592741
n04592741
n07873807
n02110627
n02277742
n04266014
n01776313
n02794156
n02093428
n04311004
n03920288
n03047690
n03992509
n02112350
n04591157
n03017168
n03459775
n01667778
n01820546
n03485794
n02804610
n03602883
n03666591
n01872401
n04589890
n02730930
n02090379
n03670208
n02892201
n03372029
n03062245
n02486410
n04562935
n01697457
n02099429
n04111531
n01728920
n04153751
n02113624
n01770393
n04266014
n02017213
n03483316
n01742172
n02480855
n01739381
n01768244
n03908714
n02006656
n02089867
n03026506
n01558993
n03980874
n03775546
n01980166
n09399592
n02804610
n04336792
n02027492
n04251144
n02100735
n03788365
n13040303
n02328150
n15075141
n07802026
n01532829
n03594734
n02676566
n04404412
n02346627
n02843684
n02108000
n02871525
n02606052
n03982430
n02165456
n02823750
n01871265
n02730930
n03770679
n04505470
n03404251
n01883070
n02979186
n02093991
n01630670
n04120489
n01443537
n04371774
n03866082
n01833805
n03527444
n03998194
n03873416
n02930766
n03776460
n06596364
n02321529
n04392985
n03796401
n04483307
n02526121
n02396427
n02113023
n03443371
n07747607
n01980166
n02058221
n02167151
n02769748
n03127925
n02190166
n03272562
n02097130
n04560804
n02086240
n04326547
n02095314
n01843383
n02107312
n03954731
n02281406
n02105641
n03075370
n02883205
n01829413
n02099849
n02112137
n07684084
n03095699
n02408429
n10565667
n02641379
n02259212
n02128757
n03344393
n01665541
n04004767
n07734744
n02088364
n02100583
n02672831
n01820546
n03376595
n04070727
n02981792
n03709823
n02206856
n01537544
n01776313
n04579145
n02492035
n02804414
n02113799
n02104365
n03483316
n09256479
n03642806
n07590611
n02094433
n02089973
n02497673
n01968897
n02090721
n02167151
n02974003
n02514041
n03781244
n02408429
n02279972
n04311174
n01990800
n02804610
n03146219
n13040303
n07930864
n04423845
n02437616
n03388043
n04487394
n04201297
n02704792
n01729322
n04371430
n03937543
n03216828
n02486261
n02666196
n04612504
n03180011
n03240683
n03627232
n01877812
n04486054
n02782093
n02814533
n02119022
n03788195
n07720875
n02096051
n03903868
n02105162
n04125021
n03272010
n03794056
n02058221
n03457902
n04584207
n03785016
n04311004
n03837869
n02101556
n03840681
n03425413
n03496892
n02127052
n01980166
n03770439
n04398044
n02105412
n03032252
n03594734
n02096437
n10148035
n01443537
n04125021
n03649909
n02939185
n01737021
n02510455
n02398521
n02490219
n03595614
n04277352
n03649909
n07716906
n02808440
n03124170
n03538406
n03376595
n02860847
n01797886
n04243546
n03673027
n04462240
n03595614
n04579432
n01558993
n04081281
n04136333
n03223299
n03197337
n02094114
n03452741
n04392985
n02666196
n02786058
n09332890
n03759954
n04125021
n03000684
n04597913
n01768244
n02099601
n07716358
n03530642
n01860187
n02012849
n02814860
n02110063
n03160309
n02091032
n15075141
n02127052
n02699494
n04447861
n02109961
n03532672
n04099969
n03594945
n02101556
n04200800
n02100236
n04149813
n07920052
n04149813
n02097209
n03793489
n09428293
n03840681
n02799071
n04332243
n01807496
n04479046
n02101388
n02099849
n02085620
n02655020
n02802426
n04204347
n02094433
n02814533
n04398044
n04090263
n02051845
n04548362
n04259630
n04209133
n04596742
n02114855
n02091635
n01795545
n02231487
n07831146
n02110341
n01728920
n02802426
n01978455
n03388043
n03041632
n03976657
n02443484
n01735189
n04310018
n02009229
n02325366
n03075370
n04149813
n03891251
n02125311
n04074963
n02105855
n04525038
n02002724
n03924679
n03947888
n03544143
n01704323
n02177972
n04509417
n07754684
n03961711
n02364673
n07614500
n04239074
n02825657
n02391049
n03447721
n03042490
n04442312
n02098105
n03388043
n03692522
n04428191
n02100236
n04591157
n03729826
n03775071
n02480855
n03697007
n02088094
n02012849
n02119789
n02085782
n03424325
n01872401
n01631663
n02788148
n01698640
n02672831
n04162706
n04591157
n02128385
n02992529
n03443371
n03792782
n04200800
n04069434
n02490219
n03868242
n04277352
n03770439
n01773157
n04026417
n03492542
n02107908
n04548362
n03379051
n01582220
n02109047
n04579145
n02114548
n04152593
n02769748
n04296562
n02097209
n01983481
n04366367
n03657121
n02879718
n02119789
n03947888
n02342885
n04152593
n04370456
n03032252
n07880968
n04328186
n02107574
n02017213
n01945685
n04550184
n01514859
n04479046
n07695742
n03481172
n07747607
n02437312
n03742115
n01924916
n01608432
n04584207
n02825657
n12144580
n01689811
n04228054
n02113624
n07697313
n04367480
n04026417
n01616318
n02643566
n04228054
n01443537
n04252077
n01734418
n02490219
n02814533
n01796340
n03160309
n04355933
n03666591
n02443114
n03595614
n02948072
n03786901
n04380533
n01824575
n02018207
n02111500
n03188531
n03417042
n13037406
n02869837
n03627232
n07716906
n02130308
n02422106
n03544143
n02108551
n03314780
n01694178
n02437312
n02978881
n04243546
n02823428
n03916031
n01616318
n01496331
n15075141
n02071294
n03095699
n04525305
n02483362
n02109047
n02930766
n03792972
n04507155
n02091032
n01744401
n03929660
n01632458
n02090622
n13037406
n01580077
n03028079
n04366367
n03000247
n02088094
n04376876
n02110341
n03983396
n02791124
n02977058
n03384352
n03042490
n02643566
n04522168
n02804414
n07760859
n02445715
n01728920
n04285008
n01697457
n03961711
n03134739
n01882714
n07716358
n02364673
n02536864
n07880968
n03662601
n02699494
n04133789
n04141076
n04366367
n02892201
n02100877
n01695060
n07747607
n02971356
n02804414
n01665541
n02422699
n03065424
n07693725
n04336792
n07932039
n04311174
n07715103
n02268853
n02096585
n01981276
n04133789
n02814860
n03388183
n01631663
n02447366
n01560419
n02319095
n04370456
n04152593
n02939185
n01534433
n02909870
n01537544
n07565083
n02106030
n01630670
n02837789
n03633091
n01614925
n13052670
n02104029
n02877765
n02106166
n02011460
n03590841
n02130308
n01968897
n02397096
n02966193
n02129165
n03393912
n03133878
n03743016
n03947888
n02133161
n02102480
n02457408
n02111889
n02364673
n02980441
n02138441
n03908714
n04599235
n03220513
n01729977
n02808304
n03223299
n03444034
n03538406
n03384352
n02607072
n07684084
n07697537
n07565083
n02939185
n04483307
n01843065
n03272010
n04370456
n03627232
n03259280
n01698640
n01775062
n02769748
n04428191
n04326547
n02090721
n02051845
n03124170
n02422106
n02134418
n09399592
n03447721
n04090263
n04584207
n03884397
n02356798
n02105641
n03786901
n02835271
n02090379
n03379051
n04389033
n01847000
n02125311
n02089078
n01498041
n01749939
n02102177
n04023962
n03788365
n02127052
n04326547
n01641577
n02484975
n07768694
n03777754
n04487394
n07873807
n02089078
n02112137
n03733281
n04141975
n02105251
n04040759
n13052670
n07684084
n03179701
n03804744
n03127747
n01748264
n02408429
n03126707
n03595614
n04235860
n02117135
n03938244
n02497673
n03425413
n04192698
n03980874
n01774384
n04591157
n02403003
n01729322
n02834397
n03527444
n03763968
n04120489
n02100735
n01955084
n02483362
n02510455
n01817953
n03868242
n02483362
n04418357
n01968897
n03691459
n01882714
n02883205
n01829413
n02870880
n02396427
n01843383
n10148035
n02699494
n01580077
n04238763
n03496892
n07684084
n02950826
n03445777
n01798484
n03877845
n04239074
n01622779
n02099712
n02837789
n07730033
n09835506
n04532106
n03976467
n03854065
n01756291
n07892512
n15075141
n02971356
n02113023
n04023962
n02108551
n02002724
n09288635
n03457902
n03124170
n01484850
n04548362
n03201208
n01734418
n02090622
n03929660
n03868863
n02480855
n02028035
n01692333
n02206856
n03970156
n07768694
n04376876
n02089973
n03976467
n03134739
n03788195
n04399382
n04023962
n03393912
n12620546
n03085013
n02277742
n03272562
n01698640
n04039381
n02877765
n03680355
n01873310
n04039381
n02980441
n04376876
n01729322
n02795169
n01530575
n04515003
n02794156
n02165105
n03594945
n02093991
n02256656
n02105412
n03216828
n02110806
n03297495
n02112137
n03710721
n02110185
n09421951
n02480855
n04336792
n02510455
n02087046
n02110627
n04005630
n02536864
n04277352
n01774750
n02667093
n04554684
n02823750
n03196217
n01496331
n01855032
n02128757
n03764736
n02981792
n03876231
n04458633
n03888257
n01860187
n04326547
n09421951
n07880968
n02500267
n01770081
n03584254
n07711569
n09468604
n01614925
n03788365
n04560804
n01729977
n03717622
n02410509
n02437312
n03000684
n01632777
n02028035
n07873807
n01630670
n03388183
n02110185
n02098413
n02107142
n04209133
n07932039
n03992509
n04612504
n01986214
n04270147
n06874185
n02909870
n02168699
n03785016
n01532829
n04264628
n02484975
n02799071
n04209133
n07584110
n01560419
n02117135
n07684084
n03814906
n03908618
n02279972
n02098413
n02097658
n04154565
n02125311
n02018795
n02168699
n02096177
n03047690
n02747177
n03788365
n02128385
n03000134
n03775546
n04204238
n04604644
n03980874
n03598930
n01855672
n02090721
n07715103
n02443114
n02102177
n04258138
n04591713
n03297495
n01667778
n04350905
n04589890
n06794110
n03884397
n04367480
n03877845
n10148035
n03492542
n04116512
n03785016
n01968897
n02111889
n04579432
n03492542
n02111277
n03535780
n03786901
n02113799
n04347754
n03535780
n02963159
n03249569
n03617480
n04070727
n02108000
n03075370
n03355925
n04418357
n02783161
n02112137
n03179701
n02114367
n02098286
n02119022
n03000684
n01695060
n15075141
n02877765
n02107683
n03721384
n02107142
n02092339
n02687172
n02396427
n01629819
n03272010
n10148035
n04141076
n04044716
n04277352
n02364673
n04141975
n01819313
n03775546
n03379051
n01756291
n03785016
n04476259
n04612504
n01632777
n03838899
n02007558
n01440764
n02088094
n01735189
n02356798
n02095889
n09229709
n02132136
n02091635
n07754684
n03146219
n03467068
n03047690
n02408429
n02086910
n02012849
n04522168
n01943899
n12144580
n01820546
n01824575
n01677366
n03868242
n03814639
n02091635
n04033901
n02074367
n04597913
n07880968
n01871265
n03000684
n01983481
n07753592
n04235860
n02229544
n03814906
n03527444
n04532106
n02447366
n04179913
n04116512
n01631663
n04037443
n03947888
n02708093
n03874293
n04612504
n04589890
n02097130
n03089624
n03670208
n04579145
n03344393
n07614500
n04462240
n01751748
n04201297
n07802026
n02795169
n07613480
n07747607
n02115913
n02493793
n03770679
n02268443
n02009912
n04423845
n01530575
n01685808
n07715103
n03016953
n03355925
n04554684
n04366367
n03207941
n03887697
n04336792
n03759954
n03595614
n02480855
n04525038
n04355338
n02129165
n03255030
n02843684
n04493381
n02992211
n03814906
n04239074
n06794110
n03977966
n02979186
n03207941
n07875152
n01798484
n02484975
n02127052
n02133161
n03929660
n02966687
n12985857
n01873310
n07584110
n02088094
n01748264
n02101006
n03450230
n03657121
n03991062
n02013706
n03742115
n03595614
n04591713
n03891251
n01943899
n03065424
n04127249
n03584829
n02018207
n02089973
n03773504
n01751748
n02119022
n02276258
n04086273
n01877812
n02917067
n02168699
n02107574
n03954731
n02443114
n02101556
n01943899
n03457902
n01644900
n01770081
n03495258
n02606052
n02109047
n01532829
n02099429
n02100735
n03216828
n04204347
n02095889
n03794056
n02104365
n03595614
n01630670
n03223299
n04389033
n01796340
n02098286
n02109525
n04509417
n01580077
n04209239
n01675722
n07718747
n02787622
n04553703
n02100877
n02708093
n01687978
n01944390
n02807133
n03908714
n12620546
n04009552
n04591713
n02112350
n02168699
n03773504
n03127747
n03393912
n03617480
n02704792
n03590841
n03445924
n02486261
n03803284
n03954731
n02971356
n03000247
n03887697
n02894605
n04286575
n02172182
n01873310
n04118538
n04357314
n02113624
n02667093
n03141823
n04423845
n03742115
n02085620
n02727426
n04606251
n02088466
n03109150
n03134739
n02361337
n03832673
n02087394
n02177972
n04347754
n07718747
n03710721
n03970156
n04229816
n01601694
n02606052
n03425413
n03447447
n04336792
n04486054
n04201297
n07614500
n02226429
n01622779
n04435653
n09288635
n02790996
n02108000
n03961711
n03417042
n03017168
n03840681
n02509815
n04019541
n01692333
n01843065
n03461385
n04296562
n02493509
n03133878
n02110627
n07932039
n02091831
n03249569
n02091467
n03680355
n07714990
n02412080
n03250847
n03447721
n02916936
n02107683
n02492035
n03404251
n02102177
n07932039
n04557648
n04372370
n03891251
n02974003
n15075141
n02444819
n04462240
n02100236
n02108551
n04515003
n02002556
n02794156
n04204238
n04090263
n04584207
n02120505
n03773504
n02165456
n07684084
n04311174
n02002556
n02106382
n01695060
n02783161
n02422699
n03982430
n02397096
n03976657
n02692877
n03841143
n03710637
n04259630
n02099601
n03942813
n12998815
n11939491
n04399382
n03065424
n01644373
n04462240
n03992509
n03534580
n02398521
n02095889
n02808440
n04264628
n02786058
n04399382
n03933933
n04487081
n01873310
n04409515
n02108089
n02091831
n07734744
n04552348
n04162706
n02123045
n13040303
n02492035
n03657121
n02488291
n02027492
n02769748
n07753113
n03814639
n01704323
n02276258
n04557648
n03478589
n04435653
n03535780
n04371774
n02823750
n02124075
n07695742
n03337140
n03884397
n01917289
n07720875
n07742313
n04019541
n02130308
n02102040
n02104365
n02963159
n01687978
n07754684
n02328150
n02791124
n04286575
n04606251
n03814639
n09246464
n02009229
n01665541
n04399382
n04429376
n04033995
n04238763
n09256479
n01632458
n04004767
n04111531
n03710637
n02107908
n04008634
n02106382
n02086079
n07871810
n02105505
n02013706
n03733131
n07875152
n03376595
n03594945
n01776313
n03016953
n04243546
n04252225
n03709823
n02939185
n02107574
n02097047
n02109525
n03916031
n02116738
n07579787
n02018795
n03967562
n03075370
n12998815
n01818515
n02190166
n02701002
n01685808
n12267677
n02107683
n07695742
n02085782
n03692522
n02086646
n03623198
n03534580
n02133161
n07584110
n03980874
n03710721
n03838899
n04311174
n03976467
n02966687
n03785016
n02097658
n04442312
n04380533
n03042490
n03982430
n02510455
n02408429
n02093859
n07718472
n02086079
n02834397
n03670208
n01728572
n02444819
n02091467
n04325704
n04332243
n03223299
n01734418
n03496892
n01697457
n03884397
n03483316
n04285008
n01795545
n03220513
n02007558
n01532829
n02236044
n06596364
n04111531
n03032252
n03814639
n04317175
n04033995
n02086079
n07684084
n01829413
n02128757
n03983396
n04487081
n02190166
n04523525
n04328186
n04116512
n03450230
n04228054
n02102177
n03873416
n02488702
n02226429
n02018207
n04044716
n03394916
n01818515
n01910747
n03584829
n03240683
n04133789
n03095699
n04325704
n02606052
n02102318
n02106382
n03424325
n02906734
n01818515
n04548362
n04086273
n07590611
n02033041
n04501370
n02486261
n03793489
n02974003
n09428293
n02088466
n04355933
n02113712
n02777292
n02490219
n02105056
n02071294
n02655020
n03425413
n02808440
n02493509
n03384352
n02108422
n04350905
n07695742
n02077923
n03476991
n03857828
n02494079
n01440764
n02277742
n02509815
n07730033
n01774384
n02951585
n02892201
n02488702
n02782093
n03854065
n04517823
n03467068
n07920052
n03180011
n02111129
n02361337
n03544143
n07717556
n03291819
n02110063
n03825788
n02110185
n02108422
n01744401
n04204347
n01744401
n02086079
n01773549
n03498962
n02979186
n01694178
n04265275
n04371774
n01669191
n01582220
n02128925
n02747177
n02108551
n02105056
n02107312
n01532829
n01698640
n03661043
n02834397
n03956157
n01739381
n02500267
n02317335
n02951358
n02105505
n07718747
n04192698
n04536866
n03710637
n02346627
n03476684
n02086910
n02747177
n02096177
n04548280
n01630670
n01682714
n04275548
n03538406
n02113712
n09421951
n01560419
n04252225
n02423022
n01697457
n02389026
n03595614
n02415577
n04004767
n02672831
n03018349
n03998194
n03089624
n04273569
n02058221
n03544143
n02395406
n03535780
n03450230
n03888605
n13052670
n01910747
n01843065
n03982430
n03447721
n01955084
n01630670
n03803284
n02120079
n03372029
n02504458
n03874599
n02011460
n02108089
n03627232
n02492660
n04399382
n02412080
n03325584
n03706229
n02500267
n02123159
n04238763
n02883205
n13044778
n07836838
n02799071
n01917289
n04273569
n04552348
n01795545
n02011460
n03944341
n02356798
n04264628
n02859443
n02108915
n02108422
n04591713
n02099849
n07693725
n01795545
n04596742
n03868242
n03958227
n02093991
n03134739
n01917289
n02099712
n03314780
n11879895
n10148035
n02018795
n02747177
n04542943
n03141823
n02797295
n01704323
n02777292
n02769748
n04033995
n01860187
n02321529
n01917289
n03785016
n03956157
n03100240
n04041544
n02165105
n03947888
n03891251
n03709823
n02988304
n02106030
n02095570
n02814860
n03649909
n03110669
n02444819
n04044716
n04487394
n02422106
n04069434
n02165456
n02098105
n02106382
n02280649
n02002556
n01980166
n02091032
n09229709
n03642806
n03770679
n02172182
n07892512
n01944390
n04462240
n02114548
n02403003
n03899768
n09472597
n03530642
n02974003
n02777292
n02093428
n01829413
n02097298
n01882714
n01833805
n03481172
n02094114
n03218198
n02640242
n02422699
n03297495
n04592741
n01644373
n02066245
n03028079
n04399382
n03355925
n03187595
n02071294
n01494475
n02119789
n02963159
n03976657
n03759954
n02916936
n02120079
n03109150
n04370456
n02817516
n01734418
n02415577
n03691459
n04023962
n02114712
n03995372
n06359193
n01943899
n01860187
n02859443
n02268443
n02488702
n03110669
n03250847
n02165105
n02102480
n03026506
n04465501
n03733131
n01910747
n04277352
n03065424
n01644900
n02951358
n04399382
n02326432
n03529860
n03764736
n02444819
n02093256
n02091134
n02091635
n11879895
n03657121
n04613696
n03452741
n04596742
n02097474
n02672831
n01968897
n02486410
n02488291
n02356798
n07749582
n04033995
n03000684
n04428191
n02089078
n04005630
n03476991
n02817516
n04371774
n12144580
n12144580
n03950228
n02009912
n03425413
n04141975
n02790996
n01818515
n07583066
n04116512
n03417042
n01739381
n01944390
n03447721
n03891332
n01689811
n04081281
n02892767
n04590129
n01632777
n02086910
n01742172
n04579145
n02814860
n04458633
n04487394
n02088632
n03942813
n04162706
n07613480
n02098413
n04037443
n02457408
n04461696
n02110185
n03887697
n03344393
n04336792
n04209239
n02480495
n02102480
n04040759
n03372029
n03017168
n02087046
n02110185
n04131690
n02133161
n02749479
n02092002
n04612504
n03388183
n03417042
n02168699
n07248320
n02012849
n03791053
n02027492
n07768694
n02115913
n02093428
n01630670
n02226429
n01514859
n07716358
n02860847
n04041544
n02105505
n02107683
n03394916
n03384352
n04536866
n02107312
n04487081
n02447366
n02113186
n03777754
n03496892
n09421951
n02097298
n02112706
n02128757
n02169497
n03933933
n02109961
n04254120
n04562935
n02457408
n02093754
n15075141
n02788148
n01751748
n02837789
n06359193
n01630670
n03908618
n07754684
n02013706
n03680355
n02788148
n06794110
n02102040
n01496331
n03482405
n02107312
n13054560
n03843555
n01644373
n02894605
n01818515
n03899768
n02134084
n01692333
n02948072
n03743016
n07583066
n02279972
n07760859
n03868863
n02422699
n02825657
n02480855
n02226429
n04033901
n01817953
n04285008
n04550184
n04476259
n02100877
n09835506
n02410509
n03207743
n03877845
n03947888
n01774750
n02641379
n04584207
n02481823
n07768694
n02130308
n04147183
n04596742
n02395406
n07754684
n04252225
n04118538
n09256479
n07742313
n02769748
n03888257
n03658185
n04067472
n02481823
n03255030
n03903868
n03124043
n03874599
n06596364
n04355933
n04613696
n04357314
n02814860
n02099601
n01806567
n02396427
n02106166
n03769881
n02113023
n04146614
n02640242
n02966193
n02841315
n02481823
n03724870
n03998194
n04522168
n02747177
n02317335
n04067472
n02129165
n07714571
n03992509
n03379051
n04141975
n02028035
n02085936
n04540053
n02112137
n03977966
n03637318
n03887697
n09468604
n03424325
n04584207
n01917289
n07579787
n03325584
n01829413
n04540053
n03127925
n01558993
n02027492
n03424325
n03109150
n06794110
n01773797
n03188531
n02106382
n03788365
n02123159
n01773797
n02229544
n02727426
n02823428
n02454379
n02106030
n01924916
n12998815
n04179913
n04099969
n07684084
n03450230
n04435653
n02422106
n03637318
n03018349
n04429376
n03868863
n02110806
n02226429
n02006656
n03843555
n06359193
n01860187
n01694178
n02138441
n03630383
n04009552
n02101006
n03496892
n03447721
n07920052
n07873807
n01729977
n03220513
n01614925
n02134084
n03908618
n03763968
n03544143
n02797295
n04392985
n01728920
n03876231
n03259280
n03325584
n04296562
n02909870
n02493793
n02112706
n02776631
n02447366
n01514859
n03954731
n03344393
n04125021
n03930630
n04116512
n02441942
n03344393
n02125311
n02643566
n03840681
n02106662
n03325584
n07695742
n01491361
n03814906
n03075370
n02098286
n02666196
n07718472
n02948072
n01698640
n03777754
n07714571
n01945685
n03085013
n03445777
n04380533
n01986214
n03673027
n03710193
n02441942
n01734418
n02105412
n03447447
n04591157
n02727426
n04486054
n02510455
n03958227
n01978455
n04461696
n03908618
n04522168
n02107908
n07715103
n04009552
n03457902
n03447447
n01820546
n02692877
n03874599
n02101388
n02115641
n03532672
n03127925
n04081281
n02814533
n02916936
n02483708
n02791124
n04505470
n04417672
n03876231
n01829413
n09246464
n01728920
n02363005
n07754684
n07717556
n03000247
n01873310
n02091635
n07831146
n02794156
n03825788
n03476991
n04033901
n02607072
n02123394
n03534580
n01770081
n02011460
n02843684
n02109525
n03916031
n04418357
n03710637
n03075370
n01644900
n04254680
n07768694
n04228054
n04258138
n04357314
n07836838
n03000134
n04310018
n03000134
n02098413
n02108000
n04252077
n02457408
n04483307
n02105505
n03125729
n02091467
n03868242
n02106166
n03240683
n02917067
n02105056
n04525305
n01753488
n02978881
n03977966
n02486261
n04162706
n02120079
n03709823
n03127747
n02089973
n03089624
n03814906
n01534433
n04613696
n03325584
n04505470
n03325584
n02115641
n03630383
n01930112
n04204238
n03063689
n02233338
n03916031
n02786058
n02113799
n03935335
n04179913
n03690938
n02442845
n01819313
n01534433
n01753488
n02823750
n01491361
n03124043
n01749939
n02328150
n03272562
n02094258
n04597913
n01773549
n03724870
n01871265
n01751748
n04039381
n03733805
n02783161
n02948072
n02397096
n02233338
n02093647
n03016953
n04344873
n02640242
n01677366
n02106166
n07745940
n03710637
n03529860
n02988304
n04350905
n02105056
n01630670
n12998815
n02094258
n03481172
n04515003
n04418357
n03075370
n04273569
n01592084
n03290653
n04487394
n02109047
n02259212
n04604644
n03976467
n04023962
n02910353
n03394916
n02106662
n01882714
n03494278
n01770393
n03445924
n02102177
n02110958
n02089973
n01924916
n02113799
n01817953
n02091134
n01697457
n03443371
n04482393
n01749939
n01985128
n04116512
n03452741
n03220513
n02510455
n03761084
n02916936
n02089867
n02281406
n03445777
n03642806
n03255030
n09428293
n01774750
n03220513
n04254777
n13037406
n04235860
n07875152
n01877812
n02086240
n03876231
n02484975
n03595614
n03733805
n02099712
n03884397
n03016953
n02088632
n04086273
n02797295
n04392985
n03124043
n02102480
n02100583
n01855032
n02667093
n01945685
n03250847
n01644373
n04147183
n02641379
n02342885
n03666591
n03000134
n03197337
n02807133
n03394916
n01797886
n02443114
n02056570
n02916936
n04090263
n01756291
n03724870
n02747177
n04553703
n01983481
n04479046
n07920052
n01631663
n01981276
n02097474
n02268443
n01944390
n02108422
n04487081
n07734744
n02091244
n02835271
n01824575
n02056570
n03773504
n01688243
n03345487
n03345487
n02486410
n03271574
n03485407
n02483362
n02113712
n02786058
n04579145
n02948072
n03595614
n03594734
n01491361
n01729977
n04033995
n04597913
n01871265
n02992211
n02361337
n04070727
n02007558
n03110669
n09399592
n02009912
n03249569
n02415577
n02190166
n02701002
n03042490
n01871265
n02091467
n03208938
n02105505
n04589890
n02138441
n04591157
n03344393
n01622779
n01924916
n02137549
n04328186
n07590611
n01776313
n04389033
n02058221
n03786901
n02865351
n02536864
n04154565
n02108422
n07583066
n03770439
n04235860
n03594945
n02096051
n03590841
n04525038
n02264363
n04592741
n02364673
n01735189
n02977058
n02488291
n07871810
n03062245
n04557648
n03837869
n01770081
n04273569
n03290653
n03124043
n02971356
n02423022
n02094114
n01695060
n01917289
n02814533
n03250847
n02110063
n02666196
n02488291
n02504013
n02130308
n01695060
n03089624
n02906734
n02791124
n09835506
n07695742
n06874185
n04229816
n02408429
n02087394
n03297495
n02058221
n03763968
n01491361
n03781244
n03873416
n02111277
n13052670
n02119022
n02108000
n02791124
n03028079
n02906734
n02112350
n02102318
n04118776
n02823428
n04435653
n03786901
n02105505
n01514859
n02860847
n01871265
n07742313
n01695060
n01735189
n03141823
n02692877
n04254680
n02483708
n02011460
n02927161
n02113978
n02106166
n03770679
n02169497
n04482393
n02277742
n04485082
n01984695
n03658185
n01697457
n09428293
n02102480
n04501370
n04141975
n01614925
n02089078
n03935335
n02486410
n01843065
n01984695
n02363005
n04536866
n04141076
n01950731
n03445777
n02102040
n07715103
n09256479
n03781244
n02090379
n02129165
n04532670
n02939185
n04259630
n03788365
n03461385
n04606251
n04428191
n02488702
n01518878
n02107142
n01622779
n02483708
n07753113
n07930864
n01984695
n03476684
n02655020
n03376595
n01806143
n04286575
n02490219
n02640242
n04141975
n03938244
n02100735
n04041544
n02108915
n03769881
n02108551
n02110185
n02086646
n03388043
n07697313
n02098105
n04597913
n04090263
n02492660
n02795169
n02086240
n02097130
n02346627
n01622779
n01978287
n01924916
n02655020
n02787622
n02108551
n03717622
n07697313
n02105505
n07753113
n04204347
n02909870
n01828970
n02018795
n07836838
n01775062
n07716358
n01675722
n02807133
n02493793
n02091467
n02804414
n12144580
n02823428
n09229709
n03379051
n02791270
n01828970
n03832673
n04366367
n03877845
n03372029
n03961711
n03916031
n03788365
n04265275
n01806143
n04008634
n02794156
n03777754
n01630670
n07860988
n04239074
n04270147
n03761084
n04270147
n04487081
n02481823
n02395406
n02093859
n03991062
n04264628
n04258138
n06359193
n02074367
n07614500
n02865351
n07718747
n04074963
n04482393
n03347037
n02110063
n07836838
n02090379
n03595614
n03482405
n13052670
n04023962
n03991062
n04548280
n02056570
n02794156
n13133613
n02100877
n03272010
n02107683
n04149813
n04152593
n02002556
n03954731
n01968897
n03388043
n03764736
n02690373
n02966193
n01518878
n02128385
n03197337
n02092002
n03110669
n03478589
n02457408
n02870880
n02011460
n02093428
n03063689
n03337140
n04356056
n02963159
n04435653
n03871628
n02110627
n02088238
n03160309
n03983396
n02992529
n03843555
n01773549
n02389026
n09468604
n04505470
n02109961
n02794156
n03854065
n04355338
n02094433
n13133613
n03272010
n01667778
n03494278
n12768682
n02481823
n03085013
n03179701
n01667778
n02102040
n02112706
n02951585
n02108089
n02099601
n07860988
n04033995
n03388183
n02127052
n02107142
n03814639
n04004767
n02099712
n01582220
n02102177
n02100735
n03958227
n02481823
n01773549
n03131574
n04540053
n03424325
n03871628
n02116738
n09229709
n02797295
n02704792
n02825657
n02115913
n03888605
n02009229
n03063689
n07734744
n02669723
n02101556
n03045698
n04532106
n03961711
n04372370
n02655020
n02094433
n02088466
n04005630
n12144580
n02892767
n02091244
n03110669
n03759954
n03594945
n03594945
n04462240
n07711569
n03259280
n04482393
n02018207
n03134739
n03832673
n04467665
n04285008
n02169497
n03796401
n02099267
n02909870
n02105412
n04265275
n01728572
n04336792
n02834397
n02804414
n04548362
n03109150
n02895154
n03929660
n01685808
n02111500
n04033995
n01768244
n02002556
n03887697
n04069434
n03594734
n02500267
n07714990
n02137549
n03014705
n02447366
n01537544
n07802026
n03895866
n04330267
n03602883
n02795169
n04153751
n03782006
n02489166
n03447721
n03417042
n04550184
n02500267
n02112706
n03347037
n02088364
n02640242
n03983396
n02817516
n01695060
n13133613
n02095314
n03887697
n02892767
n07697313
n11939491
n04332243
n02667093
n02643566
n02493509
n04251144
n02730930
n04118776
n02097209
n04335435
n03016953
n03691459
n04037443
n02100583
n02104029
n02088466
n09193705
n03495258
n02095314
n03355925
n07613480
n02971356
n04153751
n01945685
n01697457
n04532106
n02895154
n04548362
n04485082
n02002724
n02999410
n03976467
n02951358
n03874293
n02442845
n04229816
n01614925
n02769748
n04461696
n02486410
n03916031
n04562935
n02098413
n02097474
n03584829
n02606052
n02123394
n03871628
n04311004
n02865351
n01601694
n02111129
n04509417
n01882714
n03908714
n02102973
n03983396
n02093859
n03775071
n02667093
n02906734
n07873807
n04277352
n04153751
n01675722
n01601694
n04263257
n01582220
n03000134
n04263257
n04286575
n06359193
n02445715
n03179701
n04275548
n02444819
n02002724
n03124170
n02018795
n02776631
n12144580
n03041632
n02101556
n04435653
n04254120
n04505470
n03297495
n02093256
n03529860
n01734418
n04462240
n02089867
n03259280
n03804744
n02484975
n03372029
n02992529
n01629819
n03814639
n04004767
n02280649
n04275548
n04023962
n03476684
n01843383
n02490219
n03450230
n02088238
n02129165
n07716906
n02006656
n07615774
n04033901
n02101388
n02412080
n02871525
n01689811
n02447366
n02951585
n03325584
n04238763
n01817953
n07753275
n03803284
n03724870
n01694178
n04613696
n03961711
n04553703
n04493381
n04507155
n03388183
n04483307
n02840245
n01739381
n03837869
n03980874
n02093647
n02992529
n03983396
n02110958
n01688243
n02100236
n01873310
n04525038
n03496892
n04350905
n02115913
n01824575
n04443257
n01729322
n03197337
n09421951
n07614500
n03445777
n03680355
n04579145
n03345487
n03062245
n02655020
n02769748
n03930630
n03956157
n04332243
n03690938
n04153751
n04456115
n02883205
n01631663
n02841315
n02480495
n02396427
n04357314
n01695060
n02101556
n03947888
n04367480
n03958227
n01924916
n02111129
n02939185
n01829413
n02108915
n03388183
n02410509
n04273569
n02119789
n04505470
n02094258
n02231487
n02916936
n02441942
n04039381
n02883205
n02098413
n01496331
n03534580
n07714990
n04286575
n03000247
n03691459
n03376595
n01729322
n12144580
n04192698
n03998194
n02979186
n02102973
n02110627
n01728572
n03272010
n03786901
n04033901
n02097047
n03947888
n07873807
n02097047
n07754684
n02276258
n02104365
n01734418
n03976467
n02825657
n01694178
n01682714
n02747177
n03710193
n09288635
n02510455
n02319095
n02088364
n02129604
n04326547
n03871628
n02096177
n09246464
n03127925
n02488702
n06785654
n02066245
n12998815
n01632777
n02091244
n01742172
n03908618
n04536866
n03841143
n01917289
n02276258
n03457902
n04041544
n03259280
n02236044
n02090379
n04127249
n03873416
n02415577
n03590841
n02094258
n03884397
n01978287
n02172182
n01990800
n04476259
n03871628
n03584829
n04118776
n02509815
n02102480
n01729977
n02776631
n03125729
n02948072
n01774384
n01695060
n07734744
n01990800
n02445715
n03017168
n02606052
n04612504
n02119789
n02113978
n03706229
n02115913
n02655020
n02640242
n03478589
n03891251
n02892201
n02676566
n01877812
n02037110
n07745940
n02090721
n04548280
n02971356
n03042490
n02865351
n04310018
n07802026
n01843065
n01944390
n03443371
n01496331
n13044778
n03196217
n02111889
n09288635
n03777568
n03970156
n02027492
n09332890
n04326547
n04458633
n02093428
n03992509
n03908618
n03290653
n04311004
n03764736
n04465501
n03345487
n04099969
n02843684
n02361337
n02066245
n02099601
n03259280
n02105641
n01755581
n03937543
n03249569
n02124075
n03761084
n02834397
n03891251
n07753275
n04389033
n03599486
n04392985
n01582220
n03642806
n01749939
n01944390
n03146219
n09428293
n02112350
n03249569
n02085936
n03240683
n04597913
n03249569
n02256656
n07248320
n04376876
n03089624
n04118538
n02966687
n03891332
n01773157
n02948072
n01685808
n04371430
n02107312
n01749939
n02085936
n02091831
n02098105
n02708093
n02120505
n01601694
n06874185
n02319095
n01616318
n01775062
n13040303
n03796401
n04482393
n03272562
n03478589
n02190166
n02910353
n02951358
n01749939
n12985857
n04254120
n03944341
n03743016
n01855672
n04228054
n03642806
n03956157
n04162706
n02992211
n01883070
n03045698
n02018207
n01872401
n04239074
n07932039
n04392985
n02641379
n01484850
n01742172
n04376876
n04550184
n03733805
n04371774
n04317175
n03873416
n02361337
n02002556
n02168699
n02098413
n02104365
n03841143
n02074367
n04344873
n07615774
n04149813
n02321529
n12144580
n02509815
n03938244
n01978455
n03047690
n04252077
n02487347
n03141823
n02666196
n02123045
n02486410
n02492660
n03796401
n02112350
n07730033
n03950228
n04162706
n02895154
n02105641
n03404251
n02007558
n01739381
n02481823
n04409515
n02443114
n02879718
n03345487
n02268853
n12620546
n03930313
n04380533
n01518878
n04596742
n03680355
n02074367
n01667778
n03376595
n04366367
n02097047
n02101006
n01873310
n03876231
n04507155
n02086910
n04370456
n02687172
n03724870
n02966193
n02776631
n03089624
n04456115
n03325584
n01770081
n04428191
n01667778
n02132136
n02105162
n03743016
n04367480
n02098105
n03000134
n02100236
n02011460
n02097047
n02177972
n04493381
n03874293
n02017213
n03908714
n02361337
n02669723
n02119022
n02105505
n03884397
n02190166
n03216828
n02410509
n02101556
n02098286
n03250847
n02117135
n03929660
n04332243
n03891332
n02018207
n01498041
n03977966
n02892767
n03781244
n02094433
n02112137
n02910353
n03791053
n01773157
n03599486
n11939491
n01496331
n02950826
n09246464
n02099429
n02108551
n02895154
n09229709
n07932039
n03721384
n03529860
n02113186
n03929660
n02086646
n02787622
n02676566
n02006656
n02104365
n03045698
n03100240
n03599486
n03924679
n03937543
n02869837
n02123394
n01980166
n04355933
n03133878
n03709823
n06794110
n02110341
n01796340
n02978881
n03495258
n03452741
n02091032
n04442312
n04118776
n01630670
n03662601
n02174001
n04606251
n02107142
n03814906
n03457902
n02085782
n03598930
n02094258
n03000247
n02966193
n02489166
n04367480
n02110063
n07753275
n07715103
n04485082
n03075370
n02098105
n13054560
n02730930
n03670208
n02281787
n04462240
n02510455
n02814860
n04482393
n03498962
n09229709
n02097130
n04265275
n04004767
n02093647
n01443537
n01704323
n02096437
n03394916
n04423845
n02108422
n03706229
n02869837
n01737021
n03930313
n04039381
n02113186
n02403003
n02037110
n03637318
n02823750
n01677366
n02093256
n02096294
n06596364
n03220513
n02106030
n02917067
n02090622
n04141076
n01749939
n02981792
n02111889
n02116738
n09246464
n02791124
n02091244
n02119022
n02445715
n03216828
n03095699
n03481172
n04442312
n02802426
n09428293
n03065424
n02363005
n12057211
n02422106
n02999410
n03207743
n03786901
n02363005
n02417914
n01698640
n03063599
n04409515
n03891251
n03794056
n02101388
n04044716
n02226429
n01818515
n01558993
n02110806
n03337140
n03627232
n04204238
n07873807
n03930630
n04311174
n01616318
n04330267
n04179913
n04501370
n02687172
n02086079
n03976467
n03950228
n01773797
n03197337
n02640242
n01440764
n02342885
n02389026
n02895154
n02056570
n04584207
n03042490
n09421951
n01616318
n03384352
n07248320
n03590841
n03903868
n02129165
n02123159
n03837869
n03630383
n02119789
n07768694
n02102973
n03788195
n01682714
n02130308
n03495258
n03770439
n02398521
n02965783
n02033041
n02088094
n02939185
n01914609
n04147183
n03720891
n02105641
n01843383
n01818515
n02730930
n02109961
n04398044
n04131690
n01914609
n03481172
n04317175
n03344393
n04557648
n02120505
n02109961
n02128385
n02391049
n03041632
n09246464
n03666591
n02111129
n02974003
n02643566
n03492542
n02090622
n02389026
n01735189
n03478589
n03785016
n03854065
n03207743
n04399382
n02108422
n04428191
n07760859
n03888605
n02704792
n03697007
n03657121
n04141975
n04008634
n02799071
n02018795
n02877765
n07613480
n11939491
n02108089
n02098413
n01440764
n01776313
n03804744
n01817953
n02788148
n03400231
n03899768
n02027492
n02028035
n02087394
n04392985
n01944390
n04204238
n03995372
n02437616
n03000684
n03146219
n01496331
n02128925
n02025239
n03903868
n06596364
n01990800
n03877845
n02704792
n01773549
n03271574
n02667093
n01514668
n02089867
n02410509
n09193705
n04204238
n02110806
n02823428
n01807496
n07753592
n02835271
n04579432
n03763968
n01667114
n01770393
n02364673
n03777568
n04204238
n04252077
n01496331
n02877765
n01532829
n02640242
n04483307
n04332243
n03197337
n02094433
n03995372
n03485407
n02085782
n04591157
n07930864
n02086079
n01983481
n04162706
n02981792
n02447366
n03733805
n02097298
n04120489
n04442312
n07714990
n02823428
n02788148
n02791270
n11879895
n03776460
n02834397
n03657121
n02423022
n03785016
n03888257
n02018207
n01742172
n04154565
n02536864
n03447721
n02229544
n04540053
n04266014
n03457902
n03425413
n02504013
n02107312
n02177972
n02489166
n04330267
n03791053
n04311004
n02422699
n02319095
n04606251
n04229816
n02101556
n04592741
n03666591
n02088094
n02017213
n03759954
n02128925
n03544143
n03188531
n03459775
n04254680
n03496892
n02483362
n02906734
n07753275
n02879718
n02641379
n02814860
n03400231
n02966687
n09246464
n02114712
n02087046
n02115913
n03424325
n03529860
n01943899
n04238763
n03146219
n02747177
n02233338
n13044778
n03109150
n02112350
n03180011
n02091831
n03134739
n03133878
n01740131
n02125311
n02398521
n02219486
n04086273
n02091244
n02099849
n02119789
n04039381
n02094114
n04562935
n03938244
n07693725
n12998815
n04542943
n02389026
n03417042
n01440764
n02095889
n02090379
n02493509
n02672831
n01534433
n02794156
n02396427
n02117135
n03782006
n04336792
n03042490
n03075370
n02488291
n04332243
n02708093
n02097209
n02356798
n03837869
n04355338
n03584829
n03041632
n06359193
n03041632
n03888257
n03717622
n04235860
n04275548
n01592084
n03388549
n01669191
n07760859
n02090622
n01440764
n01729322
n02480495
n07871810
n04505470
n04418357
n03404251
n03676483
n02165105
n04008634
n03958227
n02480855
n02823750
n07579787
n02009912
n07734744
n03372029
n01440764
n02102177
n03840681
n07753275
n03026506
n01601694
n03047690
n02086079
n02979186
n02089078
n02397096
n12985857
n02808304
n04118538
n04229816
n09428293
n07880968
n04548280
n03804744
n01622779
n02110063
n02814860
n02128385
n01824575
n01496331
n04286575
n03599486
n03857828
n03866082
n03495258
n02526121
n02098105
n02102973
n03124043
n04357314
n07768694
n03000134
n03970156
n04040759
n02112706
n04008634
n04040759
n06794110
n02086646
n02066245
n03884397
n03967562
n04125021
n02910353
n02236044
n01981276
n07871810
n02099849
n03146219
n04146614
n09193705
n02113023
n02100236
n13044778
n03584829
n03180011
n02027492
n03240683
n02526121
n01494475
n02492660
n01774750
n07768694
n02113712
n03666591
n12998815
n03657121
n02110806
n03717622
n02087394
n02692877
n02497673
n04507155
n02114855
n04332243
n02100877
n04332243
n02110627
n03424325
n02104365
n01943899
n03535780
n02883205
n01667778
n01986214
n02666196
n02966687
n02097658
n03866082
n04239074
n02488702
n01735189
n04090263
n04008634
n03742115
n03877472
n03788195
n03794056
n01768244
n02797295
n02009229
n03085013
n02119789
n04557648
n02099267
n03424325
n03666591
n01667778
n07875152
n01514668
n02492660
n03482405
n04033901
n04044716
n03290653
n12057211
n02981792
n01496331
n02483362
n03314780
n04099969
n02669723
n02113799
n02074367
n02094258
n03866082
n04540053
n02777292
n03782006
n02105251
n03761084
n01955084
n02643566
n02106662
n01580077
n01828970
n02690373
n03063599
n02114548
n03014705
n03724870
n02088364
n07716358
n03724870
n03937543
n02091635
n02106382
n07613480
n13133613
n04591157
n02396427
n03776460
n02108089
n02017213
n04350905
n02107683
n04228054
n01773549
n03888257
n02488291
n04493381
n01817953
n01641577
n02012849
n01797886
n02787622
n02910353
n04067472
n03100240
n02087046
n03733131
n02643566
n02916936
n02480495
n02815834
n02086079
n02814860
n02114712
n07742313
n01728920
n02356798
n13044778
n01798484
n04613696
n02108915
n02109047
n03272010
n04008634
n02097209
n01843065
n02999410
n04086273
n03888257
n02123394
n04356056
n09468604
n01601694
n03950228
n04344873
n02672831
n12768682
n02110341
n10148035
n02114367
n04409515
n03240683
n04285008
n07831146
n03584254
n01855672
n02489166
n03216828
n03297495
n04086273
n01514859
n01629819
n02643566
n02113023
n02791270
n03983396
n07880968
n02268853
n03970156
n02091831
n02268853
n02167151
n03742115
n03947888
n04591157
n03729826
n02988304
n03717622
n02391049
n02096585
n02219486
n02093647
n02002556
n02504458
n01665541
n03938244
n03776460
n02093256
n02056570
n02096051
n02488702
n07693725
n01796340
n02950826
n01828970
n03534580
n03394916
n04404412
n03895866
n01944390
n04554684
n02444819
n03623198
n04263257
n04099969
n02105855
n03584829
n04442312
n01514668
n02088364
n01943899
n02091831
n02071294
n03461385
n04485082
n01630670
n01873310
n02011460
n02113978
n01629819
n07711569
n04023962
n01631663
n02815834
n01797886
n03662601
n02704792
n02494079
n02124075
n03530642
n03424325
n02974003
n01685808
n02086910
n04004767
n03720891
n04200800
n01755581
n04118776
n02058221
n03124170
n03584829
n01978455
n02100583
n03131574
n03467068
n02490219
n02978881
n02096051
n04254120
n03028079
n04371774
n02105641
n02397096
n04258138
n03297495
n02108000
n02096585
n02090721
n02786058
n02025239
n01784675
n03393912
n01755581
n02437616
n02219486
n03388549
n02769748
n03384352
n03998194
n02699494
n04277352
n03637318
n02415577
n03788365
n01943899
n02009229
n04325704
n04532670
n01498041
n03793489
n04141076
n04525038
n04548362
n02012849
n02093754
n03534580
n04532670
n02859443
n02027492
n04070727
n03673027
n11879895
n02643566
n04606251
n04613696
n03680355
n01860187
n04251144
n01739381
n02098413
n04019541
n02101556
n03201208
n04532106
n02879718
n02951585
n04604644
n04275548
n02097474
n03482405
n07734744
n03868242
n04332243
n04589890
n03788365
n03649909
n02090721
n02672831
n02109525
n02112018
n07615774
n02102480
n03125729
n01632458
n04252225
n01824575
n02666196
n03832673
n02105641
n07768694
n03871628
n03127925
n03344393
n02096177
n03887697
n03424325
n03014705
n03796401
n03617480
n04065272
n03982430
n04479046
n03763968
n02486410
n07742313
n02687172
n03794056
n04254680
n03661043
n02837789
n02454379
n01560419
n04443257
n07613480
n02110806
n01818515
n02099712
n03384352
n04366367
n03676483
n02892767
n02110627
n02096294
n01667778
n02870880
n03425413
n01751748
n04275548
n03187595
n02437312
n03623198
n01796340
n09472597
n04523525
n02486261
n01531178
n02493509
n02979186
n03584829
n03924679
n02099601
n03259280
n04229816
n01872401
n04579432
n01855672
n01622779
n02509815
n04525305
n04131690
n02484975
n09193705
n02097658
n02877765
n02749479
n06596364
n01806567
n02093428
n01773157
n03207941
n03947888
n01818515
n02092339
n02276258
n03207743
n02794156
n02106166
n03529860
n04493381
n02086079
n02011460
n03961711
n03680355
n04263257
n01819313
n02102177
n04254120
n03888257
n03729826
n04136333
n04346328
n02107908
n02447366
n03125729
n03476684
n02443114
n03788195
n03710637
n03657121
n03633091
n03141823
n07802026
n02113978
n01665541
n01744401
n02834397
n03633091
n04335435
n02011460
n02099712
n03527444
n03180011
n02408429
n02123394
n03980874
n04070727
n03445777
n04465501
n03530642
n03291819
n04252077
n01689811
n02058221
n02112137
n01950731
n01682714
n02231487
n07684084
n03481172
n02963159
n07768694
n03977966
n02165456
n02939185
n04258138
n02123045
n02128757
n02037110
n02128925
n02483362
n03483316
n04273569
n04208210
n03942813
n03291819
n03467068
n02091467
n02113624
n03950228
n03786901
n04228054
n03649909
n01629819
n02104365
n02865351
n02097047
n03902125
n02231487
n04033995
n02172182
n01632777
n02494079
n02391049
n02093256
n03992509
n03710721
n03272010
n03124043
n02422699
n02492035
n02410509
n04120489
n02793495
n03594734
n03841143
n03124043
n04265275
n02088466
n02123159
n03461385
n01675722
n02965783
n07753113
n07614500
n04154565
n03590841
n02361337
n07720875
n01843383
n04162706
n02134418
n03271574
n01494475
n01729977
n01689811
n01582220
n02655020
n03594945
n02099712
n02110627
n02441942
n02791124
n02007558
n03891332
n02791270
n02037110
n02127052
n01910747
n01829413
n04523525
n02417914
n04465501
n01860187
n03935335
n03908714
n02018207
n02006656
n07802026
n03950228
n07590611
n02092002
n04423845
n02790996
n04252225
n03666591
n02109961
n03930630
n02860847
n04552348
n02092339
n09229709
n02791270
n07579787
n03196217
n02500267
n02790996
n01622779
n02484975
n02669723
n02280649
n11879895
n03769881
n02167151
n02403003
n03717622
n02093991
n03942813
n04254680
n04443257
n01860187
n09229709
n02028035
n02087394
n01986214
n02115641
n02640242
n04328186
n03908618
n04154565
n02797295
n02097209
n02125311
n07932039
n02102973
n03529860
n01980166
n02443114
n03733131
n07718472
n03255030
n02009912
n02087394
n03218198
n02106550
n03888605
n01704323
n02091635
n03710721
n02325366
n02112350
n03207743
n03980874
n03042490
n07590611
n02096051
n02408429
n02091244
n03773504
n01491361
n02120505
n02607072
n02487347
n02504458
n04204347
n02037110
n02790996
n02107312
n04044716
n02002556
n02727426
n04606251
n02091831
n03598930
n03089624
n01807496
n07613480
n04404412
n04542943
n09229709
n03467068
n01943899
n11939491
n02086646
n02095314
n02328150
n02992529
n02281787
n04008634
n07697313
n03347037
n02012849
n02099429
n04179913
n02106662
n03841143
n07768694
n07880968
n02111129
n04456115
n04330267
n01629819
n04146614
n03710193
n03250847
n02808304
n03018349
n01943899
n02398521
n03388549
n02097658
n03529860
n02782093
n01592084
n04311174
n02823750
n04067472
n02422699
n03832673
n04367480
n04557648
n02051845
n01882714
n02012849
n03796401
n01735189
n09256479
n03529860
n11939491
n03673027
n01669191
n03742115
n02692877
n02328150
n07715103
n02268443
n02268853
n01770393
n07718747
n07714571
n01695060
n01843065
n03404251
n02823750
n04264628
n03478589
n02643566
n01514859
n02086646
n01692333
n03841143
n03977966
n04136333
n02089973
n02097298
n04311174
n01677366
n01930112
n02128925
n03710721
n02909870
n02027492
n04252077
n03544143
n09332890
n04118776
n04553703
n02488702
n02109525
n04443257
n01728572
n03384352
n04136333
n07718472
n03773504
n04273569
n02730930
n02259212
n03125729
n01748264
n03095699
n02504458
n04579432
n02231487
n04442312
n03447447
n02939185
n02110341
n04458633
n03492542
n02841315
n04285008
n02787622
n01514668
n03877472
n04486054
n04238763
n02480495
n07871810
n01968897
n03954731
n03584829
n03379051
n02123394
n03259280
n07920052
n02113712
n02092002
n02727426
n04149813
n01775062
n03457902
n03791053
n02106550
n09288635
n01742172
n02219486
n04332243
n02490219
n04033901
n03590841
n04344873
n07753592
n02085936
n03447721
n01580077
n02120505
n02504458
n03633091
n02113023
n02109525
n11879895
n03445924
n01882714
n02089867
n04604644
n03697007
n02814533
n02094114
n01631663
n02105251
n02948072
n04200800
n01820546
n03125729
n03290653
n02102480
n04525038
n03347037
n03950228
n02319095
n03160309
n03787032
n02107574
n04487394
n04548280
n07697537
n01580077
n03599486
n04599235
n01735189
n04612504
n02786058
n03000247
n02906734
n13054560
n02132136
n02939185
n02101006
n04141975
n04127249
n07565083
n01641577
n02017213
n02095889
n02096585
n03461385
n02231487
n04493381
n02092339
n04332243
n02497673
n02119022
n02099601
n04311004
n03920288
n02704792
n02091032
n03240683
n03538406
n04560804
n01440764
n02776631
n02013706
n02099849
n01532829
n02110341
n01944390
n03218198
n02099712
n04429376
n03249569
n02422106
n04254777
n04009552
n03617480
n03337140
n01692333
n02493509
n12144580
n03095699
n03781244
n03782006
n02099429
n09428293
n04179913
n02105251
n07716358
n04357314
n03895866
n02948072
n03888257
n03447447
n07248320
n01537544
n02487347
n03982430
n02910353
n07892512
n09468604
n03857828
n03290653
n03388043
n03843555
n04423845
n04404412
n04347754
n01537544
n02992529
n02101388
n02056570
n02093859
n02105412
n03933933
n02704792
n03063599
n12267677
n04482393
n01443537
n03670208
n04590129
n07565083
n04111531
n03188531
n02114712
n04409515
n03272010
n02107312
n02112018
n03676483
n03770439
n13133613
n04259630
n02105641
n04049303
n02807133
n03249569
n02099267
n04065272
n07716906
n02087394
n01669191
n04376876
n01847000
n02123597
n04131690
n02033041
n04357314
n01530575
n02841315
n01698640
n04179913
n01824575
n02092002
n02058221
n03617480
n04146614
n02097130
n09399592
n02892201
n02116738
n04204347
n04522168
n04136333
n01531178
n02346627
n02168699
n01980166
n07711569
n03347037
n04208210
n02823750
n02124075
n02509815
n03404251
n02088364
n01798484
n02009912
n03814639
n02172182
n03840681
n02002556
n03888257
n03065424
n03325584
n02317335
n02281406
n03658185
n02095570
n03920288
n03710637
n02123597
n03877472
n04357314
n07802026
n04067472
n02437616
n03482405
n01532829
n04553703
n03065424
n02058221
n07718472
n04252225
n02096585
n02097658
n04525305
n12057211
n04259630
n02490219
n04285008
n01534433
n01622779
n04067472
n04557648
n03888257
n02096051
n01632458
n02808304
n12985857
n01756291
n02111500
n02963159
n02790996
n03630383
n07714990
n04589890
n02128757
n02786058
n02951358
n03763968
n02356798
n01818515
n02607072
n07717410
n03877472
n04069434
n02483362
n04479046
n02268853
n10148035
n02815834
n02116738
n04501370
n03131574
n02099712
n02108915
n04209239
n03770439
n02226429
n12144580
n02906734
n02783161
n02667093
n04239074
n02110063
n01582220
n07768694
n01774750
n03787032
n12057211
n03764736
n01795545
n03623198
n01443537
n02892201
n03868242
n03384352
n02403003
n03658185
n03485794
n02085782
n04328186
n03388183
n04344873
n07716358
n02097047
n01737021
n01695060
n02098286
n04258138
n03127747
n07565083
n01667114
n03929660
n03476684
n03785016
n04041544
n02100236
n03854065
n03529860
n02097209
n02100236
n04540053
n02002556
n03495258
n02834397
n04346328
n03485407
n02835271
n01729977
n02802426
n03781244
n02793495
n02892767
n02086240
n02490219
n02119022
n06359193
n03207743
n01980166
n04467665
n04332243
n03598930
n04523525
n03877472
n03976657
n02256656
n02097130
n02606052
n04037443
n02793495
n03929855
n04118776
n02727426
n01833805
n02536864
n03710721
n03459775
n04311004
n02113712
n02480495
n03041632
n02966193
n03476684
n07716358
n04310018
n07579787
n02493793
n02094433
n07734744
n01744401
n03770679
n04523525
n02364673
n03355925
n07715103
n02403003
n01644900
n01518878
n02815834
n04251144
n02690373
n02124075
n04553703
n04081281
n02408429
n01704323
n02640242
n03478589
n04447861
n07875152
n04209133
n07734744
n04487081
n02177972
n02892767
n02113624
n03016953
n07753275
n02319095
n07745940
n02108000
n02028035
n02504458
n02106550
n07754684
n03063599
n03787032
n02098105
n03467068
n02089867
n02093428
n07718747
n07831146
n03496892
n03961711
n01924916
n01883070
n01704323
n03733281
n03791053
n02930766
n03478589
n01980166
n01985128
n09472597
n03967562
n02087394
n01914609
n02497673
n03924679
n03706229
n02108089
n15075141
n03977966
n07715103
n03187595
n02236044
n04599235
n03529860
n04023962
n02092339
n02977058
n07584110
n07730033
n03272010
n03676483
n02493509
n09468604
n02091467
n03534580
n03125729
n04467665
n01665541
n04330267
n02917067
n03196217
n02009229
n03042490
n01632458
n03100240
n02965783
n02172182
n03920288
n03109150
n07747607
n02093859
n02655020
n03658185
n03584254
n02110806
n04596742
n02113799
n01530575
n03345487
n02917067
n03788195
n02105162
n15075141
n04317175
n04251144
n02112018
n04326547
n03838899
n01955084
n02417914
n02099849
n02317335
n03095699
n02699494
n04554684
n03729826
n04005630
n02108422
n03127925
n02123045
n03832673
n02504013
n01806567
n04069434
n04023962
n04111531
n02097209
n02105056
n02097209
n03376595
n02095314
n01756291
n03773504
n01980166
n06794110
n04074963
n02747177
n02108551
n03255030
n03891251
n03935335
n03673027
n02111277
n03188531
n02100236
n02992529
n02607072
n02095889
n02002556
n02834397
n02134084
n07716906
n02804414
n02134084
n04008634
n02509815
n04254120
n04147183
n04204238
n03908714
n04162706
n03197337
n11879895
n03787032
n04111531
n02978881
n02102177
n03379051
n04371774
n01704323
n03710721
n01518878
n03016953
n02106382
n04540053
n01558993
n02105412
n02981792
n03028079
n03782006
n02086079
n04192698
n02233338
n03649909
n03496892
n02276258
n03832673
n04070727
n03899768
n03017168
n03485794
n04591157
n02493509
n02093754
n02107683
n04208210
n02992529
n03124043
n03876231
n03691459
n01667778
n07730033
n04252225
n04208210
n02860847
n01742172
n02094114
n03000134
n07860988
n01775062
n03958227
n03045698
n03759954
n02086240
n03676483
n04532670
n02100583
n02793495
n01855032
n04275548
n04409515
n03733131
n03710193
n07760859
n03854065
n01629819
n02840245
n03691459
n03452741
n03297495
n03877472
n02125311
n04037443
n02526121
n01698640
n04591713
n02860847
n02412080
n01728572
n04152593
n02879718
n02699494
n02115913
n03000134
n02326432
n02966193
n04326547
n04049303
n04501370
n07590611
n02088466
n01665541
n03141823
n02037110
n02110958
n03481172
n07860988
n02509815
n02869837
n03930313
n03492542
n02480855
n02486261
n03495258
n03478589
n03063599
n04525038
n02109525
n02787622
n01592084
n02437616
n13040303
n04118776
n02104365
n02927161
n03532672
n03814639
n01910747
n01737021
n03877845
n07579787
n09288635
n01981276
n03133878
n02667093
n02747177
n02500267
n04370456
n01601694
n03769881
n04372370
n02114712
n02326432
n03134739
n03041632
n01685808
n02233338
n01614925
n03982430
n03929855
n04069434
n04367480
n03961711
n03201208
n02092002
n04370456
n04376876
n02395406
n03717622
n04317175
n02088094
n02950826
n01697457
n04591157
n01784675
n03930630
n04251144
n02802426
n07697537
n01689811
n12998815
n04550184
n04486054
n01667778
n03916031
n01795545
n02790996
n01910747
n02085936
n03938244
n03976467
n02325366
n03527444
n02268443
n03290653
n03444034
n02105056
n02096437
n03457902
n03843555
n02500267
n02088094
n02769748
n04525038
n02606052
n04487081
n02486261
n03492542
n03733131
n02120505
n07745940
n02112137
n07579787
n02105505
n03452741
n10148035
n04125021
n04026417
n02089867
n03995372
n02177972
n03903868
n04409515
n01943899
n02100236
n03124170
n03197337
n02361337
n04325704
n03920288
n03825788
n02101388
n11879895
n03443371
n02071294
n07880968
n03769881
n03902125
n02110806
n03637318
n04019541
n03840681
n02342885
n03476684
n02094114
n04023962
n03706229
n02730930
n02877765
n04548362
n02088632
n04285008
n07873807
n03903868
n04501370
n04118538
n02025239
n03530642
n02018207
n03476684
n03602883
n02948072
n02102040
n02123394
n01944390
n02268853
n04590129
n01530575
n02117135
n03691459
n02504013
n03179701
n04357314
n04399382
n03218198
n02865351
n03598930
n02113978
n03697007
n01843383
n02074367
n02264363
n01742172
n02123045
n02795169
n03721384
n02129165
n03544143
n04522168
n12985857
n02814860
n02110958
n02100735
n13044778
n02817516
n07730033
n04429376
n04033995
n04367480
n03729826
n02493793
n04141975
n01740131
n01914609
n02134418
n01739381
n02687172
n02483362
n13037406
n01742172
n02396427
n02397096
n01689811
n09399592
n04347754
n02865351
n04344873
n02111889
n02939185
n04033995
n02037110
n01773157
n03599486
n02093647
n01532829
n02097209
n02492660
n04009552
n04033901
n02099429
n02056570
n02098413
n02992211
n03788195
n03207743
n03444034
n03814639
n04485082
n01981276
n01978455
n03461385
n01688243
n02277742
n03388043
n02871525
n02101556
n03131574
n02236044
n07248320
n03041632
n02095314
n04344873
n02119022
n02172182
n13054560
n01978287
n03532672
n04536866
n02105412
n04118538
n02443484
n01695060
n02909870
n02441942
n02017213
n02799071
n04147183
n04589890
n02056570
n02486261
n03345487
n04328186
n02328150
n04476259
n04346328
n04273569
n03290653
n03627232
n02791124
n02012849
n02259212
n02090379
n03627232
n03764736
n02817516
n04326547
n03065424
n02909870
n01675722
n04522168
n13133613
n02655020
n04209133
n02783161
n03796401
n03250847
n01872401
n01682714
n01873310
n01631663
n04005630
n02843684
n02769748
n02804610
n03782006
n01978455
n02097298
n02787622
n07716906
n02111129
n02123045
n02279972
n02497673
n02980441
n02111129
n03297495
n04487081
n04370456
n01667778
n03710193
n02096294
n03876231
n03938244
n02950826
n04311174
n04081281
n01687978
n04371774
n06794110
n02281406
n04326547
n02395406
n02096051
n02113186
n04070727
n02206856
n02690373
n01729977
n03000684
n01514859
n03197337
n03445924
n04604644
n02280649
n02090379
n02012849
n01534433
n07734744
n03838899
n02177972
n04423845
n03899768
n02098105
n03633091
n02701002
n04371430
n02114367
n03947888
n01820546
n02088238
n03929855
n04612504
n02963159
n02966193
n02037110
n03982430
n02107574
n02966193
n04355933
n03372029
n02113978
n04398044
n02087046
n02106166
n04465501
n03179701
n10565667
n03492542
n01735189
n02120079
n02105251
n01873310
n02110063
n03388183
n02444819
n02687172
n01871265
n02445715
n04590129
n12985857
n01819313
n03938244
n02443114
n04380533
n04277352
n02444819
n02536864
n02111277
n02948072
n03938244
n07753113
n01440764
n09193705
n02509815
n01770393
n01828970
n03794056
n03902125
n02097474
n07714571
n02107908
n01698640
n04590129
n02481823
n04418357
n02504013
n02815834
n01530575
n03131574
n02104365
n04204238
n02454379
n04147183
n02077923
n02488291
n02342885
n02097474
n07716358
n03337140
n04417672
n01694178
n04311004
n06785654
n07768694
n04149813
n01560419
n03970156
n04125021
n09428293
n04258138
n03720891
n04086273
n02804610
n03642806
n03133878
n02974003
n01629819
n03983396
n04154565
n02483362
n04019541
n03065424
n04040759
n06596364
n04131690
n01770393
n04550184
n02120079
n03255030
n02326432
n03344393
n12985857
n01675722
n01729322
n02112137
n04398044
n02013706
n04162706
n04069434
n03630383
n02840245
n01644900
n03680355
n04229816
n09193705
n02788148
n04462240
n03775546
n06596364
n02090721
n03388183
n04252077
n03042490
n01843065
n02111129
n01616318
n04409515
n10148035
n01677366
n02655020
n02107683
n02105162
n03888257
n02128925
n03868863
n04069434
n01773797
n03792782
n03792782
n01560419
n07742313
n13054560
n02981792
n03916031
n03623198
n04146614
n11879895
n01675722
n02097130
n04423845
n02089973
n04592741
n01968897
n07718747
n02992529
n07753275
n07745940
n02108422
n02804414
n02342885
n03379051
n02457408
n02437312
n03787032
n02091032
n02002556
n03666591
n03717622
n07831146
n03208938
n02840245
n03891332
n04589890
n03887697
n04141076
n03770439
n02113023
n02009912
n02823750
n04252077
n02396427
n02099601
n02279972
n01843383
n02749479
n04228054
n04590129
n01773797
n02027492
n02093428
n02259212
n01910747
n02088364
n02093754
n07860988
n02093428
n01494475
n03888605
n04589890
n02092339
n07584110
n02190166
n02096051
n04023962
n02484975
n03980874
n02870880
n01807496
n02090721
n02011460
n02033041
n01514668
n02094114
n02687172
n02013706
n04523525
n07718747
n02361337
n07720875
n04005630
n04509417
n07613480
n01622779
n03131574
n01631663
n02701002
n03014705
n02607072
n01560419
n03197337
n09193705
n02099849
n03000134
n02480495
n03733805
n07802026
n01749939
n03956157
n01955084
n03445777
n02927161
n02105162
n02088238
n06794110
n09332890
n02823428
n03773504
n03657121
n04044716
n07760859
n03207941
n07717410
n01664065
n03291819
n01580077
n02132136
n01687978
n09332890
n04590129
n04487081
n03838899
n01981276
n03899768
n04004767
n03207743
n02106166
n07873807
n04039381
n03388549
n03977966
n03384352
n02114367
n07695742
n02105412
n04591157
n01729322
n02066245
n03938244
n03240683
n07880968
n03782006
n02086646
n01632777
n02793495
n02281406
n02443484
n03208938
n04350905
n03179701
n03658185
n02480855
n01737021
n09256479
n04357314
n03424325
n02807133
n01855032
n01828970
n03980874
n02107683
n03895866
n07768694
n02090721
n02110958
n02669723
n04599235
n02105641
n02692877
n02927161
n01582220
n02325366
n04039381
n02790996
n07760859
n02114712
n02099712
n04275548
n04366367
n02687172
n02113624
n02454379
n04120489
n03785016
n02279972
n04209239
n01677366
n01682714
n01601694
n02483708
n07718747
n04344873
n02483362
n07717556
n01981276
n02699494
n03160309
n02123597
n03970156
n01669191
n01756291
n02606052
n02795169
n03478589
n02259212
n06785654
n02114712
n04311174
n03891332
n04443257
n01687978
n04259630
n02128925
n02526121
n03447721
n04239074
n03877472
n03710637
n07711569
n04153751
n01682714
n03598930
n04131690
n01819313
n02085620
n02113023
n03133878
n07768694
n04579432
n04532670
n03976467
n04326547
n02951358
n02279972
n03000247
n03837869
n09288635
n03196217
n03733805
n02111889
n04286575
n01985128
n02105056
n02783161
n03902125
n02643566
n04553703
n03787032
n02799071
n02137549
n03445777
n03240683
n02093256
n01847000
n01978455
n02089973
n03482405
n06874185
n02280649
n02129604
n02892767
n02480495
n02106662
n12144580
n03599486
n02066245
n02454379
n01873310
n03690938
n02389026
n02264363
n02966193
n02500267
n03538406
n01843065
n04254680
n04346328
n03961711
n03970156
n03207941
n03791053
n02085936
n03954731
n03857828
n02807133
n02443114
n02219486
n03670208
n04263257
n03110669
n01795545
n03467068
n02115913
n02119789
n04487081
n02791124
n04201297
n04265275
n01784675
n02814533
n02417914
n07932039
n02606052
n01768244
n04311004
n03662601
n02607072
n01773549
n02085620
n02730930
n04347754
n02051845
n01914609
n03729826
n02129165
n01537544
n03888605
n03764736
n04579145
n01630670
n01950731
n03599486
n03786901
n04243546
n04040759
n03594945
n01632458
n02823750
n04442312
n02859443
n01629819
n04254777
n04039381
n01641577
n04553703
n03443371
n04467665
n03991062
n02219486
n02799071
n04026417
n03930313
n02096585
n03534580
n07753113
n03868863
n01773549
n03720891
n02727426
n02096177
n03272562
n02100236
n03450230
n03697007
n02927161
n01798484
n02865351
n01631663
n02100236
n03871628
n03394916
n03983396
n03908714
n02641379
n07892512
n01877812
n01824575
n02106030
n02100583
n03424325
n02106166
n01682714
n04456115
n01784675
n03868242
n02100877
n04033901
n04266014
n04332243
n02443114
n04487081
n01774750
n02129165
n01984695
n03769881
n02422106
n04328186
n02108915
n02088364
n02795169
n01773157
n03063689
n04326547
n01644900
n09229709
n02133161
n03016953
n02085620
n07565083
n02317335
n04485082
n02125311
n04591157
n02396427
n04347754
n02129604
n02422699
n02123597
n03388183
n03590841
n02807133
n03676483
n03255030
n02174001
n04536866
n02104029
n02817516
n02087046
n02085782
n02115641
n02086910
n02834397
n03201208
n02086240
n02454379
n02422699
n02106662
n04560804
n02699494
n02871525
n04591157
n04149813
n03920288
n02099267
n02105412
n01667778
n03535780
n02085936
n03344393
n03871628
n02268853
n02276258
n03773504
n04505470
n02895154
n01740131
n02101388
n01847000
n04111531
n02280649
n04509417
n01496331
n02264363
n02109525
n03372029
n03903868
n01796340
n02988304
n02486261
n07932039
n03841143
n02089867
n02099429
n03062245
n02799071
n03485794
n03944341
n02090379
n04370456
n04125021
n03929855
n02110063
n02794156
n04141076
n02085936
n04606251
n02099712
n01773549
n02992529
n03347037
n02120505
n02727426
n03483316
n04479046
n03544143
n03888605
n04548362
n13037406
n04044716
n02259212
n02835271
n01797886
n02823428
n04086273
n02127052
n03133878
n03733281
n02676566
n02667093
n04026417
n07932039
n04252077
n03976467
n04366367
n03443371
n04346328
n02112018
n03781244
n03459775
n03876231
n01534433
n03017168
n02808304
n07730033
n02169497
n02514041
n04458633
n02002556
n03980874
n03131574
n01807496
n04330267
n01773549
n02123159
n04204347
n02395406
n02321529
n03124043
n03617480
n01910747
n01784675
n03733131
n07875152
n04599235
n09428293
n07565083
n02206856
n03127747
n02086240
n04146614
n04532670
n03259280
n02104365
n01855032
n04366367
n02977058
n02444819
n02088632
n04562935
n03891251
n07718747
n02783161
n03929855
n01872401
n07693725
n02859443
n04370456
n02259212
n02231487
n04065272
n02361337
n02395406
n02094433
n01833805
n02097474
n03868242
n04041544
n02493793
n02174001
n02085620
n12620546
n02412080
n02808440
n02489166
n04069434
n03763968
n03721384
n04522168
n03527444
n04147183
n02277742
n03743016
n02490219
n01443537
n01534433
n02965783
n02106382
n02007558
n03908618
n04357314
n02108089
n01980166
n03642806
n04090263
n02093256
n02841315
n01695060
n04152593
n04532670
n04201297
n03476684
n02236044
n02769748
n03187595
n02841315
n04081281
n07873807
n04548362
n03595614
n04532670
n03047690
n04552348
n01806143
n04542943
n07717556
n03782006
n02107574
n04118776
n04523525
n04141327
n03000684
n02124075
n02667093
n03976467
n02965783
n06785654
n04548280
n03840681
n04243546
n03447721
n03720891
n03825788
n02791270
n02870880
n03535780
n02165456
n02132136
n04044716
n03970156
n03692522
n01744401
n04418357
n02167151
n02790996
n03903868
n02860847
n02417914
n01985128
n02281787
n10148035
n02974003
n03777754
n03445777
n04532106
n02085782
n03452741
n03670208
n03866082
n02105162
n03220513
n03529860
n04376876
n01440764
n03498962
n02687172
n01665541
n04344873
n02489166
n03384352
n02443484
n03976657
n04540053
n01817953
n02098105
n02655020
n01756291
n02099267
n04141327
n07734744
n03690938
n02133161
n10148035
n03461385
n03840681
n02099267
n03908618
n02483708
n03710637
n02804610
n02906734
n07836838
n03930313
n02786058
n01795545
n02804610
n02095570
n03447721
n04311004
n04229816
n04208210
n03710193
n03584829
n04355338
n03146219
n02085620
n04522168
n02106030
n03908618
n02113624
n04429376
n02100877
n02894605
n02088632
n02490219
n02264363
n04204238
n07717556
n02699494
n13040303
n02782093
n04238763
n03935335
n02111889
n04147183
n02089078
n03598930
n04131690
n01534433
n04039381
n02113023
n03649909
n02804610
n02950826
n07695742
n03899768
n03662601
n02100877
n06359193
n04270147
n03527444
n04023962
n03207743
n03691459
n02086646
n04456115
n04335435
n04493381
n03355925
n02128757
n03710637
n02749479
n04111531
n02669723
n04591157
n02106550
n04069434
n01669191
n03496892
n01855672
n03803284
n04371774
n02965783
n01955084
n03710637
n04147183
n03792782
n04597913
n04266014
n02790996
n02099601
n03627232
n02219486
n07760859
n02877765
n07715103
n02259212
n07747607
n04376876
n01748264
n04317175
n02687172
n13037406
n02321529
n02981792
n02992211
n03891332
n01944390
n02398521
n07753275
n01687978
n03325584
n01806143
n01795545
n02256656
n13133613
n06785654
n02236044
n04033901
n02892767
n03792972
n07753592
n01580077
n03535780
n03602883
n02423022
n03599486
n02279972
n02655020
n03637318
n02108000
n03355925
n04486054
n01986214
n03014705
n04599235
n02107312
n04522168
n03782006
n02091244
n04238763
n01641577
n02268853
n07711569
n03662601
n02102318
n01677366
n02097209
n03763968
n03786901
n02509815
n02086910
n06794110
n07920052
n03379051
n02346627
n02018795
n02480495
n07711569
n04532670
n02099712
n02110806
n03759954
n02123597
n04154565
n03347037
n02077923
n02514041
n01616318
n02641379
n04086273
n02097298
n02930766
n01983481
n03995372
n03891332
n03218198
n02058221
n01729322
n02799071
n01820546
n04127249
n02834397
n02097209
n03196217
n03216828
n02096585
n04229816
n11879895
n03977966
n03876231
n03908618
n03255030
n02106662
n02488702
n02978881
n03868242
n03710721
n03494278
n02363005
n02939185
n07768694
n04505470
n02028035
n02894605
n07717410
n07745940
n04429376
n04344873
n02727426
n01753488
n02110806
n03661043
n01806567
n01955084
n03467068
n02110063
n03902125
n03450230
n01692333
n02114855
n01644900
n07742313
n07565083
n04505470
n02088364
n03733131
n02105056
n02606052
n03179701
n07715103
n02641379
n03259280
n07873807
n04584207
n02110063
n03218198
n02494079
n01644373
n04332243
n02115913
n02120079
n09229709
n02481823
n04235860
n02113799
n02823428
n04371774
n02442845
n01498041
n03944341
n09332890
n02091134
n02690373
n02788148
n02869837
n04204238
n01675722
n02236044
n02280649
n12144580
n01882714
n04120489
n02999410
n03692522
n01729322
n04532670
n03337140
n02966193
n07742313
n03793489
n04355933
n03220513
n02445715
n04443257
n04026417
n02823428
n03976467
n02102177
n03773504
n04487394
n02085936
n07614500
n02089078
n02206856
n04147183
n04501370
n02422699
n02085782
n02097130
n03929660
n01751748
n02099849
n01924916
n01692333
n04275548
n03991062
n01824575
n03218198
n02018207
n03530642
n03782006
n03697007
n07734744
n01820546
n02280649
n02115913
n04325704
n02104029
n03250847
n11879895
n03709823
n03271574
n04483307
n04525038
n02835271
n02102318
n04285008
n01491361
n01742172
n02077923
n01728572
n01914609
n03388549
n03085013
n02395406
n03868863
n04033901
n02011460
n02123159
n02391049
n04039381
n01695060
n02129165
n03944341
n04462240
n02403003
n03920288
n03649909
n04515003
n03372029
n02091467
n04372370
n02129165
n01753488
n02113712
n03445777
n04525305
n01768244
n02493509
n03743016
n12998815
n03770439
n02777292
n02097298
n01687978
n04179913
n02749479
n03627232
n03207743
n03476991
n07745940
n01883070
n03792972
n03769881
n02011460
n02870880
n02123045
n04040759
n07684084
n02111277
n01877812
n04019541
n03197337
n02494079
n03187595
n02687172
n02883205
n07754684
n09399592
n02791270
n03063689
n03902125
n02415577
n02086240
n02093991
n02802426
n03782006
n03478589
n02128385
n02894605
n02115641
n02011460
n02951358
n02128757
n02871525
n02346627
n03450230
n09229709
n02417914
n01796340
n02128925
n04486054
n02749479
n02346627
n01930112
n02091032
n02963159
n01944390
n02793495
n02018207
n04153751
n02790996
n02129165
n03538406
n02965783
n03179701
n03160309
n01644373
n01770393
n02109961
n01873310
n03085013
n01735189
n04370456
n02018207
n02018795
n02110627
n03804744
n03534580
n07760859
n01631663
n04482393
n02917067
n07753592
n03447447
n02112706
n03947888
n02927161
n04228054
n03259280
n07753275
n07753592
n02948072
n07697313
n01984695
n11879895
n02125311
n12998815
n03976657
n02096294
n04264628
n04548362
n02276258
n03891251
n03127925
n02834397
n03854065
n02979186
n07920052
n02110627
n02095314
n04049303
n02965783
n02895154
n02013706
n04044716
n03709823
n02138441
n02777292
n01943899
n07892512
n02091831
n03743016
n01514668
n04243546
n02105251
n03032252
n01855032
n04612504
n03770679
n03866082
n02091134
n03443371
n03777568
n03773504
n02480855
n07745940
n02391049
n01910747
n02277742
n03938244
n02788148
n01440764
n03425413
n03895866
n03950228
n02133161
n01843065
n02992211
n02834397
n02066245
n03337140
n07716358
n03584829
n02095314
n02093991
n02974003
n02025239
n04596742
n02916936
n01768244
n03720891
n02056570
n02102177
n04557648
n02268853
n02098105
n01514859
n04141975
n02071294
n03188531
n04254777
n03709823
n03095699
n04517823
n03733131
n07693725
n03476684
n03724870
n03983396
n02342885
n02510455
n03874293
n02823428
n04356056
n01494475
n04251144
n02894605
n02097658
n04273569
n02123045
n03250847
n01687978
n02012849
n03733131
n02096294
n02279972
n01641577
n03804744
n02871525
n04479046
n07697313
n02786058
n01924916
n07932039
n02099712
n03271574
n02488702
n02927161
n02815834
n02877765
n04560804
n03297495
n04590129
n03944341
n03980874
n02105056
n01734418
n03947888
n02363005
n06596364
n07753275
n02930766
n02093859
n03207941
n01818515
n03657121
n01629819
n03063689
n03255030
n02808440
n02981792
n09246464
n04591713
n03492542
n04517823
n03240683
n07716358
n07717556
n02814533
n01843383
n03691459
n02134418
n02110185
n02093754
n02807133
n07684084
n02091244
n03873416
n02113624
n02094433
n02917067
n03450230
n03888605
n01616318
n04435653
n02111277
n02006656
n02363005
n02497673
n07753592
n07711569
n01693334
n03954731
n04033995
n04208210
n02817516
n07754684
n02256656
n13052670
n04417672
n11939491
n02443114
n03445777
n02093859
n07684084
n03026506
n04081281
n02002724
n02317335
n03584829
n04039381
n03062245
n02091134
n07745940
n02092002
n03991062
n02843684
n03961711
n04069434
n01558993
n07745940
n04486054
n04347754
n02011460
n02808304
n02109961
n04229816
n04409515
n04116512
n03857828
n02445715
n03920288
n02488702
n03126707
n07932039
n02835271
n03445924
n01797886
n03476684
n03658185
n01943899
n02951358
n03532672
n02966193
n02988304
n02229544
n02095570
n02841315
n04536866
n02268853
n03445924
n03803284
n04254777
n02443484
n03133878
n02799071
n13133613
n02102040
n02107908
n03947888
n04487394
n03599486
n03452741
n02097298
n04417672
n02493793
n02325366
n07747607
n03188531
n04482393
n02088632
n04461696
n03249569
n07693725
n02096437
n01773797
n02105162
n02843684
n02950826
n02492660
n04366367
n01981276
n03207941
n02966193
n03534580
n02112018
n01688243
n04584207
n02415577
n01847000
n02514041
n02488291
n02749479
n04380533
n02510455
n02526121
n07745940
n03930313
n03877845
n01755581
n01667114
n02108000
n02699494
n02363005
n02100877
n03770439
n02114712
n02100735
n02108000
n02028035
n02108551
n02484975
n07718747
n03498962
n01665541
n02894605
n04118776
n02119022
n04258138
n04604644
n02115641
n07768694
n12267677
n03908714
n03876231
n07717556
n11879895
n01688243
n03208938
n12267677
n02669723
n02965783
n02276258
n01631663
n04487394
n02825657
n01749939
n04037443
n04041544
n03376595
n04532670
n02104365
n02233338
n02793495
n03770439
n01910747
n04154565
n01980166
n03793489
n02025239
n02480495
n03781244
n04399382
n07871810
n04065272
n02017213
n01943899
n04067472
n03761084
n02094433
n03538406
n02494079
n04147183
n04141076
n04589890
n01601694
n02123394
n06874185
n02114548
n03637318
n03710193
n04536866
n09399592
n03452741
n03594945
n07860988
n03085013
n02814533
n03461385
n04252077
n02859443
n04033901
n01530575
n03476684
n04069434
n02105056
n02128385
n01694178
n01688243
n03372029
n04465501
n02808440
n04235860
n02177972
n13044778
n02096177
n01770081
n01669191
n02481823
n07880968
n03888605
n02117135
n02096437
n02397096
n01592084
n03769881
n03026506
n02107574
n02114367
n03124170
n03733281
n03692522
n02037110
n02167151
n01930112
n03995372
n03355925
n03676483
n03000247
n02966193
n02910353
n01682714
n02910353
n02510455
n02106550
n02120079
n03841143
n04229816
n02447366
n02091467
n04456115
n03937543
n01818515
n04086273
n02865351
n03109150
n02808304
n03483316
n01560419
n07930864
n04392985
n04592741
n04192698
n02089973
n03485794
n07613480
n02951585
n01494475
n01443537
n02097298
n02877765
n02101388
n03271574
n03041632
n03895866
n02865351
n02091134
n02027492
n03201208
n03983396
n02364673
n02134084
n02165105
n01773549
n04127249
n04275548
n01883070
n02112706
n03776460
n02108000
n02397096
n04525305
n02113624
n02268853
n02091134
n03476991
n02815834
n04525305
n03857828
n03272010
n04523525
n04335435
n03595614
n07932039
n03345487
n03877472
n04485082
n02794156
n03877472
n03492542
n02114712
n02883205
n02106662
n03417042
n03617480
n02978881
n02101556
n04039381
n02105641
n02098413
n04552348
n02823750
n07753113
n02110063
n09332890
n09468604
n02457408
n01537544
n02497673
n09229709
n04311004
n02776631
n02692877
n03623198
n04328186
n03697007
n02102177
n01687978
n03207743
n03733131
n02099429
n03769881
n02099601
n02787622
n03000134
n03895866
n02127052
n04136333
n02106662
n13044778
n01981276
n03680355
n03372029
n03908618
n03877472
n04346328
n04557648
n04270147
n04428191
n02870880
n03297495
n02871525
n02391049
n02123045
n01871265
n02071294
n02119022
n04592741
n02509815
n03424325
n02514041
n02101006
n02747177
n01950731
n02172182
n04336792
n04356056
n04252077
n01740131
n04613696
n04023962
n04485082
n02128925
n02086079
n03983396
n02134084
n02133161
n02128925
n04517823
n07875152
n02128385
n04204347
n02077923
n03272010
n02840245
n02105641
n01817953
n04146614
n04554684
n03796401
n04039381
n02788148
n04483307
n02493793
n03692522
n03075370
n03733281
n04238763
n02815834
n03065424
n02672831
n03602883
n04346328
n02066245
n03444034
n03594734
n15075141
n12144580
n07579787
n02992529
n04515003
n02107142
n02117135
n01734418
n01693334
n02105505
n02992211
n02869837
n13133613
n02666196
n04041544
n03857828
n04418357
n02113978
n01744401
n02797295
n02699494
n02489166
n02098286
n04243546
n02134418
n02106662
n03670208
n04090263
n02692877
n03467068
n04238763
n03788365
n03657121
n02906734
n02326432
n02676566
n02607072
n03627232
n02894605
n03538406
n04136333
n01632458
n04125021
n03134739
n01697457
n03924679
n04243546
n09256479
n02493793
n07871810
n02177972
n01917289
n02088466
n04069434
n03891251
n02113799
n07711569
n01833805
n04270147
n04259630
n02859443
n04270147
n02110063
n03042490
n03290653
n02002724
n02100583
n01608432
n03710193
n03777754
n02971356
n04482393
n13037406
n01768244
n03929855
n03016953
n07584110
n02113023
n04447861
n02128925
n02988304
n04201297
n02006656
n01807496
n03658185
n03394916
n07716358
n07579787
n02102177
n01729322
n03775071
n04482393
n02415577
n02607072
n02909870
n03255030
n03344393
n02325366
n02102480
n02102177
n04423845
n02130308
n03785016
n02787622
n04200800
n02087046
n04487394
n04152593
n04065272
n07831146
n02843684
n07248320
n03498962
n02128757
n04523525
n02999410
n03697007
n02097209
n11939491
n04141327
n07248320
n04461696
n02110185
n02483708
n03902125
n02168699
n02834397
n02108915
n02963159
n03841143
n02120505
n02111129
n02112350
n03793489
n03649909
n04090263
n02727426
n04033995
n01608432
n02364673
n02895154
n07730033
n02423022
n02999410
n07579787
n02086079
n01631663
n02494079
n04118776
n03467068
n03476684
n03954731
n03775546
n02981792
n01873310
n01980166
n04049303
n04099969
n02965783
n02281787
n02823750
n02655020
n02403003
n02951358
n02028035
n02504458
n03814639
n02085620
n04486054
n03761084
n07930864
n04522168
n04347754
n01644373
n02992211
n04483307
n02102973
n04467665
n03026506
n03026506
n07697537
n01532829
n04442312
n02108551
n01824575
n04254777
n03109150
n01728920
n04380533
n02795169
n04493381
n03141823
n01817953
n04026417
n02909870
n01601694
n02834397
n03376595
n02909870
n07711569
n03891251
n01806567
n03854065
n03814906
n02808304
n04153751
n07768694
n04532106
n02102973
n02346627
n13133613
n02129604
n02443484
n03792972
n02804414
n02097298
n02708093
n01748264
n03992509
n04591713
n02105162
n03840681
n02276258
n02100583
n02408429
n03770679
n07717556
n02280649
n02006656
n04560804
n04285008
n03868863
n02088238
n02799071
n04560804
n02108551
n02487347
n01614925
n04505470
n04090263
n03661043
n01675722
n01531178
n01632458
n01695060
n04254777
n04355933
n03743016
n04259630
n01534433
n02110958
n02112350
n02488702
n02687172
n09246464
n02071294
n02497673
n03871628
n07717556
n02105412
n02999410
n02105412
n04208210
n04589890
n03379051
n03404251
n03014705
n04146614
n03938244
n02107142
n03452741
n01667114
n04311174
n01667778
n03127747
n02105412
n09399592
n07716906
n03673027
n03197337
n03450230
n02113186
n01775062
n04380533
n06359193
n03483316
n02172182
n03496892
n03843555
n04476259
n02110806
n04467665
n04548280
n01518878
n02281787
n02093647
n04404412
n04356056
n03840681
n03995372
n02326432
n02777292
n01776313
n03220513
n02795169
n02074367
n01968897
n07693725
n02906734
n03777754
n02497673
n03126707
n04259630
n03729826
n04026417
n01855032
n02808440
n04346328
n03930313
n04560804
n03127925
n07684084
n04417672
n02172182
n02325366
n03899768
n01644900
n02113186
n03710637
n03857828
n02114548
n04326547
n02643566
n02092002
n03124170
n02281406
n01806567
n04254680
n03344393
n01532829
n02116738
n02116738
n02094258
n03690938
n03272562
n03110669
n03786901
n07920052
n04355933
n01978455
n01806143
n01944390
n03450230
n02088364
n03956157
n02437312
n03590841
n04344873
n02277742
n02111277
n01784675
n04483307
n02132136
n04019541
n01693334
n01608432
n01667114
n02236044
n03775546
n01739381
n02100583
n02090622
n01729322
n04350905
n02056570
n04612504
n04505470
n12057211
n03837869
n01531178
n04376876
n02454379
n02124075
n02395406
n02114367
n03481172
n02109047
n07715103
n04154565
n02423022
n01756291
n02108089
n02493793
n03602883
n02168699
n01978455
n02097298
n02447366
n04229816
n07583066
n03207743
n07248320
n02100583
n02823750
n01608432
n04418357
n01833805
n03930630
n03425413
n02788148
n03637318
n04265275
n02281787
n04335435
n02093428
n06359193
n03944341
n04041544
n04515003
n02106550
n02097130
n02837789
n07753275
n04026417
n03673027
n03887697
n03110669
n03769881
n01532829
n02006656
n04296562
n04347754
n01828970
n03125729
n03877472
n02096051
n04483307
n02398521
n03770679
n02106662
n03775546
n04347754
n02676566
n03690938
n07831146
n04398044
n01985128
n02109047
n03785016
n03494278
n03792972
n02114367
n03777754
n04090263
n02132136
n03134739
n01491361
n09332890
n03803284
n02120079
n03075370
n02104365
n03884397
n02790996
n01751748
n07695742
n02123045
n03759954
n03733131
n12998815
n03223299
n07745940
n04532106
n02111889
n02708093
n01944390
n01534433
n02361337
n02113624
n02090721
n02093256
n02025239
n04355933
n03452741
n01530575
n01443537
n04209239
n02037110
n04154565
n03594945
n04465501
n07714990
n03868863
n01819313
n04026417
n04553703
n02112706
n01980166
n02797295
n03888257
n02342885
n03216828
n03388043
n03804744
n02138441
n01689811
n04553703
n02231487
n04208210
n03372029
n02096177
n04429376
n03272010
n02493509
n03127747
n02786058
n03777568
n04238763
n03535780
n03938244
n02408429
n02097658
n02123159
n03891251
n02165105
n02437312
n02114712
n04540053
n04270147
n02113186
n02281406
n03899768
n04442312
n04023962
n02963159
n02102973
n01860187
n03297495
n03733805
n03980874
n04336792
n04366367
n02412080
n02966687
n03763968
n02098286
n01756291
n03929855
n03944341
n03271574
n04026417
n07754684
n01985128
n07753113
n01675722
n02106166
n02116738
n03916031
n04065272
n03110669
n07747607
n02009912
n03950228
n03483316
n07716358
n03216828
n09835506
n03393912
n02526121
n03770439
n02002724
n02871525
n01776313
n04355933
n03450230
n02025239
n02107312
n04606251
n03063599
n01795545
n04254777
n02120079
n01833805
n02099601
n13052670
n02676566
n03457902
n03720891
n03793489
n01775062
n01978287
n10565667
n02916936
n03599486
n02110958
n01443537
n04204238
n02672831
n07717410
n04209239
n01491361
n02963159
n03424325
n03697007
n03344393
n03445777
n02999410
n02441942
n04525038
n02403003
n07684084
n03125729
n02095570
n01796340
n03599486
n07747607
n04507155
n07768694
n04501370
n07734744
n02676566
n01871265
n03680355
n02088466
n10565667
n02110958
n02096437
n01498041
n02130308
n07836838
n03884397
n04065272
n02033041
n02607072
n13040303
n02808304
n03095699
n03485407
n02395406
n04560804
n02676566
n04589890
n02110958
n02837789
n01669191
n02123045
n07579787
n01667778
n12998815
n04613696
n02951585
n03623198
n03764736
n02892767
n02102318
n04040759
n02123045
n03062245
n02701002
n03201208
n04266014
n01873310
n04597913
n03595614
n07716906
n02988304
n03445924
n02860847
n02095889
n02115913
n01756291
n02114548
n02457408
n03995372
n01614925
n02107312
n03930630
n03017168
n03535780
n01985128
n02177972
n03045698
n13133613
n04398044
n02099267
n01829413
n02114712
n02104029
n01440764
n04263257
n04251144
n03584254
n03874599
n06359193
n04070727
n04209133
n04065272
n01748264
n02980441
n02093754
n02097658
n03187595
n01742172
n04590129
n03188531
n02504013
n02017213
n02979186
n02843684
n04040759
n01667778
n01820546
n02116738
n04243546
n04090263
n03888605
n01985128
n02823750
n04141975
n03376595
n02108915
n03372029
n02423022
n01728920
n02102973
n01580077
n02492660
n07716906
n02096294
n03259280
n03884397
n02102973
n03666591
n02486410
n02102480
n02105162
n09246464
n02823750
n04152593
n03196217
n01818515
n04591157
n04328186
n01742172
n01753488
n02971356
n09428293
n02927161
n03180011
n04099969
n02795169
n02895154
n03929660
n01910747
n03854065
n02747177
n03803284
n02123394
n04264628
n04243546
n02123159
n01983481
n02526121
n12267677
n06785654
n04606251
n01855672
n02281406
n04296562
n01773549
n02127052
n02090622
n02088094
n04125021
n01728920
n03595614
n02090622
n04285008
n03874293
n02823428
n02028035
n02077923
n02017213
n03903868
n02127052
n04317175
n02107683
n01984695
n03995372
n02090721
n02089867
n10148035
n01737021
n01883070
n01819313
n03958227
n03841143
n03459775
n03777568
n03417042
n02110185
n03388549
n03924679
n02672831
n02165456
n03207743
n04136333
n02971356
n04039381
n04162706
n02791124
n03124170
n01843065
n04428191
n03874599
n02102480
n04487394
n01883070
n02966193
n01494475
n02110341
n07716358
n07248320
n02814860
n04133789
n02443114
n02110063
n04509417
n02108089
n04548362
n01748264
n03710637
n02091467
n02110341
n02113624
n01819313
n02939185
n03272562
n02787622
n12267677
n04141327
n02110958
n01687978
n04429376
n01729322
n02093647
n07920052
n01910747
n02107908
n03895866
n02086079
n02895154
n13037406
n03876231
n04590129
n01692333
n03717622
n02109525
n04355338
n03777568
n03314780
n03887697
n04141975
n01978287
n04597913
n04141975
n02782093
n03868242
n02002724
n03196217
n04153751
n01629819
n02808440
n02058221
n01531178
n02114712
n03494278
n04204347
n03793489
n03483316
n04209239
n03776460
n04336792
n02114548
n02667093
n02834397
n04456115
n03394916
n04346328
n01776313
n02124075
n02356798
n03895866
n02963159
n01883070
n03355925
n02226429
n03417042
n02106550
n02101388
n04200800
n02011460
n02112706
n04326547
n01985128
n03110669
n03804744
n04141327
n11939491
n02105251
n03201208
n07754684
n01632777
n04553703
n04149813
n02481823
n03947888
n01534433
n03457902
n02776631
n04209239
n04523525
n04074963
n02233338
n03930313
n03249569
n03884397
n01601694
n04560804
n02514041
n03417042
n07880968
n03594734
n03344393
n02088632
n02106662
n02108551
n01744401
n02483708
n02971356
n02909870
n02841315
n03496892
n02100583
n03476684
n07718472
n01641577
n06596364
n03954731
n04357314
n04259630
n07695742
n04423845
n03249569
n04111531
n02895154
n04149813
n02114712
n04252225
n03770679
n02837789
n04428191
n02361337
n02100236
n01728920
n03594945
n02268443
n07875152
n07695742
n02108551
n01531178
n01980166
n02106382
n03658185
n02988304
n04141076
n02906734
n02012849
n02786058
n01614925
n02206856
n01631663
n03100240
n03047690
n03180011
n02895154
n02782093
n03595614
n09332890
n07749582
n04258138
n03095699
n02096177
n01728920
n03538406
n01806143
n02088238
n04501370
n09229709
n04423845
n02397096
n02133161
n02088238
n02264363
n02101006
n04515003
n02870880
n04548280
n04461696
n03028079
n02268853
n03874599
n01877812
n02699494
n12985857
n02454379
n04326547
n02089867
n01560419
n02093256
n04204347
n04347754
n02086240
n04286575
n04482393
n03840681
n04065272
n02480855
n02749479
n03492542
n02096437
n02317335
n02174001
n04525305
n04039381
n07753592
n13037406
n02494079
n04258138
n02229544
n01843383
n01728920
n04330267
n02325366
n02808304
n04462240
n03874293
n03482405
n01629819
n03781244
n04392985
n04258138
n03160309
n02096585
n01614925
n02017213
n04133789
n04277352
n02106030
n04428191
n03400231
n03249569
n01514668
n10148035
n02397096
n07697313
n07802026
n03887697
n07248320
n01855032
n03908618
n02086910
n04254680
n02104365
n03445777
n02011460
n07695742
n04344873
n01667778
n02091244
n01534433
n02097474
n02701002
n03208938
n03676483
n03770439
n01755581
n02108915
n01753488
n02102480
n03633091
n03662601
n01770393
n07590611
n04264628
n03998194
n02396427
n02102040
n01770393
n04162706
n02281406
n12768682
n01945685
n03483316
n01978287
n02119022
n02169497
n03991062
n04465501
n07614500
n01990800
n01534433
n03770679
n09288635
n03188531
n09256479
n04259630
n02110627
n04560804
n02113978
n02095889
n04599235
n03259280
n02111277
n02794156
n04328186
n04254680
n03661043
n03599486
n02097130
n02033041
n02071294
n03937543
n09288635
n03709823
n02489166
n03673027
n01828970
n04532106
n03496892
n01924916
n04548280
n02319095
n02395406
n02782093
n04554684
n02086240
n03916031
n02791270
n07717410
n04238763
n02730930
n01514859
n01748264
n02988304
n03461385
n03272562
n04330267
n07860988
n02276258
n07871810
n02097474
n02999410
n04037443
n01614925
n04033901
n03944341
n02655020
n01608432
n03874599
n03594945
n04252225
n07892512
n03717622
n03763968
n02110627
n02795169
n03000134
n02494079
n03042490
n03100240
n07875152
n02802426
n02484975
n09229709
n02747177
n06596364
n04557648
n02123394
n02002724
n02167151
n02504013
n01616318
n03770439
n04428191
n02051845
n04579145
n02093754
n12267677
n01641577
n02963159
n02807133
n04590129
n03467068
n01629819
n02443484
n02088238
n02412080
n03532672
n04591157
n04486054
n02692877
n02727426
n04371774
n04273569
n03733131
n03544143
n02104365
n02109961
n03447447
n01872401
n03961711
n02116738
n01688243
n01749939
n03141823
n02509815
n12985857
n01829413
n02109047
n02526121
n02097658
n03216828
n02870880
n04266014
n04355338
n03633091
n01910747
n02006656
n03445924
n02906734
n04099969
n02099712
n02229544
n04443257
n02687172
n04273569
n02489166
n03924679
n12985857
n02167151
n02321529
n02102040
n02870880
n01693334
n02097298
n01882714
n04040759
n03791053
n02979186
n02454379
n03131574
n04141327
n02981792
n02974003
n02090721
n04131690
n02106030
n02493793
n02963159
n04596742
n11879895
n03457902
n02823750
n01774750
n03788365
n02389026
n02823750
n02493509
n07583066
n01682714
n03899768
n02279972
n07747607
n01692333
n04243546
n04317175
n02106550
n01664065
n01677366
n02093754
n04346328
n02106550
n02127052
n03666591
n03877845
n03125729
n03786901
n03775071
n02412080
n01518878
n03720891
n01735189
n02356798
n02110806
n03047690
n04462240
n02951585
n01558993
n03065424
n02860847
n02486410
n02398521
n04346328
n02106030
n02445715
n04153751
n02509815
n01828970
n04069434
n07714571
n13044778
n01955084
n03662601
n01664065
n02708093
n02408429
n03920288
n02190166
n02091635
n04229816
n01773549
n02106662
n02009912
n01558993
n02127052
n02843684
n02174001
n03345487
n01990800
n03584254
n02389026
n02389026
n04069434
n03032252
n07749582
n02110627
n02807133
n02012849
n03208938
n02107142
n03995372
n02927161
n03888257
n02802426
n09193705
n07716906
n03345487
n02088094
n03297495
n02871525
n02363005
n02206856
n02445715
n02783161
n02948072
n09421951
n02410509
n02808304
n03903868
n02110063
n03724870
n07836838
n04141975
n02487347
n02112137
n02804610
n07734744
n04462240
n03372029
n02177972
n02085620
n01917289
n04070727
n02823428
n02860847
n04392985
n02791124
n01847000
n01784675
n02093991
n03457902
n02939185
n04493381
n03271574
n02509815
n03793489
n02690373
n03983396
n02927161
n03018349
n03908618
n02110341
n03776460
n02124075
n04335435
n03127747
n02948072
n03085013
n02442845
n02916936
n01688243
n02879718
n02097298
n04589890
n02607072
n02948072
n04525038
n02100735
n02814533
n03000134
n03478589
n02037110
n04235860
n02112137
n04435653
n04273569
n03794056
n01910747
n01748264
n01883070
n04200800
n04590129
n03443371
n02791124
n03075370
n03673027
n01742172
n03476684
n01484850
n01675722
n02978881
n03938244
n02106166
n01729977
n04118776
n04209239
n03376595
n04008634
n02095889
n01855032
n03376595
n04456115
n02879718
n04238763
n02268443
n02794156
n02105505
n01914609
n03899768
n02676566
n02099601
n02106382
n04264628
n04501370
n03594734
n03895866
n04332243
n04008634
n02492035
n01773797
n04228054
n02110958
n06359193
n02403003
n04409515
n03337140
n02483708
n02106166
n04209133
n02114367
n03743016
n03201208
n03207941
n02804414
n04487081
n01945685
n02606052
n03388043
n03661043
n02804610
n04235860
n02795169
n03476991
n03444034
n03942813
n04026417
n03337140
n02108422
n04033995
n03041632
n02134418
n04554684
n03733131
n02116738
n03786901
n03937543
n04147183
n04131690
n03400231
n02125311
n02410509
n01775062
n02814533
n02110185
n04008634
n04597913
n01883070
n07714990
n02112350
n02437616
n03662601
n02074367
n04239074
n03063689
n07831146
n02869837
n03920288
n13052670
n03016953
n02788148
n04613696
n02113023
n03866082
n02992529
n04479046
n04467665
n04540053
n02927161
n03992509
n04347754
n03495258
n03633091
n02105251
n02231487
n02102318
n02667093
n01749939
n02133161
n03372029
n02486261
n04004767
n02088466
n07579787
n02791270
n03131574
n02391049
n01664065
n02099429
n01776313
n03920288
n02109047
n02317335
n04612504
n03584254
n03457902
n02051845
n03047690
n04507155
n02704792
n01748264
n02017213
n03450230
n02841315
n04070727
n02992211
n03404251
n02092339
n12768682
n07873807
n03041632
n03379051
n04435653
n04146614
n02012849
n03443371
n04152593
n04507155
n03447447
n04252225
n03770439
n13037406
n01748264
n04550184
n03207941
n07716906
n03595614
n07875152
n04560804
n04479046
n03127925
n07248320
n02342885
n02088466
n03485407
n09399592
n04039381
n04548280
n02099267
n04254777
n06785654
n02190166
n03868242
n04141076
n02980441
n03868863
n02437312
n02096177
n02701002
n03259280
n02834397
n15075141
n07880968
n02096585
n09256479
n02091032
n03457902
n02099849
n02398521
n02129165
n03404251
n01774384
n03977966
n02980441
n02137549
n03920288
n01770081
n03891332
n03196217
n02782093
n02510455
n03535780
n04263257
n02790996
n03146219
n01601694
n03379051
n03188531
n02790996
n04596742
n01560419
n03376595
n12768682
n02504013
n03388043
n02231487
n03134739
n03775071
n02509815
n07695742
n02325366
n09835506
n04418357
n04483307
n04069434
n03991062
n02487347
n03223299
n02817516
n03207743
n02110627
n04604644
n02112350
n02109961
n03534580
n03208938
n03125729
n03947888
n04154565
n01860187
n02328150
n02777292
n02112018
n02113978
n02033041
n07871810
n10148035
n01981276
n07860988
n03492542
n04005630
n02093428
n04355933
n02108089
n03841143
n02704792
n02277742
n03874599
n04371774
n01775062
n03461385
n02096585
n02093754
n02011460
n02814533
n02787622
n02114367
n01641577
n03992509
n04265275
n02096051
n07745940
n02422106
n01496331
n03188531
n07614500
n02101006
n02101006
n13040303
n02085936
n03961711
n02093991
n07714571
n01986214
n01669191
n01984695
n03297495
n02108422
n03249569
n04398044
n03775546
n01986214
n04579432
n07714571
n01945685
n02640242
n06785654
n04116512
n02099429
n09229709
n01682714
n01749939
n02007558
n01498041
n04507155
n02124075
n02101006
n02104029
n02676566
n02606052
n04238763
n02101388
n02107312
n03347037
n02493509
n02396427
n04065272
n03840681
n04515003
n02091635
n02325366
n04033901
n01675722
n03788365
n13037406
n03527444
n01695060
n04328186
n07590611
n01728572
n02119022
n02974003
n02410509
n07892512
n07730033
n04330267
n03868863
n02018207
n02500267
n02980441
n01843065
n02093859
n02094114
n07768694
n04154565
n02123394
n03843555
n02123159
n02107574
n01795545
n02917067
n02071294
n03895866
n03179701
n03950228
n04259630
n02165105
n02120079
n02804610
n02279972
n01728920
n02978881
n03710637
n01872401
n03160309
n02442845
n09256479
n02950826
n02841315
n04357314
n02865351
n04111531
n07747607
n03594945
n03763968
n04606251
n03895866
n02113978
n04554684
n04344873
n04254120
n01740131
n03976467
n07753275
n02443484
n02939185
n02977058
n13037406
n07747607
n04467665
n01784675
n04536866
n02123159
n02119789
n04548362
n02111129
n06794110
n04239074
n03733805
n02088466
n03764736
n01914609
n02105505
n02412080
n04254680
n04523525
n07697537
n01728920
n02794156
n02113978
n13040303
n01514859
n04398044
n02364673
n01924916
n02007558
n03803284
n02795169
n03916031
n02088238
n02086646
n03063689
n01806143
n04366367
n03109150
n04523525
n04208210
n01978287
n03272010
n03146219
n03933933
n04525305
n03124043
n02510455
n01687978
n01824575
n04613696
n06359193
n03110669
n03388183
n03691459
n02280649
n03133878
n02085782
n02087046
n02090721
n02497673
n04344873
n04330267
n01514859
n02488702
n04525038
n07711569
n01978455
n01768244
n02105855
n04604644
n02281406
n01739381
n01693334
n02113978
n07749582
n03786901
n01883070
n09246464
n03841143
n03482405
n12998815
n03938244
n04238763
n03929855
n02892201
n02486261
n02676566
n01843065
n01728920
n03379051
n02823750
n02776631
n02488291
n02317335
n02002724
n01755581
n03110669
n04019541
n03095699
n04004767
n03877845
n02120505
n02113624
n07695742
n03127747
n03041632
n01744401
n02098286
n02100735
n02264363
n04456115
n02219486
n02129165
n04275548
n03874599
n03706229
n01770081
n02988304
n02105505
n02130308
n02113799
n06596364
n02028035
n01784675
n04266014
n02422106
n03271574
n01622779
n04229816
n02988304
n02977058
n03594734
n03196217
n04008634
n03947888
n03032252
n02037110
n03424325
n03873416
n03379051
n02096437
n03887697
n04154565
n03803284
n06794110
n03956157
n03297495
n03444034
n09256479
n02317335
n03871628
n04192698
n07873807
n02793495
n03764736
n02483362
n01773797
n03788195
n03032252
n04311174
n02111889
n03970156
n04447861
n02018795
n03666591
n03314780
n02229544
n02172182
n02486410
n02607072
n02276258
n04254777
n02403003
n02094114
n09246464
n02114367
n03788365
n03297495
n02492660
n04326547
n03201208
n04286575
n03492542
n03877472
n01910747
n01608432
n02490219
n03710637
n04344873
n02951358
n01498041
n01729322
n04409515
n04146614
n03873416
n02090721
n04081281
n03976467
n02837789
n04409515
n03759954
n02168699
n03127925
n03970156
n01665541
n03160309
n04251144
n04311174
n02098413
n02480855
n01773549
n02489166
n03494278
n02229544
n01729977
n04552348
n04033995
n01882714
n04366367
n03271574
n03666591
n02093428
n02791124
n03384352
n03498962
n03709823
n02422699
n02085782
n04133789
n02486261
n12985857
n04372370
n03857828
n04367480
n04612504
n04399382
n01632458
n03717622
n02514041
n02018207
n07615774
n02098413
n03691459
n02108915
n07920052
n04228054
n04493381
n04081281
n03832673
n13052670
n04584207
n04252225
n01608432
n02708093
n04398044
n02087046
n04599235
n02177972
n02326432
n02490219
n03761084
n02101556
n04599235
n04467665
n02097658
n01978287
n04612504
n02397096
n03018349
n02391049
n07584110
n02457408
n01776313
n02120079
n02727426
n02791270
n04590129
n02058221
n03599486
n03788365
n02098105
n02097047
n03794056
n02966193
n01494475
n02514041
n01773157
n07613480
n09332890
n02086910
n02071294
n02105412
n02966193
n02481823
n04228054
n02825657
n03775071
n02096177
n02328150
n01768244
n03028079
n03534580
n01484850
n09428293
n03788365
n02106550
n03782006
n04258138
n03710637
n02097298
n03721384
n02391049
n02013706
n02840245
n03249569
n02454379
n02865351
n02206856
n02093991
n01877812
n03485407
n02101388
n03014705
n04456115
n03976657
n03188531
n02342885
n02096437
n02102318
n03376595
n03271574
n02177972
n03594945
n03126707
n02099712
n01692333
n02966687
n03930313
n01667778
n07716906
n01580077
n03804744
n02111277
n03100240
n04548280
n02814533
n04204347
n04141327
n02066245
n02096585
n02102480
n03125729
n03272010
n03980874
n07753592
n02105412
n02443114
n04579432
n02101556
n03995372
n02950826
n01534433
n02088238
n07715103
n02795169
n01484850
n01753488
n02607072
n01530575
n01692333
n04153751
n02111500
n03131574
n03803284
n02437312
n02974003
n02776631
n04125021
n09428293
n02843684
n03047690
n02417914
n03998194
n03110669
n02445715
n04525305
n03998194
n01514668
n02321529
n02088466
n01644373
n07714571
n04357314
n03991062
n02088094
n02687172
n02110185
n02089078
n09468604
n02408429
n04389033
n03706229
n02488702
n03992509
n02417914
n04086273
n07613480
n04270147
n03887697
n01601694
n02123159
n01518878
n07836838
n04443257
n01592084
n03109150
n02264363
n02808304
n04252225
n01630670
n04507155
n03047690
n03344393
n02981792
n03680355
n07579787
n02526121
n01984695
n04485082
n03814639
n02977058
n03866082
n04404412
n04116512
n03100240
n03127925
n01847000
n02051845
n02177972
n02106030
n03770679
n03535780
n03676483
n01843383
n01873310
n02085936
n02328150
n03089624
n02102318
n02500267
n04040759
n04552348
n02101006
n07749582
n03884397
n02111129
n03662601
n03250847
n02129604
n03461385
n03970156
n04317175
n03958227
n07714990
n01980166
n03929660
n03314780
n01855032
n03630383
n01817953
n02095889
n04505470
n02727426
n03598930
n02105855
n02115913
n03110669
n10148035
n02106550
n02086079
n04380533
n10565667
n03249569
n02095889
n02492660
n07873807
n02797295
n04209239
n02786058
n02837789
n02841315
n02704792
n03935335
n04562935
n02099429
n02112137
n03325584
n04442312
n04033995
n07614500
n02108089
n03710721
n03100240
n02093859
n02906734
n04254777
n07871810
n02422106
n04049303
n03961711
n02777292
n04443257
n04597913
n02927161
n03424325
n03032252
n02795169
n02123394
n01498041
n01751748
n03793489
n03345487
n02091635
n02123159
n02107142
n02484975
n03666591
n03085013
n04325704
n03208938
n04562935
n04152593
n09472597
n07875152
n04597913
n04099969
n03976657
n02028035
n03796401
n02917067
n02110958
n02730930
n02802426
n02917067
n02704792
n07760859
n02123597
n01981276
n01688243
n03400231
n02088238
n07753275
n02100583
n01955084
n02777292
n01534433
n03908714
n02120079
n04465501
n02641379
n02098286
n01534433
n02917067
n04371774
n02110958
n03538406
n03443371
n03902125
n03075370
n04336792
n02091831
n02510455
n02097047
n03908618
n02817516
n02111889
n01531178
n02481823
n03110669
n02095570
n03982430
n03444034
n07714571
n07932039
n01768244
n02837789
n03637318
n04141975
n01910747
n03873416
n03018349
n02114548
n07717556
n03494278
n03924679
n02012849
n02361337
n02398521
n03443371
n07615774
n02009912
n02395406
n02777292
n02783161
n02445715
n03743016
n03891332
n04542943
n15075141
n02091244
n02114367
n03404251
n03000134
n01667114
n03763968
n02233338
n09428293
n03793489
n04258138
n04023962
n01667778
n03899768
n13133613
n03599486
n03042490
n04467665
n03633091
n02437616
n02835271
n03791053
n04486054
n07717410
n07613480
n01728920
n03400231
n02790996
n02676566
n04562935
n02264363
n04141975
n03089624
n03954731
n03467068
n02690373
n02102040
n01985128
n04116512
n02497673
n04392985
n03937543
n02006656
n01773549
n02704792
n02999410
n07930864
n02011460
n02107312
n02910353
n01795545
n04111531
n02894605
n01614925
n02793495
n02100877
n03761084
n02504013
n02408429
n07583066
n01744401
n03447447
n03125729
n01978287
n04346328
n03742115
n02483708
n13054560
n02096177
n03920288
n02837789
n03877472
n02165105
n03937543
n03982430
n03787032
n07880968
n04371774
n04146614
n03394916
n03903868
n02687172
n01494475
n02536864
n02129165
n07920052
n01496331
n02009912
n02692877
n02101006
n03271574
n04371774
n01496331
n04557648
n02027492
n02125311
n03376595
n01872401
n04346328
n02091134
n04238763
n01776313
n01796340
n01770081
n03141823
n01665541
n04133789
n02096437
n02096051
n10565667
n04542943
n03447447
n09421951
n02113624
n03160309
n02504458
n01774750
n03871628
n04590129
n12057211
n03481172
n03000247
n04090263
n04141076
n01914609
n03775071
n02869837
n04509417
n04371430
n02097209
n04613696
n02669723
n02883205
n01748264
n01955084
n04204238
n03743016
n02177972
n03868863
n04133789
n02168699
n04041544
n02115913
n02259212
n02096177
n02277742
n04493381
n02093859
n03160309
n04120489
n09246464
n04005630
n03938244
n03208938
n04033901
n02835271
n04049303
n02951585
n04229816
n01755581
n01734418
n01843065
n02114367
n09288635
n04147183
n03196217
n04367480
n03467068
n01491361
n02091831
n04154565
n07875152
n07873807
n02690373
n02730930
n04389033
n02879718
n03223299
n01784675
n03447721
n01742172
n01728572
n12985857
n03376595
n03089624
n03887697
n04270147
n01930112
n02814533
n07802026
n07920052
n03425413
n06596364
n03134739
n02108422
n12998815
n07753113
n02056570
n09256479
n04238763
n02951585
n04033901
n01833805
n01737021
n01694178
n06785654
n02500267
n02085782
n03825788
n03899768
n01843383
n02782093
n01855672
n04239074
n04604644
n07583066
n03041632
n02777292
n03627232
n03884397
n02328150
n04005630
n02093859
n01749939
n03000134
n04037443
n03888257
n01824575
n07875152
n02526121
n07920052
n02102040
n02869837
n02099849
n04356056
n01749939
n02442845
n04487081
n02087046
n04201297
n02094433
n02480495
n02096585
n01518878
n04141975
n02981792
n01632458
n02093647
n02018207
n04040759
n01820546
n03840681
n03832673
n02051845
n01883070
n03534580
n02028035
n03857828
n01682714
n04049303
n02096585
n04254120
n02071294
n03868863
n02206856
n04086273
n02177972
n02085782
n03942813
n01496331
n04355933
n02790996
n04265275
n03976467
n02279972
n02086240
n01824575
n09421951
n02123159
n02086079
n07717410
n02422106
n02236044
n01608432
n03062245
n07734744
n01983481
n04542943
n01773797
n02526121
n01688243
n01990800
n02169497
n01768244
n01770393
n03977966
n02096585
n03532672
n07711569
n01734418
n04326547
n09332890
n04584207
n02114712
n02093754
n03495258
n01616318
n02326432
n04507155
n03527444
n01981276
n02097298
n03958227
n02165105
n07718472
n04591157
n04286575
n04208210
n02120505
n04265275
n04147183
n03271574
n02128385
n02110958
n03888257
n02730930
n01978455
n02843684
n03590841
n03065424
n03854065
n01739381
n01773797
n03976657
n04116512
n02092339
n01817953
n02119789
n01748264
n02169497
n03125729
n02091467
n07714571
n02704792
n02085936
n02108915
n03314780
n02086646
n07697537
n03584829
n03773504
n04204347
n01796340
n03930313
n02033041
n02236044
n02895154
n02708093
n02115641
n04209239
n01735189
n03201208
n09468604
n03047690
n04254777
n06596364
n03627232
n01532829
n01694178
n04081281
n03495258
n02788148
n01775062
n04355933
n03017168
n04599235
n03785016
n07871810
n03980874
n02071294
n04493381
n04372370
n02087046
n04584207
n04086273
n02092339
n02817516
n03240683
n12998815
n03075370
n02804414
n01833805
n01695060
n04596742
n04398044
n02106382
n04204238
n02219486
n02437312
n04335435
n01531178
n04201297
n03920288
n03759954
n03792782
n02412080
n04536866
n03874293
n02708093
n02437312
n04509417
n01990800
n04579145
n02480495
n04371430
n02105056
n03930630
n03481172
n02808440
n07932039
n04428191
n02971356
n02090379
n03857828
n02988304
n02115913
n04599235
n04033901
n11879895
n03014705
n02002724
n02445715
n02870880
n02951585
n02129604
n02123394
n01860187
n03788195
n03729826
n01665541
n01531178
n04442312
n02777292
n13044778
n07720875
n02027492
n02480855
n04447861
n02403003
n03874599
n01622779
n02860847
n03884397
n13040303
n03796401
n03388549
n03970156
n02112137
n03775071
n01601694
n02093991
n01664065
n02077923
n02487347
n02444819
n02480855
n04505470
n03980874
n03447447
n01955084
n02056570
n03127747
n02692877
n06596364
n03400231
n03482405
n03920288
n03871628
n03496892
n12267677
n04310018
n02865351
n01924916
n03000247
n03393912
n02825657
n06785654
n02097474
n04179913
n02112350
n03444034
n03133878
n02132136
n02843684
n01770393
n01871265
n03290653
n03207941
n03476991
n03481172
n04590129
n01532829
n03642806
n03388183
n02094258
n03496892
n04467665
n02963159
n02328150
n02101388
n09256479
n03777568
n02165456
n03042490
n02363005
n13054560
n02808440
n04532670
n01688243
n03602883
n02206856
n03400231
n02346627
n01871265
n01806567
n02727426
n04067472
n02088094
n04553703
n13037406
n07718472
n04252077
n04258138
n02808440
n02328150
n03325584
n01774750
n02123159
n02111277
n04591157
n03871628
n03775071
n04136333
n03976467
n03908618
n03483316
n04487394
n02769748
n04523525
n12998815
n04553703
n04152593
n02346627
n02007558
n03110669
n01440764
n09472597
n02730930
n02782093
n04483307
n02028035
n04040759
n03372029
n02808440
n02120505
n03141823
n02100236
n01770393
n01739381
n03208938
n03954731
n04536866
n04456115
n03000247
n04612504
n02837789
n03538406
n02699494
n03967562
n04398044
n03710721
n04356056
n04033995
n02415577
n04270147
n03866082
n03271574
n02133161
n03483316
n01514668
n03770679
n04532670
n03720891
n02096437
n03444034
n02088632
n02328150
n02787622
n12998815
n07716358
n02817516
n03961711
n02823428
n01753488
n02443114
n04370456
n04542943
n03876231
n02509815
n04371430
n04141975
n02112350
n02321529
n02097474
n04461696
n03804744
n02786058
n12768682
n01855032
n03992509
n01773797
n02443484
n02101006
n09421951
n03837869
n04356056
n01744401
n02701002
n03977966
n02105056
n02102318
n03095699
n01728572
n01873310
n03930313
n03930630
n06359193
n02033041
n04604644
n03781244
n04599235
n02114548
n02356798
n03271574
n07932039
n02100735
n04069434
n04346328
n09332890
n12768682
n02795169
n04049303
n02403003
n04239074
n02493793
n02127052
n04317175
n02363005
n03832673
n04296562
n03630383
n01739381
n02107683
n02012849
n03786901
n04033995
n03782006
n02113624
n02783161
n02134418
n03532672
n02012849
n02415577
n02096437
n03220513
n01945685
n02892201
n04044716
n07742313
n03376595
n02643566
n01735189
n01729977
n02105251
n09421951
n02099712
n03388043
n02174001
n04147183
n02013706
n13054560
n02978881
n09246464
n02699494
n02107312
n03017168
n07745940
n02233338
n02791270
n01950731
n03857828
n02025239
n03452741
n02101388
n03388549
n01484850
n02111277
n01950731
n02174001
n02105162
n02480855
n03325584
n03272562
n03876231
n01644373
n04380533
n07697537
n04380533
n02190166
n07753592
n01630670
n02730930
n03788195
n02669723
n02100735
n03271574
n03179701
n02486261
n02105412
n02417914
n01770081
n02123394
n01855672
n02480495
n02692877
n01532829
n04372370
n01910747
n03400231
n02444819
n04099969
n03498962
n04154565
n02783161
n03124170
n03417042
n04254120
n07717410
n04372370
n07565083
n03661043
n04074963
n02504458
n03720891
n03445924
n03873416
n03775071
n02443114
n03623198
n03000247
n02423022
n03929660
n02782093
n01930112
n01776313
n03388183
n02133161
n02782093
n03393912
n03794056
n09256479
n07920052
n03384352
n02666196
n02894605
n03476684
n02526121
n02123045
n03673027
n03197337
n02114548
n04599235
n02085936
n02963159
n04258138
n03983396
n03187595
n03290653
n03179701
n01531178
n02398521
n02119789
n02089867
n04548362
n02486410
n01704323
n01494475
n04141327
n02790996
n02056570
n02106166
n02018795
n04523525
n03598930
n04118776
n03662601
n04509417
n02606052
n02966193
n03775071
n02317335
n03146219
n03355925
n02229544
n02443114
n03355925
n04590129
n02804414
n02114367
n03379051
n02138441
n03461385
n04200800
n03584829
n01755581
n04335435
n03127747
n04263257
n04192698
n01622779
n02422699
n02107683
n04532670
n02906734
n02804414
n12768682
n02108089
n02909870
n03837869
n02113186
n02112350
n01677366
n03630383
n02526121
n02840245
n01687978
n04515003
n15075141
n02841315
n02422106
n02783161
n02814533
n02102177
n02415577
n03782006
n01770081
n02114548
n03958227
n01728920
n03494278
n01873310
n02894605
n01833805
n03160309
n04458633
n03223299
n12620546
n12998815
n01496331
n04461696
n01981276
n03595614
n02101388
n03937543
n03100240
n03791053
n04613696
n02134084
n04141975
n02093859
n03125729
n02326432
n03680355
n03998194
n01494475
n02342885
n03976657
n01819313
n04606251
n01740131
n02797295
n02123394
n02169497
n03630383
n01689811
n03950228
n07584110
n04591713
n04127249
n12144580
n07831146
n03791053
n02808440
n02793495
n02437312
n02138441
n02111500
n02109961
n03459775
n03126707
n03388549
n02096294
n03961711
n04209133
n04243546
n02791270
n01685808
n02965783
n03775546
n02074367
n03775546
n03584254
n02119789
n02437312
n03888257
n03187595
n02123045
n03937543
n02412080
n01729322
n03908714
n02125311
n01494475
n02894605
n03908618
n02114855
n02123159
n03598930
n02107142
n03290653
n02791124
n03803284
n03937543
n03388043
n03131574
n02788148
n02106382
n04467665
n02100877
n04330267
n03697007
n03710721
n02403003
n02108089
n03017168
n03733281
n03792972
n02105056
n01806567
n01630670
n03337140
n03467068
n01873310
n02398521
n02013706
n04120489
n02708093
n02110341
n03770679
n02480495
n03450230
n03584254
n02823750
n04127249
n02410509
n04562935
n04019541
n04613696
n01632777
n07836838
n02114855
n02100236
n02102318
n07831146
n03742115
n03662601
n03720891
n02804610
n02107142
n03733131
n03791053
n03991062
n02808304
n03594945
n02749479
n04562935
n02134084
n02342885
n03538406
n02107683
n02012849
n01682714
n02988304
n07932039
n02206856
n03447447
n01753488
n01755581
n02119022
n04597913
n03314780
n02865351
n03459775
n01530575
n04335435
n09288635
n02769748
n02256656
n03131574
n03770439
n02123045
n02096177
n04131690
n02397096
n01798484
n02107574
n02113186
n01855672
n03791053
n03770679
n01983481
n02093256
n01968897
n02692877
n02356798
n07875152
n02107312
n02837789
n03042490
n03188531
n03447721
n02825657
n03868242
n04552348
n01770081
n02095314
n04204347
n02087394
n04065272
n02132136
n02134418
n01632777
n04325704
n03776460
n01955084
n02129604
n01644900
n02101006
n04357314
n12985857
n03670208
n07760859
n04067472
n02099849
n03770679
n02978881
n03623198
n03717622
n04536866
n02835271
n07717410
n04429376
n02869837
n03124170
n01632458
n01531178
n03127925
n02097047
n03950228
n03028079
n02107312
n13052670
n02090721
n07711569
n02091831
n01530575
n04146614
n01667114
n03958227
n02098286
n07871810
n01980166
n02412080
n02500267
n01924916
n04254680
n02480495
n01774384
n03216828
n07711569
n03026506
n01749939
n03344393
n03938244
n02098105
n01986214
n01917289
n04418357
n02058221
n02106030
n02966193
n03032252
n02206856
n03063599
n02107312
n03843555
n02108551
n01855672
n02107142
n02102040
n04357314
n04505470
n03529860
n02437312
n02129604
n03773504
n02100877
n03877472
n04501370
n07880968
n04458633
n02167151
n03721384
n02102480
n07579787
n02123394
n02484975
n03942813
n04270147
n03777568
n02085782
n01729977
n04404412
n04311174
n03160309
n02454379
n02096294
n04065272
n02483362
n02364673
n03100240
n07873807
n03594734
n04344873
n07590611
n01883070
n03770439
n03141823
n02133161
n01689811
n01833805
n02814860
n04367480
n03710637
n07714571
n02071294
n01768244
n03388183
n01847000
n03325584
n01667114
n02236044
n04141327
n03467068
n01687978
n04285008
n03483316
n03447447
n02264363
n02097209
n04501370
n09468604
n02930766
n01917289
n04554684
n02979186
n02442845
n03345487
n02486410
n02841315
n03899768
n09399592
n03344393
n02088364
n03763968
n02105162
n04235860
n03903868
n09428293
n03661043
n03249569
n02268443
n02444819
n02116738
n03902125
n02093991
n02110185
n03832673
n03983396
n07716358
n02113712
n03887697
n03424325
n03958227
n01534433
n02086646
n04591713
n07753113
n03841143
n02790996
n02165456
n02009229
n02814860
n04462240
n02730930
n02085620
n02098413
n03337140
n02807133
n04263257
n02108422
n02138441
n01630670
n04008634
n02113799
n02643566
n12057211
n01665541
n04404412
n03691459
n01729977
n03290653
n01924916
n02486410
n04332243
n13052670
n03598930
n02437616
n02093991
n01729977
n02115641
n02825657
n02786058
n02788148
n02094258
n02793495
n03388043
n02128757
n02443484
n02088094
n03110669
n01985128
n07714990
n02869837
n03595614
n04592741
n02127052
n07880968
n02643566
n09256479
n02356798
n02509815
n04487394
n03721384
n01728572
n02992211
n03877845
n02231487
n02445715
n02095570
n04579145
n03706229
n02107574
n01833805
n01629819
n03445777
n03710721
n03014705
n04336792
n04311174
n03724870
n03920288
n03063689
n03908618
n02085620
n02699494
n02096437
n03804744
n04209239
n03249569
n11939491
n01882714
n02129165
n03773504
n04346328
n02102040
n12620546
n02177972
n02066245
n03492542
n02090721
n04482393
n01914609
n02174001
n02233338
n01693334
n01665541
n02280649
n01514668
n01641577
n02107683
n04040759
n03355925
n04579432
n02280649
n02361337
n03937543
n03891251
n02492035
n03759954
n03763968
n01582220
n03866082
n04086273
n04330267
n04476259
n04118776
n03180011
n03838899
n03627232
n04264628
n02101006
n02113624
n02395406
n01675722
n04090263
n03785016
n02137549
n02277742
n03642806
n07718472
n03447447
n03792782
n04008634
n04254777
n01631663
n04254680
n02074367
n01744401
n03127747
n02190166
n03623198
n02607072
n02877765
n02790996
n02992529
n02492660
n02117135
n01580077
n03028079
n02102040
n01494475
n04461696
n01917289
n04146614
n04004767
n02906734
n01560419
n02085936
n12267677
n03075370
n01682714
n02669723
n01751748
n02999410
n10148035
n02797295
n03958227
n03134739
n01860187
n02443114
n03028079
n03495258
n03787032
n02108089
n01687978
n01484850
n02098105
n03942813
n02109525
n04613696
n01631663
n09835506
n01784675
n02137549
n09472597
n02895154
n03676483
n04209239
n01784675
n03028079
n03355925
n03483316
n03337140
n03495258
n04311004
n04270147
n03791053
n02488702
n02895154
n02100583
n10565667
n04548280
n02091134
n01806567
n02264363
n02708093
n02111277
n02692877
n03837869
n03240683
n03773504
n03706229
n03742115
n01734418
n12998815
n03452741
n06596364
n03041632
n02096585
n04317175
n07892512
n01755581
n03777568
n03457902
n02106382
n01601694
n03691459
n02114855
n03461385
n02096294
n03498962
n04482393
n02412080
n03857828
n02124075
n02106550
n03950228
n07730033
n02093991
n07768694
n02870880
n02672831
n02268443
n03773504
n09332890
n02025239
n04562935
n07742313
n04192698
n04049303
n01644900
n02769748
n01774384
n02894605
n03127747
n03045698
n03388549
n03724870
n03706229
n03825788
n01775062
n03670208
n02492035
n01983481
n04435653
n03028079
n03445924
n02108000
n01882714
n02346627
n09399592
n12620546
n03047690
n02807133
n03630383
n03325584
n02110063
n07860988
n01443537
n04523525
n02112706
n02815834
n03720891
n03843555
n02992211
n02107908
n03662601
n03207743
n04507155
n02094433
n02791270
n02788148
n02094258
n02105162
n04179913
n07930864
n03873416
n02027492
n02790996
n03924679
n07753275
n03658185
n02444819
n07802026
n01484850
n02113186
n02110341
n02090622
n04366367
n01773157
n03792972
n02690373
n02090622
n06794110
n02101388
n07697313
n03297495
n03032252
n01688243
n02090379
n02017213
n04152593
n02108551
n03658185
n02643566
n04049303
n03544143
n03709823
n01632458
n02111500
n07717556
n01688243
n07747607
n01592084
n03485794
n02443114
n03888257
n07753592
n01930112
n03127747
n01580077
n12057211
n03344393
n03697007
n01601694
n01818515
n04517823
n04584207
n02002724
n03424325
n03895866
n03787032
n02100236
n03110669
n04523525
n01983481
n04465501
n02090721
n02980441
n02088094
n02492035
n03109150
n02091635
n07695742
n02074367
n07754684
n02783161
n03761084
n02096585
n04099969
n01930112
n03379051
n02105412
n02097298
n04026417
n03866082
n04004767
n01704323
n04286575
n02321529
n04417672
n04389033
n02909870
n01685808
n01806143
n02006656
n03832673
n07697313
n07932039
n02206856
n12144580
n02108422
n07753113
n03777754
n04259630
n02641379
n13052670
n03788365
n02870880
n02799071
n02137549
n02999410
n04317175
n02094114
n03529860
n03188531
n03160309
n03697007
n02091831
n03594734
n04389033
n02799071
n07747607
n02504458
n04277352
n01914609
n02281787
n03868863
n09421951
n03792782
n02102318
n01484850
n04192698
n02089867
n03584254
n01728572
n03062245
n02109047
n02108422
n02088632
n02447366
n02236044
n02910353
n02105056
n03498962
n03250847
n04120489
n02999410
n03467068
n03187595
n03255030
n04004767
n02091635
n04507155
n03782006
n02317335
n02165456
n04243546
n02099849
n04239074
n09246464
n04335435
n03770439
n01978455
n01644373
n02256656
n02509815
n03584254
n03710721
n01795545
n07753592
n02412080
n07892512
n02091032
n04074963
n03197337
n03075370
n02111129
n03930630
n01770081
n04235860
n02132136
n02100735
n01978287
n02097658
n04540053
n04149813
n02105251
n01984695
n03314780
n02115641
n04235860
n02843684
n04311004
n04118776
n02276258
n02909870
n02701002
n02051845
n04599235
n01689811
n03637318
n03344393
n04591713
n02018795
n02795169
n04462240
n03776460
n03404251
n03188531
n07749582
n01631663
n02123597
n02328150
n02110958
n02125311
n04023962
n03133878
n03131574
n02091467
n01484850
n02096177
n01496331
n02058221
n03028079
n02113023
n02480855
n02892201
n04418357
n03042490
n03124170
n12985857
n04141975
n01860187
n02130308
n04037443
n13052670
n07714571
n02391049
n04149813
n04099969
n01729977
n04243546
n02978881
n03131574
n02127052
n04366367
n02229544
n01669191
n02489166
n07716906
n03208938
n02088466
n02093754
n01632777
n04118538
n02363005
n02114855
n09256479
n02787622
n02105412
n03498962
n12768682
n03216828
n03598930
n02643566
n03837869
n07695742
n01817953
n01667778
n04251144
n02231487
n04005630
n03445777
n04597913
n07615774
n02769748
n01833805
n01828970
n01796340
n01694178
n03995372
n03494278
n03271574
n03014705
n02088632
n03788195
n02328150
n02992529
n03498962
n02169497
n02112137
n02483362
n07836838
n02086240
n01739381
n02325366
n03877472
n04589890
n02133161
n01632777
n02105162
n04019541
n01775062
n02107574
n04509417
n01860187
n02088632
n03459775
n03133878
n04254680
n01755581
n02939185
n02091134
n02114712
n07714990
n02484975
n03445924
n03018349
n02802426
n01774384
n03124043
n03355925
n03146219
n03388183
n02226429
n07860988
n03388183
n04009552
n02488291
n03899768
n03649909
n03393912
n02797295
n03014705
n03729826
n01560419
n02114367
n03637318
n02115641
n04517823
n02346627
n02033041
n02804414
n07714990
n04120489
n03481172
n02099267
n10565667
n03825788
n03240683
n02123597
n02097130
n02090721
n02094433
n02667093
n03461385
n02101388
n09399592
n02109047
n04153751
n04479046
n03223299
n13133613
n01688243
n02363005
n04493381
n02445715
n02280649
n03804744
n04596742
n04597913
n01729322
n02793495
n04604644
n04592741
n03425413
n04332243
n04562935
n02494079
n07693725
n07717410
n06874185
n03063689
n02389026
n02110627
n03930630
n01871265
n07716358
n02114712
n03216828
n06596364
n03494278
n07579787
n04548280
n04409515
n02102040
n07753113
n01632777
n02843684
n02395406
n02100583
n03481172
n02099849
n02708093
n01980166
n02096294
n01744401
n03291819
n04004767
n01534433
n03223299
n03773504
n04090263
n02002724
n02422106
n04325704
n01531178
n02948072
n02281787
n04239074
n04399382
n03400231
n02802426
n02165456
n02256656
n02104029
n06794110
n07932039
n02793495
n02093754
n02834397
n02165456
n03394916
n02138441
n01729977
n02138441
n04311174
n03388043
n03344393
n03445924
n02504013
n13040303
n02363005
n02206856
n03982430
n03661043
n02107574
n03785016
n02231487
n04487394
n04376876
n04277352
n07718472
n04118776
n01914609
n01798484
n01944390
n03355925
n03742115
n02108089
n03924679
n03134739
n02011460
n02974003
n02100583
n01496331
n01860187
n02100236
n04596742
n02119789
n02342885
n04044716
n04099969
n03602883
n07717556
n04548280
n03843555
n04409515
n02093647
n01797886
n04429376
n03063599
n07760859
n02487347
n01697457
n03706229
n02988304
n03134739
n02979186
n02892201
n03840681
n03425413
n13044778
n04330267
n03425413
n02099849
n04044716
n01440764
n02105251
n03599486
n03240683
n02097130
n04162706
n03443371
n02492660
n03793489
n04347754
n04296562
n03666591
n04584207
n04136333
n02123159
n04070727
n02981792
n07718472
n01694178
n10565667
n04532670
n02480495
n07590611
n02111277
n04554684
n01695060
n04311004
n02102480
n04447861
n02807133
n04398044
n04418357
n03690938
n01644373
n03837869
n02493793
n01796340
n02095889
n03781244
n02088466
n02906734
n04596742
n12057211
n02097658
n03954731
n02447366
n03223299
n03710637
n03459775
n04458633
n02397096
n03877472
n07584110
n03393912
n07716906
n07836838
n03720891
n02109961
n04326547
n01753488
n02389026
n07734744
n07745940
n02094114
n02981792
n02097298
n03930630
n02783161
n04346328
n01774750
n01829413
n02910353
n02894605
n02132136
n04372370
n04040759
n02493509
n03788195
n04357314
n02106166
n02168699
n02091831
n02105056
n01986214
n02268443
n01739381
n01774384
n02444819
n02105641
n01687978
n04606251
n03325584
n04596742
n02325366
n02950826
n04067472
n02086646
n02113799
n04557648
n04429376
n01704323
n02056570
n02488291
n07614500
n03089624
n01532829
n03160309
n04550184
n07730033
n02095570
n04367480
n04081281
n04254120
n04443257
n03777568
n03584829
n04201297
n12144580
n02834397
n03127925
n02100735
n02256656
n02092002
n01753488
n04259630
n03197337
n02510455
n02108422
n02013706
n03840681
n02108089
n04485082
n03584829
n02134084
n03814639
n04522168
n04589890
n04252225
n03188531
n03594945
n03691459
n04041544
n04033901
n04090263
n02486410
n03873416
n03871628
n02325366
n02841315
n02037110
n02909870
n01629819
n07565083
n02088094
n03954731
n12998815
n03661043
n04332243
n02167151
n04099969
n04266014
n03733131
n02033041
n02165456
n02109047
n02999410
n02177972
n02033041
n03899768
n01685808
n04023962
n02114712
n03775546
n02092002
n02107142
n02977058
n01582220
n04127249
n03814906
n03769881
n03393912
n03291819
n02497673
n03127925
n09193705
n07831146
n03980874
n07753113
n01558993
n02808304
n03854065
n04483307
n02102040
n04326547
n02443484
n09256479
n03961711
n01641577
n03733131
n04254680
n02099601
n02089078
n03016953
n03216828
n02101388
n02229544
n02606052
n04141076
n01694178
n03063689
n01774384
n02607072
n02091244
n03937543
n04328186
n03532672
n03485407
n07717556
n02006656
n04525305
n02123597
n02708093
n02137549
n07614500
n03947888
n03983396
n03544143
n01440764
n01440764
n03717622
n02085620
n02727426
n03485794
n03825788
n04259630
n02788148
n03930630
n04392985
n02454379
n02100236
n01534433
n02102318
n04044716
n02113186
n02066245
n02127052
n01950731
n03000684
n02843684
n04147183
n02110063
n07590611
n02113712
n04074963
n03871628
n02168699
n09246464
n07802026
n01693334
n03908714
n02130308
n09193705
n02091244
n02111500
n03642806
n04033901
n02999410
n02128925
n06359193
n07717410
n02102318
n04208210
n02086079
n03868863
n03743016
n03062245
n03717622
n04069434
n03598930
n01978287
n04026417
n01748264
n02096294
n04483307
n01592084
n03787032
n03742115
n01795545
n02807133
n02769748
n02108915
n04509417
n02093754
n02129604
n02090622
n01806567
n04579432
n04542943
n03400231
n07871810
n09399592
n02114367
n04049303
n02979186
n02494079
n03944341
n03535780
n03297495
n07831146
n02457408
n04254680
n03028079
n03498962
n02883205
n02077923
n02090721
n04005630
n02056570
n01775062
n03866082
n02087394
n04336792
n01917289
n04111531
n02007558
n04086273
n02843684
n13037406
n04200800
n03000684
n03991062
n02488702
n02808440
n03887697
n01784675
n02058221
n02841315
n02114367
n03657121
n02787622
n03095699
n03450230
n02123394
n02869837
n03793489
n02094258
n04380533
n02978881
n07584110
n02927161
n02930766
n02093428
n04507155
n03534580
n03857828
n01872401
n03337140
n02980441
n02102177
n02509815
n02097047
n02992529
n02797295
n03866082
n02279972
n03485794
n03530642
n01518878
n04483307
n04033901
n07749582
n02917067
n03623198
n02233338
n03623198
n03594945
n02256656
n02999410
n02093991
n02002724
n03788365
n03623198
n02110063
n01740131
n04346328
n04033995
n02095889
n04311174
n02445715
n03218198
n02640242
n04462240
n03180011
n02093256
n03425413
n02504013
n03877472
n02087046
n03976467
n02091134
n04044716
n02088364
n02009912
n02206856
n03297495
n02871525
n03633091
n02105855
n03075370
n02119789
n01644373
n03216828
n03478589
n03929855
n02939185
n01847000
n02317335
n01983481
n03657121
n02086910
n02088238
n02168699
n03976467
n07697313
n03743016
n04086273
n04200800
n01632777
n03529860
n03404251
n03255030
n03476991
n04311174
n02093991
n03924679
n03478589
n04258138
n01774384
n02277742
n01980166
n02951358
n03983396
n03482405
n02091244
n01592084
n02415577
n02125311
n03888257
n03871628
n02096437
n03743016
n04118776
n02526121
n07711569
n01694178
n01744401
n03424325
n10565667
n02007558
n01860187
n03127925
n04380533
n03637318
n02088238
n04118538
n02101006
n02110958
n01820546
n02106550
n03874293
n02229544
n03937543
n03838899
n04147183
n03697007
n02655020
n01677366
n02415577
n03891332
n03673027
n02328150
n02363005
n04209133
n04065272
n04399382
n02114548
n03724870
n12620546
n04277352
n02105855
n01704323
n01697457
n02094433
n02110958
n02092339
n01734418
n02108915
n02791270
n01534433
n04111531
n03476684
n02708093
n01955084
n01580077
n01592084
n03602883
n02871525
n04037443
n02086910
n13040303
n07749582
n01930112
n13037406
n03792972
n01775062
n02403003
n02974003
n01644373
n02966193
n03481172
n02095570
n03297495
n01614925
n01440764
n02879718
n02105641
n03125729
n03891332
n01697457
n03443371
n03794056
n02231487
n02395406
n02787622
n03425413
n02111889
n01632458
n02110806
n03584829
n03733805
n04613696
n07747607
n02687172
n03792782
n02492035
n02489166
n03393912
n03018349
n03843555
n02769748
n02168699
n03272010
n04532106
n01943899
n01882714
n03127747
n02088632
n04589890
n12768682
n07715103
n02410509
n03995372
n01728920
n02091134
n01820546
n01739381
n02917067
n04591157
n07697313
n01728920
n02835271
n02028035
n03908714
n02096294
n02106030
n03384352
n02174001
n04522168
n03866082
n02817516
n01978287
n04259630
n04399382
n02113978
n03447721
n02749479
n03188531
n02483708
n07693725
n03014705
n01622779
n03642806
n02018207
n09332890
n03670208
n03291819
n02017213
n02098286
n04141327
n02105251
n02447366
n02321529
n03792782
n01443537
n01943899
n04522168
n13133613
n03891251
n02106166
n04592741
n04179913
n03216828
n04467665
n01883070
n07614500
n02105162
n04456115
n04332243
n04049303
n07615774
n01616318
n07802026
n03291819
n01688243
n02396427
n09229709
n09399592
n02027492
n04517823
n03325584
n02165456
n03803284
n02802426
n09428293
n02168699
n02106662
n03259280
n03733131
n04258138
n01924916
n01945685
n09428293
n02871525
n02786058
n03721384
n04285008
n03485794
n01784675
n04428191
n02092002
n04372370
n04099969
n03026506
n02971356
n02106030
n04131690
n01847000
n03794056
n12985857
n02488702
n01872401
n03372029
n01806567
n01917289
n03444034
n01776313
n02814533
n02672831
n03637318
n02113978
n02165456
n04548280
n02917067
n01560419
n02825657
n04552348
n02999410
n02190166
n03065424
n02825657
n07716358
n02877765
n09421951
n12267677
n01819313
n04264628
n03344393
n02002724
n01641577
n02256656
n01532829
n03854065
n02791270
n02951585
n03014705
n01592084
n01728572
n01774750
n03868242
n04370456
n03337140
n03124043
n03290653
n02488291
n04505470
n04553703
n02107574
n01692333
n12620546
n04086273
n03657121
n01582220
n03485407
n03840681
n07768694
n03782006
n02114548
n11939491
n04552348
n03208938
n02006656
n03764736
n07695742
n01820546
n02326432
n02009229
n02408429
n03018349
n03018349
n02504458
n02089973
n01917289
n01739381
n02130308
n04099969
n02102040
n03788195
n03764736
n02422699
n01978287
n02860847
n02749479
n03877845
n03404251
n04209133
n07695742
n04090263
n03720891
n04311174
n03642806
n03933933
n04005630
n02093991
n02977058
n09835506
n03417042
n01742172
n03888257
n02782093
n07802026
n03208938
n02130308
n02090622
n04040759
n02422699
n03594945
n02437616
n03337140
n09399592
n02129604
n02488291
n04597913
n03089624
n03710193
n02930766
n04435653
n01806567
n03100240
n01582220
n03871628
n02422106
n02494079
n04372370
n07716358
n04277352
n02236044
n03891332
n03814639
n02396427
n02793495
n02096437
n02504458
n02085936
n01978287
n04239074
n03532672
n02869837
n02127052
n03680355
n02206856
n03602883
n01817953
n03733805
n03938244
n03450230
n04044716
n02965783
n03938244
n01592084
n03290653
n04479046
n07831146
n01735189
n04525305
n02870880
n02776631
n02172182
n04081281
n03876231
n01985128
n01917289
n10148035
n04286575
n03598930
n02085782
n02699494
n04009552
n03492542
n07749582
n03017168
n03494278
n02134418
n03792782
n01687978
n13040303
n03220513
n03347037
n03476684
n01828970
n02114367
n07715103
n02119789
n01749939
n03791053
n02457408
n01440764
n01824575
n04372370
n07802026
n04270147
n04033901
n04515003
n03950228
n04005630
n02091032
n02090379
n02486410
n07684084
n04592741
n02106382
n02165456
n02483708
n01737021
n02814533
n04081281
n03884397
n07749582
n01641577
n03929855
n04550184
n04467665
n03930313
n02951585
n02747177
n04487394
n01773549
n04228054
n02410509
n04596742
n02795169
n03496892
n04613696
n02398521
n03814906
n02823750
n02106550
n02128385
n02364673
n03770679
n02099429
n01669191
n12057211
n04476259
n02229544
n03781244
n02509815
n02807133
n02132136
n03447721
n02840245
n03743016
n04118776
n04356056
n02190166
n03424325
n04606251
n04146614
n04040759
n07754684
n02119022
n02454379
n02443484
n04310018
n03527444
n04399382
n03843555
n01740131
n02127052
n02749479
n03045698
n02086240
n01795545
n04592741
n02701002
n04149813
n02823750
n01728920
n04493381
n02894605
n03970156
n03838899
n03877845
n03534580
n02094258
n03047690
n02033041
n03208938
n03124043
n03000134
n03250847
n01817953
n02727426
n01669191
n02268443
n03770439
n02389026
n04550184
n02804610
n03461385
n02091244
n02363005
n02391049
n07717410
n03404251
n07695742
n04462240
n01817953
n06359193
n01685808
n02509815
n09835506
n04523525
n04398044
n01955084
n02423022
n02129604
n02066245
n01773797
n02859443
n04090263
n03617480
n04548280
n03929855
n03777754
n02791270
n02317335
n03791053
n03180011
n01677366
n03976467
n02497673
n01729322
n03297495
n02268853
n01742172
n07716906
n03630383
n02825657
n02094258
n07873807
n03776460
n01843383
n02840245
n02607072
n01491361
n03109150
n03908618
n02132136
n01950731
n02133161
n04070727
n03384352
n03594945
n03933933
n03891332
n01968897
n09229709
n02095314
n02088364
n01641577
n03124170
n03272562
n02817516
n01943899
n07590611
n04235860
n03991062
n02006656
n04026417
n02113799
n04311004
n02815834
n04008634
n07718472
n02437616
n04325704
n03676483
n03207941
n02066245
n03873416
n02489166
n03782006
n04523525
n03710637
n02791270
n09835506
n01768244
n03888257
n04325704
n02007558
n01641577
n03983396
n04179913
n03786901
n03425413
n02012849
n03876231
n02802426
n04067472
n02112350
n02797295
n03895866
n07753113
n03297495
n02091635
n04487394
n03729826
n02104029
n02102973
n03000247
n01871265
n03920288
n03627232
n02229544
n02092339
n02802426
n03018349
n13044778
n03014705
n02776631
n03109150
n13052670
n03218198
n04125021
n04550184
n04479046
n04443257
n03908618
n02094433
n02113186
n02105162
n02980441
n02971356
n07697313
n02102177
n04613696
n02095889
n02979186
n09472597
n03476684
n02692877
n01756291
n03976657
n03494278
n03026506
n04228054
n04146614
n03100240
n02018795
n01873310
n04026417
n02086910
n04192698
n02093991
n04116512
n02107908
n02066245
n04026417
n02444819
n02536864
n02361337
n03770439
n02086646
n03444034
n04008634
n02727426
n07615774
n02107908
n03637318
n04317175
n03662601
n09256479
n03933933
n03666591
n02102318
n07802026
n04467665
n03109150
n03710721
n02817516
n01855672
n03259280
n02108089
n01943899
n02655020
n02817516
n07871810
n03935335
n03250847
n04417672
n04252077
n01910747
n03950228
n02009912
n02690373
n02787622
n01685808
n02486410
n04326547
n03467068
n01742172
n02965783
n04209133
n06874185
n01797886
n01755581
n03942813
n02087394
n02137549
n03047690
n04447861
n04275548
n02229544
n03530642
n01930112
n04548362
n04552348
n02486261
n02328150
n03355925
n02096177
n02403003
n01817953
n01629819
n03983396
n03207941
n01806567
n02089973
n07714990
n03590841
n02086646
n03781244
n02090622
n03445924
n02051845
n04560804
n09288635
n03840681
n01622779
n03445924
n02058221
n03837869
n02125311
n02783161
n01698640
n02787622
n03706229
n02840245
n02808440
n03680355
n01560419
n01978287
n02422699
n01687978
n01537544
n03793489
n03016953
n04044716
n01560419
n02056570
n03179701
n09468604
n03623198
n02690373
n02454379
n04467665
n02112018
n04591157
n04243546
n04254777
n01558993
n07932039
n04258138
n02085936
n03240683
n04409515
n03661043
n01532829
n03930630
n02112350
n02837789
n02098286
n04485082
n03272562
n02105505
n03916031
n07742313
n03042490
n02105855
n04229816
n04447861
n02916936
n02120505
n02917067
n01984695
n02454379
n03529860
n03482405
n04049303
n03452741
n02113023
n03447721
n01728572
n03942813
n03929855
n03344393
n01692333
n01945685
n03929660
n07565083
n04579432
n03594734
n03793489
n02114712
n02111129
n02091244
n12057211
n02493793
n03404251
n03026506
n01817953
n02130308
n02930766
n03594734
n02777292
n02486410
n09468604
n02489166
n01981276
n04275548
n02865351
n04118538
n01641577
n02113624
n04008634
n01945685
n02692877
n02749479
n03891332
n02795169
n02105641
n04136333
n04417672
n04263257
n06596364
n02091032
n03770679
n07749582
n02977058
n03594734
n02317335
n04550184
n02437312
n01728572
n02395406
n04522168
n04209133
n02108000
n01843383
n04004767
n03804744
n04398044
n02643566
n13052670
n03443371
n02101388
n02133161
n02641379
n03814906
n02115913
n02108915
n01978287
n04277352
n04493381
n01608432
n04548280
n03379051
n03796401
n02051845
n04350905
n04612504
n03207743
n02097298
n03447447
n02804610
n01770393
n10148035
n02094258
n03720891
n02089078
n02130308
n02536864
n03942813
n02110341
n04579432
n07716358
n03095699
n02128925
n04141975
n02119789
n03481172
n03532672
n02655020
n07749582
n02109961
n02101556
n03662601
n03803284
n02641379
n04367480
n02101388
n04562935
n01694178
n02088466
n02536864
n03781244
n04192698
n02167151
n02089078
n03544143
n03026506
n02128925
n04251144
n03929855
n03085013
n03125729
n01677366
n03661043
n04584207
n04200800
n02487347
n02321529
n03814906
n01924916
n02802426
n01693334
n02169497
n02128925
n07717556
n03895866
n02099429
n03085013
n11939491
n09468604
n02109047
n07565083
n04310018
n02988304
n07754684
n02058221
n02114367
n03485794
n03424325
n04443257
n01697457
n02219486
n02877765
n01644900
n03775071
n02097047
n02085620
n07693725
n03160309
n02815834
n03110669
n03868863
n04008634
n03743016
n02094114
n03208938
n07590611
n04273569
n03706229
n02013706
n07753592
n02916936
n02112137
n02108089
n03841143
n03595614
n03125729
n07742313
n02487347
n04235860
n02782093
n01742172
n04604644
n04554684
n04086273
n02906734
n02091635
n03201208
n07693725
n09332890
n02088364
n03017168
n03729826
n03983396
n03676483
n04204347
n04251144
n02917067
n04081281
n03930313
n03494278
n03160309
n02389026
n03250847
n03133878
n02091635
n02389026
n02087394
n02113799
n02281787
n04548280
n04509417
n03384352
n02009229
n04370456
n07753275
n02102177
n01494475
n03459775
n02804610
n04456115
n02099712
n01494475
n04344873
n03788195
n01944390
n01910747
n03868242
n03452741
n13044778
n01883070
n02701002
n02793495
n02692877
n03220513
n01978287
n02483362
n01776313
n02808304
n03721384
n02012849
n03733281
n07920052
n02326432
n04192698
n02113799
n02106550
n02097298
n02509815
n02835271
n04548280
n04522168
n03950228
n01689811
n09428293
n01877812
n02100583
n01704323
n03680355
n03000247
n03742115
n04486054
n02097298
n02091635
n03680355
n02002556
n02101388
n01818515
n02454379
n03216828
n03933933
n02107683
n04252077
n02980441
n04039381
n03201208
n02102177
n03388549
n04523525
n03770439
n03710193
n01675722
n04501370
n04501370
n02092002
n03598930
n07932039
n02101006
n02268853
n04259630
n03871628
n02786058
n03485794
n02009912
n02091244
n02808304
n01860187
n07613480
n01843065
n02095889
n01943899
n02859443
n02112350
n02165456
n01773797
n02328150
n03485407
n01955084
n01601694
n03290653
n01796340
n06359193
n01558993
n03950228
n02096437
n02093859
n01773549
n04154565
n02437616
n02017213
n04146614
n02488702
n02137549
n02013706
n02100735
n04465501
n02727426
n04467665
n02095889
n02415577
n03075370
n02097298
n02027492
n02441942
n02104029
n03617480
n03623198
n02536864
n07875152
n04208210
n02423022
n03016953
n01669191
n04344873
n02526121
n09472597
n03873416
n01829413
n12057211
n02950826
n02786058
n02486410
n02486261
n02423022
n02107574
n03773504
n01558993
n02096177
n03961711
n01873310
n04118538
n02091032
n03483316
n13040303
n03180011
n02125311
n02172182
n03976657
n02094258
n02980441
n02107312
n01755581
n02776631
n02492660
n01664065
n01514668
n02966193
n02492035
n03482405
n04019541
n03954731
n02106550
n04404412
n02797295
n01955084
n04612504
n04069434
n02492035
n10565667
n02091134
n01631663
n02727426
n02071294
n02124075
n02092002
n02321529
n04208210
n01819313
n02087046
n04409515
n03485794
n04356056
n02087046
n02492035
n02085782
n03788365
n02483708
n04532106
n02106030
n03742115
n03868242
n03000684
n02100236
n02398521
n03976657
n03595614
n03884397
n03109150
n02978881
n02279972
n02391049
n03417042
n01734418
n07565083
n03970156
n02256656
n01689811
n02107683
n04591713
n02105855
n04099969
n02980441
n07720875
n04259630
n07920052
n03777754
n02099429
n03777568
n03759954
n02109525
n04264628
n03584829
n04525305
n02099712
n01689811
n02169497
n02011460
n02109961
n03814906
n02095314
n03866082
n02966687
n03710721
n02690373
n02514041
n03062245
n02797295
n02167151
n01518878
n13040303
n13044778
n02088364
n03045698
n03857828
n09288635
n03873416
n10148035
n02837789
n03388183
n03272010
n13054560
n02699494
n02051845
n02966193
n02437312
n04557648
n02177972
n03792782
n01751748
n02892767
n04344873
n03902125
n01558993
n02087394
n02006656
n01784675
n02099601
n03930313
n02980441
n02097209
n02091032
n03742115
n02606052
n02104365
n02097130
n07860988
n02120079
n04235860
n02883205
n02727426
n02099267
n03884397
n02992211
n03095699
n04254777
n02093859
n03146219
n04548362
n04335435
n02489166
n01531178
n02259212
n02894605
n02114855
n03188531
n02088466
n03956157
n04589890
n04525038
n02233338
n04612504
n07711569
n02437312
n03976657
n12144580
n01843065
n02120505
n07745940
n04552348
n03710721
n03425413
n01697457
n02396427
n02092339
n02493509
n02087046
n02123159
n04251144
n04259630
n02096051
n04507155
n02106662
n03445777
n03494278
n01756291
n03063689
n02105162
n04346328
n04591713
n03662601
n02093428
n02917067
n03710721
n02493509
n02794156
n07720875
n01669191
n02088364
n01873310
n04037443
n03598930
n07714571
n04069434
n03888257
n07718472
n03676483
n03929660
n02514041
n02105056
n04275548
n03534580
n04296562
n03770439
n02165456
n02704792
n03995372
n04344873
n02123159
n11879895
n02094114
n02514041
n03388549
n01629819
n02776631
n02963159
n03857828
n07768694
n01847000
n02229544
n02834397
n04380533
n07717410
n02112706
n03014705
n11939491
n02769748
n03075370
n03534580
n02116738
n02111277
n03482405
n02096294
n01819313
n02105056
n04540053
n03028079
n03467068
n02107683
n12768682
n02481823
n02447366
n03255030
n02977058
n12620546
n03131574
n02981792
n02110063
n03494278
n02415577
n02398521
n04554684
n03063599
n04579145
n04335435
n04264628
n04311004
n02457408
n02106550
n04483307
n02977058
n02091244
n02169497
n03041632
n03630383
n02669723
n02104029
n02364673
n02749479
n02107312
n02128925
n02091831
n04554684
n01978287
n02655020
n02125311
n04136333
n07753113
n01943899
n04204347
n03372029
n04418357
n02980441
n02859443
n04235860
n09472597
n02328150
n02017213
n01734418
n03930313
n03868242
n04355338
n04118538
n02804610
n02028035
n02835271
n02114548
n03710193
n04033901
n01984695
n03443371
n03956157
n07753113
n03532672
n01664065
n02786058
n02125311
n02085620
n02655020
n04235860
n03018349
n13040303
n03658185
n04254680
n01484850
n03594945
n04209133
n03877845
n12985857
n02102040
n02112018
n03467068
n02115641
n04562935
n03042490
n04429376
n02895154
n13052670
n01514668
n01491361
n01924916
n04039381
n02437616
n04065272
n01855672
n03733281
n03935335
n02492035
n02130308
n04131690
n01484850
n03197337
n03761084
n03899768
n02128385
n04604644
n03623198
n04152593
n02783161
n04252225
n04118538
n02412080
n03717622
n02480495
n02102480
n02676566
n02492035
n04265275
n07742313
n03483316
n03706229
n02129165
n07718747
n03967562
n01443537
n02190166
n01943899
n02089078
n03627232
n02110958
n03902125
n04081281
n02172182
n02099849
n02492035
n02999410
n04435653
n03127925
n07880968
n04243546
n03544143
n01877812
n02823750
n02814533
n02916936
n02120505
n02088632
n02977058
n07734744
n02676566
n01770081
n04116512
n02871525
n02091032
n02536864
n03223299
n02963159
n03180011
n03207743
n03496892
n03444034
n03100240
n04592741
n02091831
n04613696
n02097130
n03196217
n04523525
n04505470
n04153751
n03786901
n03220513
n02808440
n04399382
n03594945
n01978455
n01824575
n01986214
n03792782
n02730930
n03208938
n02641379
n02106030
n02106550
n02110063
n03786901
n04532670
n03595614
n13054560
n02233338
n03803284
n03355925
n02236044
n02951585
n03063599
n03047690
n01496331
n02708093
n02356798
n04442312
n02107574
n03459775
n04026417
n02860847
n02655020
n03983396
n03658185
n04589890
n03956157
n02093991
n02091032
n02977058
n01667114
n02500267
n03347037
n07716906
n03598930
n02841315
n04254777
n04049303
n13040303
n03495258
n04596742
n15075141
n02105251
n01667114
n01775062
n02002724
n04536866
n01768244
n02808440
n02087046
n02917067
n04111531
n02190166
n03690938
n13040303
n04133789
n03877845
n01985128
n03220513
n03970156
n04483307
n01641577
n03384352
n02823750
n02088238
n04346328
n04423845
n04356056
n04509417
n02606052
n01704323
n07831146
n02120505
n02099601
n02799071
n02233338
n03394916
n02865351
n03272562
n03843555
n09246464
n02825657
n02951585
n03692522
n04517823
n03803284
n02086910
n07613480
n09399592
n03775071
n02099429
n07695742
n03527444
n04330267
n03832673
n02894605
n02951585
n09332890
n13054560
n03623198
n02363005
n04275548
n09288635
n03902125
n04435653
n04398044
n02666196
n04147183
n02454379
n02107574
n04592741
n04200800
n02066245
n01629819
n03272562
n03877472
n02009229
n03532672
n02437312
n02089078
n04127249
n03443371
n02091635
n02667093
n03935335
n02364673
n02165105
n03770439
n03063599
n02363005
n03100240
n02815834
n04275548
n02791270
n02325366
n01695060
n02787622
n07753113
n02128385
n04125021
n02395406
n04371430
n03388043
n12620546
n04597913
n03967562
n02708093
n02280649
n02113978
n09288635
n03425413
n03207941
n01740131
n04120489
n02106382
n02536864
n04458633
n03633091
n03967562
n04371430
n02690373
n02113186
n02870880
n02114855
n02396427
n02132136
n02107908
n01950731
n02992529
n03814639
n03594734
n07613480
n07932039
n03721384
n02641379
n03721384
n03661043
n04509417
n02814533
n02437616
n04192698
n02002724
n15075141
n03670208
n02974003
n02094433
n03617480
n04486054
n03290653
n03255030
n04435653
n02916936
n01728572
n01632777
n03028079
n02106382
n12267677
n02279972
n02111129
n01820546
n03680355
n03991062
n02090721
n02879718
n01514668
n01728572
n04442312
n03379051
n02930766
n03982430
n02497673
n02115641
n02389026
n02793495
n03594945
n03661043
n04398044
n01773797
n03630383
n07892512
n02259212
n02128757
n03595614
n03126707
n04200800
n12620546
n02091032
n01531178
n03775071
n02346627
n02096294
n04204347
n02892201
n01807496
n03825788
n02342885
n02128385
n07745940
n04404412
n03720891
n02109961
n03976657
n02093256
n03787032
n03794056
n04136333
n03787032
n02105855
n01774384
n02974003
n02106030
n04023962
n03485794
n02086910
n02091134
n02727426
n04591157
n03804744
n04111531
n03733805
n02787622
n02980441
n03347037
n01630670
n04579432
n01944390
n12620546
n02114712
n03527444
n04239074
n01807496
n01592084
n02879718
n04429376
n02643566
n07871810
n07753113
n03042490
n02281787
n03179701
n01685808
n03814906
n02927161
n02346627
n03160309
n04037443
n02708093
n03590841
n04370456
n02948072
n02494079
n06785654
n04507155
n02011460
n02256656
n04037443
n03485794
n03271574
n04254777
n02128757
n04154565
n03461385
n02966193
n02226429
n02101006
n02112018
n07695742
n02110341
n02443114
n02110185
n02948072
n02840245
n03854065
n02096294
n02980441
n03062245
n03584829
n01644900
n03891251
n03599486
n02701002
n02172182
n03888605
n03642806
n04562935
n01930112
n02389026
n02783161
n02807133
n04099969
n03457902
n03633091
n03594945
n07695742
n07714990
n03208938
n04479046
n09835506
n03595614
n01983481
n03670208
n01734418
n01978455
n03721384
n02091635
n02133161
n04026417
n01734418
n03530642
n04209133
n04099969
n01616318
n02279972
n03676483
n03868863
n02666196
n02396427
n01768244
n03240683
n02112018
n13133613
n03032252
n04235860
n02110627
n03404251
n04350905
n02087046
n01843383
n01797886
n02992211
n02950826
n02268853
n03888605
n07248320
n03160309
n07248320
n03868242
n01704323
n01944390
n04462240
n06794110
n03032252
n04376876
n02281406
n02134418
n03584829
n03598930
n04254777
n04435653
n02017213
n04049303
n03180011
n03782006
n02749479
n04525305
n02791270
n04429376
n02102318
n07584110
n02966687
n02423022
n02107142
n02101556
n04179913
n02999410
n02091134
n02797295
n04560804
n01955084
n07583066
n03743016
n03623198
n03843555
n02134084
n02093256
n02105505
n03788195
n07716906
n04542943
n04296562
n02120079
n03920288
n02892767
n04311174
n04141327
n02117135
n03888605
n04557648
n04523525
n02281787
n02951358
n03680355
n07693725
n02870880
n02007558
n06596364
n01984695
n03345487
n02091244
n09256479
n02105162
n07693725
n03838899
n03534580
n02493509
n02096177
n07892512
n02018795
n04592741
n01728920
n07875152
n01773797
n02051845
n04273569
n03125729
n01773549
n04376876
n04336792
n02137549
n03633091
n01877812
n02128757
n04423845
n02981792
n03452741
n01735189
n04532106
n02268853
n07615774
n03538406
n01917289
n01496331
n01773549
n03788195
n02916936
n03045698
n03743016
n03868863
n04479046
n01882714
n03197337
n02013706
n07873807
n02480855
n04409515
n02930766
n03888257
n03127925
n11939491
n02328150
n02895154
n02408429
n02361337
n02092339
n01484850
n03065424
n02167151
n01798484
n02110341
n02085620
n04417672
n02097047
n04235860
n02692877
n04599235
n04201297
n02110341
n03776460
n02037110
n02174001
n02797295
n02939185
n03637318
n03710721
n02086646
n03657121
n02509815
n07836838
n04592741
n04264628
n04399382
n02814533
n04311174
n02137549
n07753113
n02704792
n02093859
n01694178
n03444034
n01784675
n02088466
n03692522
n02091244
n02133161
n09835506
n01614925
n02168699
n02113624
n03109150
n02190166
n03710721
n02092002
n01644373
n04357314
n01704323
n01882714
n03908618
n04592741
n02095570
n02870880
n04277352
n03666591
n09332890
n02090721
n04326547
n04251144
n04033901
n02977058
n03095699
n02114548
n02966193
n07717410
n04562935
n02814860
n02963159
n02090721
n03891251
n02325366
n03630383
n03742115
n03400231
n07753275
n02174001
n01877812
n02870880
n02892201
n02727426
n02115913
n02395406
n03956157
n02074367
n07760859
n04476259
n03018349
n04208210
n04560804
n03794056
n03803284
n03476684
n01514668
n04347754
n01773157
n01820546
n04443257
n03976657
n04146614
n02100583
n04476259
n01776313
n02095570
n03180011
n02110806
n02129165
n02504013
n02808304
n03854065
n02066245
n01685808
n03290653
n01924916
n03776460
n02102973
n03871628
n04266014
n04350905
n02104029
n03598930
n04344873
n10565667
n02123045
n02437312
n03759954
n02437616
n02123159
n01664065
n02916936
n03124170
n02504013
n03272562
n03617480
n02091244
n02051845
n02090622
n04376876
n04613696
n02108551
n04328186
n01682714
n03777754
n02095570
n07802026
n02437616
n02169497
n02100735
n01748264
n03942813
n04296562
n02264363
n04517823
n03207743
n02927161
n04332243
n02110185
n04409515
n02480495
n09468604
n02100735
n07716358
n15075141
n03814639
n02105251
n01537544
n01855672
n01644900
n04037443
n02870880
n02264363
n04336792
n09229709
n03146219
n02837789
n03733281
n04599235
n04008634
n02111500
n04560804
n02116738
n02009229
n03272562
n02106030
n03666591
n02356798
n09835506
n02727426
n02113712
n02397096
n04153751
n02808304
n02033041
n02992529
n02837789
n03355925
n03492542
n03991062
n02457408
n03085013
n04501370
n02843684
n02490219
n02106382
n02489166
n03670208
n02447366
n02655020
n13054560
n03445924
n03903868
n02099601
n02119022
n02422106
n04019541
n04355933
n04200800
n02123597
n13052670
n03250847
n02992529
n02951585
n03085013
n01768244
n04525305
n03187595
n01798484
n03467068
n04370456
n03832673
n02097130
n03240683
n04371430
n04579432
n04458633
n04483307
n02980441
n02102318
n04154565
n03452741
n03961711
n02808440
n03063689
n02114855
n02096051
n04461696
n04487394
n02113186
n07892512
n03223299
n04081281
n04371774
n04417672
n03249569
n03197337
n02101006
n01768244
n02113186
n03899768
n02783161
n01734418
n01728920
n02497673
n03063599
n04479046
n02895154
n02100877
n01983481
n03908618
n04507155
n03344393
n01829413
n02342885
n02190166
n07802026
n03991062
n02974003
n01698640
n04447861
n03623198
n04347754
n07614500
n12144580
n04254680
n04482393
n01943899
n03887697
n03598930
n02483362
n02120079
n03680355
n03485407
n02130308
n02894605
n03841143
n02172182
n02727426
n04418357
n02097209
n03495258
n02701002
n03481172
n02860847
n04435653
n03384352
n04131690
n02701002
n03868863
n01644373
n03000247
n02397096
n04118776
n02117135
n02051845
n03649909
n02869837
n03661043
n02090622
n02190166
n02134084
n02701002
n03496892
n02871525
n04277352
n02966193
n07697313
n03447447
n03388183
n02483708
n03623198
n09421951
n02128925
n02823428
n02410509
n02099429
n04162706
n01601694
n06794110
n03929660
n07920052
n04273569
n02259212
n03180011
n01685808
n02095889
n04204347
n02804414
n02236044
n04111531
n02132136
n07717556
n03388183
n04200800
n04154565
n02099601
n03065424
n03942813
n01930112
n04049303
n02965783
n03444034
n03131574
n02090721
n02281787
n04389033
n07615774
n02086240
n02105412
n03794056
n03977966
n01728572
n03218198
n07584110
n02134084
n03991062
n03124170
n04070727
n03908618
n07932039
n02110806
n01630670
n03598930
n04355338
n03014705
n02172182
n03721384
n02095314
n02979186
n01742172
n04409515
n02089973
n02422699
n03763968
n02492660
n02910353
n03743016
n03196217
n02840245
n03804744
n04532106
n03773504
n02100236
n02325366
n07753275
n03483316
n01494475
n04344873
n04259630
n03627232
n02280649
n02883205
n04404412
n04357314
n04286575
n03803284
n02098413
n04209239
n01632777
n03908618
n02110185
n02457408
n02788148
n03467068
n01443537
n04310018
n03325584
n02395406
n03133878
n02134084
n02089867
n01833805
n03443371
n03838899
n03216828
n03485794
n03761084
n02500267
n04435653
n01514668
n10565667
n01675722
n02233338
n02497673
n01784675
n03761084
n02279972
n03721384
n02088238
n03017168
n01770081
n03347037
n02231487
n12768682
n03877472
n02730930
n02088238
n01592084
n03998194
n03478589
n03776460
n02086910
n02113624
n02669723
n01930112
n04356056
n12768682
n09421951
n03908618
n02120079
n02133161
n03345487
n02087046
n04118538
n03344393
n02704792
n02112018
n02100583
n03196217
n04133789
n02640242
n02817516
n01740131
n01532829
n04548362
n04509417
n02364673
n02415577
n04204347
n12267677
n03445777
n07584110
n03544143
n03764736
n07892512
n01770393
n01688243
n04033995
n04590129
n01978287
n02113712
n02093428
n01819313
n02437312
n03706229
n03535780
n02112137
n04266014
n02137549
n03630383
n03089624
n04208210
n03100240
n02480495
n02860847
n03062245
n04409515
n04404412
n02687172
n04065272
n03770439
n04049303
n03249569
n02088238
n01978287
n04532106
n01687978
n01751748
n02981792
n03792972
n04326547
n01728920
n04612504
n07714990
n03764736
n07717410
n04141327
n03032252
n02107574
n02226429
n01820546
n02088364
n03961711
n07753113
n02094114
n03733805
n02607072
n02028035
n03857828
n02807133
n04456115
n02640242
n02206856
n12144580
n02115913
n03627232
n02699494
n01756291
n03630383
n02280649
n02799071
n07749582
n01773157
n09256479
n04235860
n06874185
n02002556
n02454379
n03775546
n02177972
n02009229
n03297495
n03895866
n01694178
n01698640
n01796340
n03124043
n02107683
n02981792
n04540053
n07695742
n02102318
n02123597
n04152593
n01695060
n04252077
n01689811
n01882714
n04141327
n07753592
n02793495
n04136333
n03876231
n02860847
n04591157
n04380533
n03259280
n03530642
n01558993
n04355338
n02017213
n02091032
n07615774
n07693725
n02319095
n04335435
n06794110
n11879895
n09332890
n02708093
n02643566
n03895866
n03838899
n03393912
n02112137
n01955084
n02094433
n02791124
n03877472
n03792782
n01756291
n02097474
n03259280
n02190166
n07715103
n02095889
n04532106
n04597913
n03743016
n04548362
n02481823
n03388549
n02319095
n03792972
n02823750
n03623198
n03933933
n02231487
n03476684
n02098286
n02169497
n03379051
n02457408
n07742313
n07615774
n02206856
n04239074
n03393912
n01592084
n03680355
n02837789
n03590841
n01986214
n03657121
n03697007
n01697457
n02447366
n04418357
n04367480
n03220513
n04479046
n03100240
n03000684
n01978287
n02105855
n03127925
n02105855
n02092002
n02028035
n02094258
n04204347
n01795545
n02125311
n02823750
n02112137
n03126707
n02123597
n03223299
n01798484
n02280649
n01776313
n02641379
n01608432
n03249569
n01630670
n03895866
n03888257
n02422106
n02093859
n04125021
n04065272
n03814906
n03992509
n04423845
n03393912
n02066245
n02114548
n10148035
n01608432
n04355338
n04277352
n03976467
n02859443
n04141076
n02127052
n02088466
n07880968
n09835506
n03874293
n03481172
n04355338
n02894605
n03544143
n02977058
n01773157
n02486261
n02112137
n03075370
n01601694
n04004767
n04273569
n04275548
n02966193
n03443371
n01755581
n02100877
n04325704
n02090379
n02088466
n03347037
n03691459
n01616318
n01820546
n04009552
n03637318
n01795545
n02108000
n01843383
n03908618
n07753275
n02950826
n04069434
n02701002
n02799071
n02786058
n02526121
n03459775
n04552348
n04462240
n02108915
n02088364
n02791270
n01682714
n02123394
n02101388
n02840245
n04493381
n01990800
n04162706
n13054560
n01632777
n02093859
n02025239
n02797295
n03179701
n02980441
n04596742
n01980166
n09835506
n03445777
n03110669
n02094114
n02086079
n01443537
n02110063
n04355338
n01560419
n03355925
n02119022
n03447447
n02219486
n02113624
n04523525
n01983481
n10565667
n03803284
n04367480
n03400231
n01980166
n04596742
n02417914
n02514041
n02033041
n02094114
n02134084
n13040303
n03763968
n04111531
n02090622
n02486261
n03452741
n04458633
n02094114
n02097658
n01978455
n02988304
n04229816
n02892767
n02804414
n03240683
n01443537
n02088632
n02172182
n02786058
n02701002
n04515003
n07693725
n03594945
n02100735
n04204347
n02093754
n09428293
n03958227
n03042490
n06359193
n02102177
n03445924
n04141975
n03690938
n02108089
n03075370
n04517823
n03208938
n03958227
n10148035
n02444819
n02092002
n10565667
n02437312
n02280649
n02909870
n03977966
n03110669
n03777568
n07930864
n04560804
n03888605
n02120505
n03014705
n01744401
n03770439
n03393912
n02727426
n02093754
n03379051
n03788195
n02099601
n02481823
n03291819
n04127249
n03803284
n03794056
n03478589
n02009912
n07579787
n02951358
n03297495
n04517823
n03794056
n03854065
n04325704
n03902125
n03207941
n03160309
n02727426
n03498962
n02056570
n01530575
n03290653
n03133878
n02099267
n03742115
n04273569
n02977058
n03724870
n04597913
n03763968
n03201208
n02672831
n02096437
n02916936
n04398044
n03110669
n01580077
n03775546
n01665541
n03109150
n01843383
n01751748
n04487394
n02804414
n04200800
n03661043
n01806143
n01641577
n02325366
n03976467
n02917067
n01819313
n04465501
n01955084
n03063599
n04099969
n02793495
n02086079
n02859443
n03690938
n13052670
n02088238
n02699494
n03721384
n02006656
n02415577
n02981792
n02492035
n03379051
n02280649
n03095699
n03720891
n03459775
n02422106
n01644373
n03347037
n02834397
n03218198
n03627232
n04557648
n02423022
n01784675
n03425413
n04579432
n07875152
n03461385
n03404251
n03658185
n07720875
n01943899
n12620546
n03967562
n02102480
n02500267
n02087046
n03595614
n02100236
n07892512
n04505470
n01986214
n02447366
n01978455
n03942813
n02917067
n02125311
n04275548
n02077923
n01829413
n04557648
n02483362
n03250847
n02454379
n02793495
n03891251
n03938244
n03467068
n02226429
n02106166
n04465501
n04423845
n02108422
n02776631
n01773797
n03250847
n04606251
n01664065
n04127249
n04254777
n02483362
n03041632
n01729322
n02093859
n02977058
n04252225
n02116738
n02950826
n03494278
n02130308
n03786901
n04462240
n03617480
n04418357
n02879718
n03018349
n03272010
n03379051
n01614925
n02102040
n01630670
n03627232
n13037406
n09288635
n07584110
n02102177
n03347037
n01632458
n01768244
n03584254
n04346328
n03599486
n03109150
n03692522
n15075141
n01742172
n02841315
n13040303
n02117135
n02107142
n04266014
n03724870
n07248320
n02704792
n03871628
n01990800
n02129604
n02119789
n02125311
n04606251
n07768694
n03187595
n04376876
n04483307
n02110063
n02107142
n02782093
n04487081
n01675722
n01608432
n03297495
n02098105
n01950731
n04238763
n02105855
n04552348
n02051845
n02128925
n02877765
n02128385
n02877765
n01872401
n01682714
n03481172
n02509815
n02236044
n02280649
n02488702
n03492542
n01749939
n03207743
n03179701
n02100877
n01981276
n03710637
n03223299
n01630670
n03877472
n01560419
n02259212
n04127249
n03796401
n04486054
n01807496
n03492542
n01694178
n01740131
n01985128
n03637318
n03584254
n07717556
n07753592
n02791124
n03786901
n02965783
n03733131
n04458633
n01614925
n04435653
n03534580
n04532106
n02276258
n01697457
n03187595
n04590129
n04004767
n03877472
n07248320
n03207743
n02892767
n03976467
n03133878
n03594734
n01877812
n03785016
n04613696
n03534580
n02013706
n01985128
n02110806
n02441942
n04554684
n03916031
n01748264
n04204347
n03450230
n01622779
n02799071
n02017213
n03201208
n02487347
n02497673
n01795545
n02487347
n04487081
n03710637
n04026417
n07747607
n02092002
n02701002
n02492660
n03995372
n02415577
n02091831
n02423022
n02165456
n03666591
n04604644
n02107142
n02951358
n02219486
n04542943
n03777568
n03787032
n04332243
n02927161
n09288635
n01704323
n02091244
n02894605
n04554684
n02085936
n03014705
n01871265
n02113799
n02107683
n03347037
n04296562
n09256479
n02110341
n06874185
n03967562
n02708093
n04344873
n02437616
n04523525
n02099712
n04404412
n04277352
n02948072
n04111531
n03452741
n02966193
n03452741
n02100735
n04597913
n07747607
n03764736
n02123159
n02107574
n01729977
n03976467
n03788195
n07717556
n15075141
n04596742
n01729977
n03042490
n02102040
n02093991
n12144580
n02107908
n04612504
n02981792
n01644900
n02128385
n02128925
n02110806
n01748264
n02777292
n04209239
n02112350
n02361337
n04141327
n02229544
n02281406
n03895866
n02108915
n12768682
n02106030
n03218198
n04133789
n02093428
n03461385
n02119789
n03444034
n02877765
n03724870
n03773504
n01698640
n02504013
n02231487
n01558993
n06785654
n01981276
n02389026
n04277352
n02687172
n03291819
n04447861
n04310018
n02486410
n02105855
n02948072
n03785016
n02002724
n03417042
n03188531
n02259212
n02776631
n02951585
n03337140
n01751748
n02879718
n04277352
n12057211
n02951585
n03967562
n07714571
n02085620
n02510455
n02869837
n01980166
n01756291
n03792972
n02112137
n03680355
n03841143
n07565083
n07693725
n07715103
n01820546
n01873310
n03777568
n01833805
n02676566
n03447721
n02500267
n03602883
n04239074
n04118538
n04536866
n04548362
n02776631
n01667778
n03825788
n03891332
n04258138
n04542943
n02099849
n03041632
n04179913
n01632458
n01537544
n02930766
n03814639
n02643566
n03498962
n01798484
n02692877
n03134739
n03314780
n02870880
n07768694
n04141076
n03786901
n03314780
n02172182
n02092339
n03259280
n07880968
n02115641
n01990800
n12768682
n07930864
n03527444
n02091244
n03769881
n01494475
n03249569
n02395406
n03776460
n12985857
n02056570
n02486410
n01737021
n02488702
n01978455
n01622779
n02510455
n01776313
n07831146
n02018207
n02808304
n01855032
n03803284
n02514041
n02099849
n01806143
n03837869
n03902125
n02895154
n04208210
n02107142
n01855672
n02480495
n04065272
n03761084
n02100236
n02111277
n02089867
n04552348
n02791124
n02101556
n02480855
n02097658
n03180011
n03899768
n02087394
n02236044
n02794156
n04550184
n02099849
n02111129
n03976657
n01847000
n04465501
n03063599
n03733131
n09332890
n02892767
n01978455
n02111129
n03832673
n04141327
n02276258
n03786901
n02672831
n01978455
n02807133
n03290653
n03297495
n02112350
n02894605
n03763968
n02776631
n04606251
n03498962
n04443257
n04355933
n02727426
n12057211
n04376876
n02403003
n03495258
n04584207
n04462240
n01729322
n03207941
n02483708
n10565667
n03866082
n04019541
n04154565
n13052670
n02992211
n03642806
n03372029
n03832673
n03617480
n01797886
n04591157
n04443257
n03045698
n03207941
n04081281
n02165105
n02105412
n02980441
n02097658
n02823750
n02397096
n03662601
n01514859
n03759954
n02859443
n02011460
n03467068
n04458633
n02111277
n01751748
n03127747
n03838899
n07715103
n02894605
n02793495
n07248320
n03995372
n02094258
n03937543
n03642806
n02607072
n03483316
n02090622
n04525305
n02085936
n03920288
n03063599
n01843065
n02099267
n01739381
n03793489
n02018207
n03775071
n01496331
n06785654
n03935335
n03887697
n07747607
n03773504
n07860988
n04456115
n02492035
n03874293
n04275548
n03063689
n02101006
n01807496
n02113978
n02655020
n02488702
n02174001
n04004767
n04579432
n04141975
n03584254
n02112706
n03127747
n02097047
n04458633
n02814533
n02510455
n02106166
n02492035
n13054560
n04090263
n02110341
n02965783
n04235860
n01735189
n01698640
n07697313
n02276258
n03868242
n02321529
n03042490
n04418357
n03814906
n02607072
n04517823
n03496892
n07717556
n02051845
n03291819
n09399592
n02791124
n02259212
n02233338
n07802026
n03047690
n03995372
n03530642
n02966687
n02492035
n02229544
n01689811
n01532829
n03733805
n01776313
n02112137
n04200800
n07747607
n03016953
n03729826
n07734744
n02088094
n04542943
n02667093
n03400231
n04355933
n03544143
n02128385
n04356056
n02112018
n02859443
n02128925
n02091032
n04004767
n02096051
n02113712
n02927161
n03476991
n02423022
n12144580
n04548280
n03724870
n04335435
n07583066
n02871525
n03272010
n02484975
n02786058
n09472597
n04209133
n03717622
n03598930
n02417914
n01824575
n04204238
n02999410
n04467665
n04239074
n03444034
n04263257
n03903868
n02492035
n02110627
n02007558
n02090379
n03995372
n04325704
n04277352
n02494079
n02321529
n12144580
n01687978
n03095699
n02074367
n02128925
n02363005
n02346627
n04579145
n03133878
n02776631
n03787032
n03127747
n01749939
n01860187
n04317175
n12768682
n02219486
n03630383
n02097130
n02859443
n03529860
n02229544
n03272562
n04116512
n01685808
n03902125
n02174001
n02112706
n02840245
n04141975
n01641577
n02326432
n07749582
n02797295
n04596742
n02974003
n01729977
n02504013
n02843684
n03825788
n04517823
n03216828
n04346328
n02408429
n01797886
n02493509
n02799071
n04204347
n07716906
n06874185
n02093647
n02111889
n04254777
n02966687
n03938244
n02321529
n03089624
n02096585
n02877765
n03259280
n02895154
n02107574
n07615774
n03131574
n02497673
n01688243
n04273569
n03873416
n03763968
n01534433
n03187595
n02786058
n02165105
n02099601
n02782093
n01601694
n03459775
n01770081
n04019541
n01742172
n03452741
n03891251
n01818515
n03825788
n04141975
n02087394
n02325366
n02092339
n07584110
n03649909
n02113712
n04579145
n03908714
n04392985
n02124075
n13040303
n02051845
n02231487
n02493509
n01748264
n03457902
n03146219
n01675722
n03787032
n02361337
n07579787
n04479046
n02168699
n02992211
n02113624
n02974003
n04357314
n07920052
n07615774
n03452741
n03534580
n02094258
n04505470
n02641379
n03868863
n02422699
n03249569
n02123394
n02106662
n01784675
n04371430
n04557648
n02514041
n02051845
n03916031
n01751748
n02504458
n07734744
n02494079
n03902125
n02930766
n03977966
n03724870
n04116512
n03272010
n04049303
n03590841
n02361337
n04044716
n03680355
n03637318
n11939491
n03866082
n03272010
n02119789
n07615774
n03602883
n03492542
n04310018
n02231487
n02110185
n03544143
n03995372
n02268443
n01440764
n02480855
n02317335
n01692333
n02109961
n03379051
n03075370
n02687172
n04442312
n03584254
n01729977
n02727426
n03134739
n01828970
n02093428
n02233338
n02091831
n02939185
n04579432
n04266014
n03291819
n03954731
n03838899
n07871810
n02077923
n12057211
n02415577
n02115641
n03781244
n07880968
n07711569
n03838899
n03180011
n02114712
n03887697
n02930766
n01644900
n02111277
n02999410
n03534580
n02497673
n02410509
n02777292
n03461385
n04086273
n03627232
n01689811
n09193705
n01955084
n03916031
n04355338
n04259630
n03617480
n01498041
n02169497
n02423022
n02422106
n02699494
n02494079
n04515003
n03724870
n02113799
n03930630
n04458633
n04065272
n02939185
n02281787
n02504458
n02190166
n03691459
n02408429
n07579787
n02114712
n04125021
n04461696
n03384352
n03388183
n03837869
n03485407
n01986214
n03255030
n02804610
n03255030
n01924916
n04398044
n04540053
n02667093
n03146219
n02483708
n03125729
n09256479
n02089078
n02607072
n03742115
n04067472
n02114712
n03196217
n04254120
n02105412
n03250847
n02111500
n07565083
n04162706
n01917289
n03018349
n03530642
n02107908
n02169497
n02018795
n03658185
n03424325
n02018207
n03630383
n03903868
n07745940
n02138441
n03372029
n02319095
n01855672
n03062245
n07753592
n04147183
n04254777
n03838899
n02219486
n04270147
n07871810
n01910747
n02999410
n12768682
n03649909
n04120489
n02002724
n01756291
n02445715
n02009912
n01798484
n04532670
n04604644
n04044716
n02169497
n02669723
n04461696
n02134084
n03743016
n01798484
n03404251
n02783161
n03201208
n02134084
n02607072
n03180011
n02094433
n03388549
n07590611
n02640242
n02085782
n02871525
n03967562
n02119789
n04507155
n04149813
n03492542
n02437312
n02098105
n01443537
n01632458
n02860847
n02113023
n03337140
n12620546
n03459775
n11879895
n03085013
n02096585
n02088466
n01751748
n02497673
n02236044
n03109150
n02130308
n04325704
n03676483
n02105412
n03180011
n02787622
n02025239
n01693334
n02325366
n02281787
n04597913
n04346328
n04404412
n02006656
n02107312
n02165456
n03042490
n04418357
n02093428
n04133789
n07754684
n03075370
n03916031
n04536866
n07711569
n02895154
n02105251
n02692877
n03344393
n04493381
n04579145
n03201208
n04243546
n02167151
n01797886
n09256479
n01582220
n04548362
n03476684
n04606251
n04579432
n02086910
n02134084
n02109525
n04238763
n03764736
n04044716
n04548362
n02692877
n03207941
n04229816
n03598930
n04591157
n02317335
n01734418
n15075141
n03825788
n04536866
n04254777
n02277742
n03877845
n02747177
n01667778
n01664065
n03180011
n02701002
n13040303
n03388549
n04591713
n04389033
n02699494
n02105162
n02280649
n04254777
n02607072
n01985128
n03045698
n03717622
n02086240
n03903868
n02326432
n02229544
n03530642
n01685808
n02091467
n03544143
n03902125
n02125311
n09399592
n04070727
n07730033
n07684084
n04398044
n03372029
n03483316
n03495258
n01728572
n04037443
n02395406
n03457902
n03761084
n01734418
n02090721
n03976657
n03785016
n01514668
n04357314
n02835271
n02504013
n02489166
n03530642
n02950826
n02111889
n04371774
n04560804
n03445924
n02091831
n07753592
n03447721
n01770081
n02487347
n02794156
n02097209
n03891251
n02790996
n03109150
n04380533
n03595614
n04153751
n04591713
n02108915
n04429376
n01641577
n04264628
n03271574
n02114367
n07930864
n02105641
n02104365
n03717622
n04423845
n02094258
n02116738
n01692333
n02909870
n02606052
n02099849
n02363005
n07734744
n02841315
n01860187
n02090721
n03841143
n02892201
n04125021
n04612504
n01537544
n04505470
n02281406
n03983396
n02123045
n01784675
n02493509
n03476991
n03534580
n02123159
n02808440
n04074963
n01616318
n03786901
n03721384
n02086240
n02488702
n03642806
n03160309
n01796340
n13044778
n09256479
n03089624
n02086910
n04604644
n04040759
n07584110
n04552348
n04149813
n02066245
n01580077
n04443257
n04336792
n02107683
n01797886
n02134418
n02134418
n01632777
n06359193
n01797886
n03485407
n04259630
n03992509
n07248320
n04486054
n03026506
n02088632
n03124043
n02442845
n02091467
n03376595
n04310018
n02966687
n03777568
n03100240
n04350905
n02843684
n02109961
n01631663
n03240683
n03141823
n02091635
n01443537
n11939491
n02002724
n03733281
n02106662
n03942813
n03337140
n03777568
n04251144
n07716906
n01820546
n03929660
n03478589
n02441942
n02364673
n09835506
n04515003
n02264363
n01773157
n01770393
n03777568
n04049303
n02219486
n02130308
n02437312
n02815834
n02093647
n01616318
n04332243
n12620546
n10148035
n02927161
n02128757
n03496892
n03417042
n04200800
n02484975
n01689811
n02107574
n03976657
n03998194
n02088632
n04243546
n03788365
n02087046
n10565667
n03832673
n02412080
n01558993
n03492542
n04540053
n01796340
n04376876
n02395406
n03075370
n07753592
n02481823
n02457408
n02110806
n03877472
n01667778
n03131574
n03956157
n02108422
n02114548
n03272010
n03394916
n01774384
n03623198
n02027492
n04099969
n02106662
n02951358
n01798484
n13133613
n03207743
n04560804
n02268443
n03775071
n04346328
n01930112
n03584254
n02790996
n09256479
n01985128
n02480495
n02268853
n03627232
n03180011
n02233338
n03982430
n02841315
n03649909
n04336792
n09468604
n02056570
n02787622
n03764736
n02442845
n02437616
n03445924
n01917289
n02107312
n02137549
n03599486
n03721384
n04041544
n01824575
n04285008
n01687978
n01514668
n04554684
n04209239
n03272562
n03425413
n02797295
n02106382
n06359193
n03642806
n01677366
n03134739
n02105641
n01985128
n03594945
n07583066
n02667093
n02086646
n07590611
n02111889
n03857828
n04259630
n02730930
n04285008
n03095699
n03761084
n02167151
n04404412
n04254120
n04461696
n04192698
n01873310
n03763968
n02804414
n04325704
n01682714
n02120505
n03584829
n04356056
n04476259
n09332890
n04399382
n03676483
n03961711
n09332890
n02096294
n04532106
n04149813
n03891251
n06874185
n02769748
n04485082
n04277352
n03793489
n03788365
n02389026
n03709823
n03032252
n02606052
n03271574
n03492542
n01665541
n01675722
n03691459
n07892512
n02799071
n02007558
n02510455
n03742115
n04136333
n03630383
n02910353
n02111129
n02488702
n01950731
n04204238
n04461696
n02102318
n03538406
n03916031
n02130308
n04311174
n01667114
n02115641
n04487394
n02233338
n02099267
n01797886
n02051845
n04428191
n02124075
n04532670
n03775546
n07892512
n02100877
n04398044
n04590129
n02101388
n04254680
n04485082
n03026506
n04111531
n03924679
n01667778
n02169497
n04311004
n03947888
n02093754
n01818515
n03763968
n04380533
n02077923
n02488702
n01770393
n02226429
n07932039
n02095314
n01847000
n03250847
n04296562
n02100236
n03045698
n07590611
n03787032
n02101006
n01873310
n02009912
n02096051
n07749582
n02112018
n03000134
n03447721
n04118776
n03970156
n01944390
n07613480
n02879718
n01873310
n03187595
n03325584
n01496331
n02097298
n03793489
n02111500
n04311174
n01739381
n02114548
n02165105
n01930112
n02823428
n04111531
n02137549
n04355338
n03916031
n03791053
n02113186
n04081281
n02104029
n03483316
n04579145
n01558993
n01748264
n02791270
n03929660
n02129604
n02102040
n03796401
n02007558
n11879895
n06794110
n07614500
n02006656
n04065272
n02486261
n02640242
n01806143
n03991062
n02788148
n09472597
n03935335
n02510455
n03958227
n02105641
n04428191
n03018349
n02116738
n03773504
n02087046
n03709823
n01749939
n02190166
n02085782
n01843065
n03743016
n01828970
n01828970
n03908714
n03937543
n02817516
n04592741
n02869837
n03874293
n04540053
n03250847
n02971356
n02114548
n02113023
n04081281
n03857828
n03450230
n04127249
n02108089
n02093428
n04392985
n04254120
n02782093
n02012849
n03179701
n04357314
n13133613
n02992211
n04243546
n01664065
n01695060
n04005630
n03400231
n03733131
n02107142
n02104365
n04597913
n04238763
n04371430
n03877472
n04589890
n04154565
n01734418
n03781244
n07745940
n02109961
n01755581
n07742313
n04118776
n01734418
n02085782
n03100240
n02013706
n03658185
n03290653
n02105505
n03888257
n02865351
n02277742
n02099849
n03131574
n02102177
n02093428
n02814860
n01734418
n01580077
n04136333
n04483307
n01774384
n02364673
n06874185
n07754684
n07734744
n04487081
n07802026
n09399592
n03602883
n04435653
n02096437
n02672831
n02107683
n02086646
n01698640
n03485794
n03967562
n01664065
n03837869
n01950731
n02909870
n01756291
n02091467
n03658185
n02690373
n02012849
n03709823
n02123597
n13044778
n02167151
n03425413
n07730033
n03721384
n03126707
n02883205
n02111889
n03866082
n01698640
n04584207
n03485407
n02105251
n03743016
n03314780
n03769881
n01494475
n04005630
n03291819
n03721384
n04118776
n03868242
n04265275
n09835506
n03443371
n03459775
n04501370
n01688243
n03494278
n02486410
n02105251
n03956157
n02410509
n02116738
n04532106
n02100236
n04591157
n02398521
n04131690
n03935335
n02098105
n04428191
n02110627
n03970156
n03950228
n02110341
n04201297
n07932039
n07920052
n03063689
n02137549
n03100240
n01665541
n04099969
n02106382
n02009912
n03223299
n02091635
n03982430
n04548362
n01978455
n01614925
n02841315
n07711569
n04335435
n02892767
n03345487
n02948072
n04127249
n02909870
n02099712
n04162706
n01981276
n02085620
n02917067
n07716358
n04332243
n03724870
n04074963
n01984695
n03794056
n03929855
n01773157
n01806567
n04350905
n03804744
n10565667
n07747607
n03218198
n03942813
n01877812
n03924679
n07753592
n02113799
n02086079
n03814639
n02834397
n02109525
n07720875
n04273569
n03018349
n03404251
n03888257
n03485407
n07730033
n13052670
n02095889
n01739381
n01514859
n02106030
n07860988
n03775546
n04263257
n03485794
n03924679
n04228054
n02319095
n02747177
n03770679
n03980874
n02097658
n02988304
n07579787
n02137549
n01644373
n02870880
n04069434
n13040303
n02106550
n02804414
n07565083
n03877845
n03187595
n02074367
n02099712
n01950731
n03884397
n03776460
n04209133
n03697007
n01978287
n03792972
n07716906
n04146614
n03887697
n02095889
n02096177
n04435653
n02091032
n02840245
n02097658
n02002724
n02058221
n03127747
n04501370
n01817953
n02113186
n01877812
n04004767
n02441942
n02408429
n04116512
n02134418
n03529860
n03041632
n03447447
n03188531
n03770439
n03633091
n02086646
n02011460
n04209133
n04229816
n01622779
n01667114
n01685808
n02113186
n02097047
n03876231
n02699494
n03961711
n03530642
n03452741
n02708093
n01985128
n02894605
n03124170
n03633091
n13054560
n02112137
n02120505
n01532829
n03929660
n04589890
n04507155
n01685808
n02077923
n04523525
n04592741
n02056570
n03841143
n02226429
n04243546
n04285008
n02483708
n03944341
n04553703
n03977966
n02441942
n01818515
n03871628
n03692522
n07768694
n02607072
n04456115
n04590129
n03476991
n02091134
n03394916
n01990800
n02066245
n02279972
n01944390
n02105251
n04273569
n03857828
n02110185
n02096051
n01770081
n02259212
n02799071
n01806143
n03476684
n01796340
n03100240
n01632777
n02190166
n02066245
n03976657
n03788365
n02108422
n03400231
n04589890
n04435653
n02326432
n03954731
n04591157
n02823428
n07716358
n02088632
n01824575
n01631663
n02086079
n03995372
n04517823
n02480855
n03445777
n04357314
n03884397
n03445924
n03777754
n03133878
n03873416
n02086240
n04553703
n04133789
n07693725
n02895154
n02317335
n04613696
n01819313
n03977966
n02109047
n03000247
n02443114
n03272010
n01697457
n04200800
n02109047
n02840245
n01739381
n06794110
n01756291
n01748264
n03950228
n02971356
n02123159
n04346328
n02092339
n01729977
n03187595
n02454379
n03794056
n03967562
n04039381
n02879718
n02441942
n04515003
n04311174
n03100240
n03868242
n03126707
n04461696
n13054560
n04398044
n01667114
n01664065
n02106382
n04613696
n02948072
n12144580
n03877472
n02096585
n03935335
n04429376
n02110185
n03207941
n02123045
n03788195
n04259630
n02097209
n02092002
n01877812
n03529860
n02966687
n03980874
n02013706
n02776631
n02445715
n01496331
n01807496
n02112137
n02086646
n04118776
n03658185
n01985128
n02504013
n12998815
n02233338
n12057211
n07875152
n03840681
n03721384
n03908714
n02412080
n02113799
n02096437
n02669723
n03775546
n03393912
n07718472
n01883070
n02120079
n01532829
n04443257
n02917067
n02877765
n02115913
n07920052
n01773797
n02123159
n03447447
n04613696
n03933933
n04380533
n01728572
n03535780
n04599235
n02877765
n13037406
n02971356
n02504458
n02101388
n04370456
n09229709
n02113624
n02492035
n02089867
n09421951
n02219486
n02494079
n02963159
n03930630
n02206856
n02091831
n02504013
n02097298
n09428293
n04596742
n01632777
n02018207
n03344393
n03388549
n03791053
n01729322
n02018207
n03599486
n03297495
n02093859
n01629819
n04037443
n01693334
n02058221
n03141823
n04252225
n04418357
n01774384
n03871628
n03598930
n03032252
n02321529
n02117135
n02206856
n03944341
n02111129
n02346627
n03404251
n02113023
n02009229
n02879718
n01748264
n01773549
n04252077
n02825657
n03476991
n03584254
n04350905
n13052670
n04141076
n03388549
n02415577
n02607072
n04346328
n01914609
n02641379
n03782006
n01601694
n03388183
n03803284
n02690373
n02106662
n02097047
n07892512
n02277742
n10148035
n02412080
n02091635
n01917289
n03742115
n04074963
n03124043
n02669723
n04507155
n02808304
n02111500
n03761084
n01797886
n03874599
n03476991
n04404412
n02108915
n01694178
n02802426
n02974003
n03028079
n03944341
n03742115
n02111500
n02117135
n02092339
n04133789
n03868242
n07714990
n07579787
n04252077
n02096051
n02102480
n02174001
n03085013
n01740131
n02107312
n04162706
n02869837
n02412080
n04612504
n01807496
n04041544
n03459775
n02017213
n02101006
n07749582
n02109047
n07718472
n02877765
n01622779
n01882714
n03781244
n02137549
n02342885
n03498962
n04127249
n06785654
n02105412
n03447447
n09193705
n02326432
n04590129
n02892201
n03425413
n04235860
n03000247
n03272562
n03598930
n02174001
n03347037
n07920052
n01784675
n07718747
n02279972
n02097298
n03394916
n03977966
n03692522
n03825788
n07717556
n02727426
n02396427
n07747607
n04330267
n03062245
n02389026
n02871525
n02107142
n02012849
n02077923
n03532672
n03216828
n02486261
n01494475
n04251144
n02109047
n03649909
n01873310
n03710637
n01632458
n02077923
n04263257
n04423845
n02279972
n01728572
n02128757
n04552348
n07747607
n07932039
n02071294
n02951585
n02123159
n04201297
n03680355
n02892767
n03930630
n01798484
n01729977
n01798484
n04371430
n02090379
n03347037
n03998194
n03947888
n02108422
n02837789
n03888257
n01739381
n04179913
n07590611
n02279972
n03063599
n02113712
n02444819
n03532672
n02687172
n07720875
n01819313
n02445715
n03793489
n02092002
n03899768
n03424325
n02978881
n01534433
n02999410
n04557648
n01608432
n02391049
n03929660
n02835271
n03876231
n02102318
n02777292
n04004767
n03933933
n07836838
n01751748
n07718472
n04254777
n03424325
n03063599
n02095570
n01824575
n04311004
n01677366
n03062245
n03627232
n03134739
n04372370
n03075370
n02802426
n03447721
n01829413
n02090379
n04192698
n03743016
n01692333
n02099601
n03720891
n02951585
n01532829
n02281406
n02096177
n03920288
n02927161
n04179913
n02100236
n04515003
n07802026
n02088632
n03950228
n09193705
n03841143
n02093647
n04336792
n04357314
n03929660
n02093647
n02093428
n04049303
n01873310
n02268853
n03838899
n01484850
n03337140
n01537544
n02174001
n03063599
n02640242
n03721384
n04596742
n02795169
n02492660
n02892201
n02361337
n04417672
n02113624
n02028035
n02999410
n01629819
n02115913
n02089078
n01768244
n04263257
n01944390
n01945685
n02071294
n03937543
n02391049
n02018207
n02129165
n02074367
n01518878
n03445777
n04149813
n02669723
n02097047
n02865351
n07753592
n02814533
n03874599
n07720875
n04116512
n02417914
n02027492
n03877845
n02123159
n04264628
n02236044
n02108089
n04133789
n04147183
n02085620
n02091134
n03944341
n13037406
n02422106
n01498041
n03775071
n04357314
n02102040
n01682714
n01775062
n03014705
n01693334
n01616318
n04604644
n03109150
n02088238
n01981276
n02422106
n01985128
n04026417
n01644900
n02095570
n04266014
n02236044
n02115913
n01883070
n03840681
n02481823
n03447721
n01981276
n03673027
n02835271
n02123159
n02113186
n03947888
n02100877
n03814639
n02510455
n04037443
n03929660
n03837869
n02791270
n03461385
n02951585
n04525305
n02788148
n02165105
n04592741
n02091467
n03188531
n02091134
n03617480
n03954731
n04328186
n02105162
n02870880
n03028079
n04596742
n04204347
n02108422
n01740131
n02363005
n03840681
n04116512
n02138441
n04367480
n01773797
n04350905
n02095314
n09229709
n02494079
n03788365
n02117135
n01641577
n04192698
n02087046
n12620546
n02410509
n03777568
n02948072
n03662601
n02690373
n02441942
n03127925
n02066245
n02097130
n03187595
n02977058
n03977966
n03291819
n02788148
n03482405
n02090721
n02105641
n04525038
n04328186
n03424325
n03498962
n03223299
n04552348
n09193705
n07697537
n04596742
n01797886
n01980166
n02093991
n01688243
n01817953
n03485407
n01795545
n02794156
n02102480
n01819313
n03188531
n02965783
n03534580
n02395406
n02033041
n03337140
n04200800
n02797295
n02804414
n02088364
n03000247
n03937543
n02389026
n01682714
n02101388
n01685808
n07880968
n02509815
n03938244
n04532670
n03967562
n03196217
n02892767
n01843383
n02978881
n01748264
n04423845
n02396427
n03388043
n03000134
n04429376
n03483316
n03485407
n02256656
n04086273
n02356798
n02747177
n01773157
n03297495
n02403003
n07718472
n03445924
n01843383
n02328150
n03447447
n02124075
n02098105
n06596364
n03388183
n06596364
n02504013
n04041544
n02009912
n02093859
n04350905
n02317335
n07871810
n02105855
n02607072
n02095570
n02389026
n06785654
n09421951
n02114855
n03216828
n01855032
n03095699
n02115641
n01955084
n03095699
n03133878
n03902125
n02395406
n04371774
n04525305
n03345487
n02108551
n01774750
n02480495
n03594945
n02091635
n04557648
n03388549
n01784675
n13040303
n13037406
n01776313
n02099601
n03134739
n02110185
n01537544
n13133613
n02102040
n01530575
n01735189
n01491361
n07583066
n02137549
n03908714
n03045698
n01914609
n02326432
n01631663
n03868242
n03920288
n03729826
n02002724
n03776460
n03535780
n03146219
n02094258
n03841143
n02797295
n02500267
n04392985
n02504458
n01773797
n04325704
n03920288
n02999410
n02655020
n02097474
n09472597
n02099712
n02980441
n04461696
n02814533
n03495258
n01784675
n03000684
n07760859
n04141327
n02641379
n04200800
n04141327
n01943899
n04037443
n04357314
n02097474
n03857828
n01630670
n02417914
n02747177
n04590129
n02037110
n03841143
n04204238
n04252225
n02791270
n09193705
n04376876
n02815834
n01817953
n04356056
n02007558
n02917067
n03544143
n03954731
n03372029
n02930766
n04310018
n03630383
n04009552
n02132136
n07745940
n02094114
n02480855
n02093991
n02113624
n03662601
n12144580
n02443114
n01914609
n04040759
n02834397
n02276258
n04557648
n07718472
n02108915
n07753113
n02093428
n03976467
n01984695
n02492035
n04275548
n02100877
n04254777
n02799071
n03908618
n03773504
n03347037
n02107574
n03529860
n02093256
n03291819
n02110958
n04275548
n04273569
n02113023
n03958227
n04417672
n03272562
n01980166
n01514668
n02002556
n02086079
n02104365
n01677366
n03770679
n02096177
n02094258
n01440764
n01943899
n02099849
n03899768
n01729322
n01776313
n06359193
n02447366
n03857828
n03384352
n02111277
n02226429
n04366367
n01737021
n01537544
n02951358
n04371430
n03196217
n02100236
n04443257
n04479046
n03983396
n03218198
n02105505
n01978287
n04286575
n03866082
n04208210
n03891332
n03857828
n02504013
n03982430
n04554684
n04317175
n04552348
n12057211
n02483362
n02097474
n02361337
n02120505
n03594945
n03498962
n01978455
n01829413
n02105505
n01978455
n04356056
n07718472
n01518878
n02795169
n03617480
n03372029
n02099267
n04229816
n07717410
n02895154
n02110185
n04149813
n02056570
n04404412
n03028079
n02110341
n04120489
n02804414
n02988304
n02167151
n04392985
n07747607
n02966687
n09399592
n03761084
n03400231
n04136333
n04423845
n02978881
n02099429
n07892512
n02137549
n01807496
n04033995
n03876231
n03063599
n04005630
n02489166
n03197337
n04456115
n03388043
n03062245
n03899768
n04371430
n03729826
n02165456
n02769748
n02412080
n02086240
n01665541
n02412080
n02445715
n01735189
n02086079
n02110185
n07697537
n02112350
n02137549
n02398521
n02971356
n03980874
n02106030
n02980441
n09193705
n03393912
n04562935
n03691459
n02870880
n02443484
n02979186
n02100735
n01682714
n02607072
n01688243
n02454379
n02443484
n07248320
n03814639
n04509417
n04019541
n03938244
n01667114
n03791053
n04442312
n02226429
n01693334
n02794156
n01773549
n01685808
n03598930
n02017213
n02124075
n02091134
n01530575
n03657121
n01768244
n04552348
n02106030
n01667114
n02790996
n02699494
n03291819
n01694178
n02423022
n01855672
n03459775
n04070727
n03770439
n03709823
n01924916
n06785654
n03272562
n02099429
n03100240
n02174001
n06794110
n03759954
n04357314
n03584829
n03345487
n03443371
n02100236
n03709823
n04350905
n02086910
n02977058
n02112018
n04409515
n04118776
n03376595
n02101556
n02776631
n02108551
n03291819
n07745940
n02109047
n04336792
n03494278
n03388183
n02398521
n03485794
n03018349
n03967562
n02116738
n02085620
n02108551
n02894605
n07695742
n01693334
n04356056
n02120079
n04540053
n03134739
n01644900
n01697457
n02108000
n03720891
n03733281
n04404412
n02098105
n02089867
n01530575
n03884397
n03602883
n02090721
n04228054
n03208938
n02483708
n02017213
n02097047
n02509815
n02447366
n03532672
n01518878
n02123045
n01847000
n02690373
n02092002
n02096177
n04487081
n02526121
n02124075
n03717622
n02106030
n02002724
n03240683
n03902125
n03709823
n02974003
n02100583
n03201208
n01833805
n13052670
n02219486
n02107574
n07742313
n02112018
n02489166
n02441942
n07753275
n01819313
n02643566
n03110669
n04482393
n04613696
n02129604
n02088466
n02134418
n02114855
n04591157
n02277742
n02112350
n03590841
n04476259
n02326432
n01755581
n11939491
n04264628
n12998815
n02101388
n02137549
n02236044
n02123394
n02909870
n03733805
n04120489
n03958227
n02100877
n02169497
n02168699
n03794056
n04146614
n03787032
n03937543
n03388549
n01978455
n06874185
n03717622
n07875152
n01820546
n03445777
n02109961
n04127249
n07716358
n03661043
n01534433
n03982430
n02490219
n04152593
n03062245
n01644373
n02951358
n04041544
n02974003
n02102318
n04127249
n02500267
n04548280
n02690373
n02125311
n01950731
n02007558
n12267677
n03045698
n01443537
n02447366
n02124075
n03916031
n03146219
n02843684
n02980441
n03187595
n02091134
n03124170
n07749582
n03594734
n02666196
n03782006
n07697537
n02111889
n03724870
n02085620
n03492542
n02102177
n04515003
n02167151
n03877472
n07720875
n02097209
n03208938
n01601694
n04067472
n02174001
n02123394
n07583066
n03599486
n04005630
n01698640
n03047690
n03793489
n02916936
n02124075
n01592084
n03127747
n02130308
n02094114
n04131690
n03063599
n02110341
n04008634
n03218198
n01496331
n03146219
n03496892
n02097047
n02397096
n03942813
n03787032
n02125311
n02119789
n01945685
n02105162
n03127747
n02107142
n02992529
n12620546
n04067472
n01630670
n02423022
n02948072
n01491361
n04067472
n04263257
n03223299
n02088238
n02231487
n01739381
n01532829
n02099849
n09256479
n01580077
n03895866
n02037110
n07742313
n02091032
n03841143
n01986214
n04356056
n02971356
n01774384
n02097474
n04019541
n07753275
n01944390
n04371774
n02120079
n07932039
n04033901
n04074963
n02843684
n03457902
n02089078
n03544143
n02088238
n02342885
n01753488
n02895154
n04009552
n01806143
n03794056
n01740131
n02423022
n02033041
n03942813
n04023962
n03630383
n04251144
n04376876
n02107142
n01740131
n03075370
n01494475
n04590129
n02786058
n01773549
n02028035
n01978287
n02966193
n03982430
n02442845
n07734744
n07615774
n03970156
n03000134
n01883070
n02124075
n07892512
n03970156
n03958227
n04532670
n03743016
n04479046
n02011460
n02391049
n03877845
n01981276
n02488291
n01592084
n03544143
n02168699
n01494475
n03887697
n03249569
n03777754
n02100236
n02017213
n02999410
n03590841
n03476991
n04192698
n01582220
n04604644
n03658185
n03773504
n02640242
n01819313
n02906734
n07697537
n02403003
n04270147
n03544143
n02859443
n03733131
n03733131
n04251144
n01806143
n04254120
n04350905
n02090379
n01582220
n03868242
n02088466
n02793495
n04136333
n03476684
n02129604
n02112137
n01622779
n02087046
n02114548
n07875152
n01773549
n03721384
n01843065
n01601694
n04254680
n07860988
n04523525
n01843383
n03314780
n04069434
n02791270
n04125021
n07880968
n03314780
n04346328
n04335435
n02093647
n04532106
n04465501
n02102177
n04344873
n03788195
n03803284
n09835506
n01872401
n01688243
n02233338
n03633091
n03888605
n02095570
n04579145
n03598930
n02980441
n03095699
n02088466
n04296562
n01739381
n02033041
n04346328
n01695060
n03733281
n04265275
n01796340
n07880968
n02894605
n04465501
n01644900
n03100240
n03447721
n03792782
n01828970
n02486261
n02690373
n01774750
n09229709
n03045698
n03874293
n12267677
n03637318
n02398521
n02782093
n01728572
n02457408
n04005630
n04525305
n01820546
n02138441
n03532672
n02808440
n12985857
n02085620
n04584207
n02125311
n07742313
n03355925
n03868242
n03871628
n03840681
n04310018
n02793495
n02489166
n02727426
n04592741
n02841315
n02490219
n04273569
n04228054
n03991062
n02093647
n02113023
n01698640
n04591713
n02111277
n04596742
n02110627
n03720891
n04251144
n03179701
n02091244
n07745940
n03000247
n04243546
n07697313
n03127925
n01985128
n03942813
n02013706
n02483708
n01632458
n02279972
n02009912
n02256656
n01768244
n02091635
n03770679
n12144580
n01806567
n04536866
n03991062
n02391049
n02326432
n04443257
n02097047
n02101006
n02051845
n03933933
n03595614
n07695742
n07579787
n02120079
n02110627
n02095314
n03201208
n03803284
n02444819
n03899768
n02233338
n02747177
n03483316
n04136333
n03220513
n03623198
n03134739
n03630383
n02808440
n03769881
n02799071
n04019541
n01498041
n04428191
n02094433
n03450230
n02092002
n03929660
n03000134
n01914609
n03721384
n04389033
n02128385
n03000247
n02091244
n02108000
n02110063
n02128385
n02641379
n01664065
n02109525
n07802026
n07714571
n03691459
n02109961
n01688243
n04515003
n04252225
n02877765
n03476991
n07717410
n04389033
n02129165
n01440764
n12985857
n04371430
n03447721
n02441942
n02110958
n02094433
n04146614
n03857828
n03788195
n03804744
n02102040
n02317335
n09246464
n02110958
n02256656
n03781244
n01689811
n02487347
n02092002
n03733805
n01531178
n02454379
n02088238
n01729322
n01945685
n01774384
n01632458
n03776460
n01877812
n07615774
n02423022
n03384352
n01518878
n03000684
n02018207
n03876231
n02113799
n01855032
n02910353
n02109047
n03967562
n02112018
n02708093
n02417914
n13040303
n04005630
n02794156
n01689811
n02113186
n03476991
n03773504
n03868863
n03788365
n02133161
n02708093
n07718747
n02106030
n03916031
n02493793
n02277742
n02701002
n04238763
n07742313
n01755581
n02321529
n01728572
n12057211
n03016953
n04009552
n02107312
n04486054
n03837869
n04127249
n03837869
n03895866
n03032252
n04380533
n02777292
n01729322
n02607072
n03792972
n03930630
n02814533
n04005630
n04099969
n02110806
n03594734
n03697007
n02071294
n02346627
n02096294
n01440764
n12267677
n02097658
n02111889
n03825788
n04153751
n04259630
n04254680
n02092002
n01833805
n04200800
n04435653
n07753113
n03888257
n01744401
n04192698
n02415577
n04550184
n02097474
n02793495
n04252225
n03388549
n02422106
n02807133
n02090622
n03598930
n01592084
n01924916
n07584110
n02114712
n03874599
n03590841
n09246464
n04589890
n03794056
n03180011
n02104029
n03272562
n04263257
n03874599
n07714990
n02791124
n03690938
n02837789
n02138441
n02859443
n03026506
n02442845
n04004767
n02397096
n04120489
n01882714
n03124170
n03992509
n01818515
n03124170
n02002724
n03680355
n02096051
n02492660
n04033995
n04019541
n02108915
n01872401
n04366367
n04501370
n04355338
n03661043
n02536864
n01796340
n02326432
n02493509
n02099849
n02096051
n02974003
n03481172
n03089624
n01773157
n03445777
n02138441
n07565083
n03916031
n02363005
n01944390
n02093754
n04560804
n12267677
n03967562
n07932039
n03666591
n02256656
n03770439
n04509417
n03720891
n07565083
n07875152
n01843383
n03481172
n02708093
n02165105
n02123394
n01644900
n02109961
n04335435
n02096177
n02110185
n02687172
n04116512
n01693334
n03133878
n02493793
n01806143
n07892512
n03670208
n04264628
n03014705
n07615774
n02992211
n03063599
n04209239
n02489166
n07920052
n04081281
n04486054
n02783161
n03594734
n03016953
n02834397
n04409515
n03544143
n01924916
n02174001
n04599235
n07754684
n07753275
n02112706
n03197337
n02095570
n02120079
n03804744
n01820546
n02099849
n04004767
n02092339
n03983396
n01749939
n04162706
n04264628
n03598930
n02098286
n07892512
n03929660
n04209133
n03000684
n04589890
n02963159
n02206856
n03970156
n04418357
n02090379
n03785016
n02488291
n04501370
n04118538
n04311174
n03838899
n02906734
n01665541
n03188531
n03642806
n03220513
n02105855
n03642806
n02123394
n02457408
n03208938
n04536866
n02056570
n02088466
n04019541
n02165456
n02097209
n02108000
n04536866
n02777292
n02939185
n04366367
n01616318
n03337140
n04229816
n03792782
n07831146
n03903868
n03041632
n02089867
n07695742
n03534580
n03271574
n01843383
n07836838
n02279972
n07584110
n02119789
n01843065
n02206856
n03042490
n02104029
n04447861
n03814906
n02280649
n03494278
n02256656
n02909870
n03602883
n01748264
n02093428
n03841143
n03710193
n01675722
n02395406
n03250847
n02397096
n12267677
n03770679
n02007558
n03642806
n07871810
n03742115
n02190166
n07716358
n01978455
n02169497
n04204347
n03417042
n02793495
n03530642
n03188531
n02105505
n02804414
n02093754
n02092339
n02860847
n02085936
n02786058
n02056570
n02165456
n03710637
n04200800
n04592741
n03935335
n02102973
n04296562
n04328186
n12267677
n01824575
n02494079
n02730930
n02356798
n03937543
n03290653
n02109047
n02112137
n02104365
n02085620
n09246464
n01817953
n03345487
n02410509
n02281787
n04487081
n01770393
n03814906
n01728920
n02481823
n01768244
n03891251
n04111531
n03347037
n03929660
n02951585
n02840245
n02489166
n01756291
n02669723
n07583066
n02268443
n04552348
n04263257
n04371774
n03379051
n04355338
n04355933
n04118538
n04099969
n04507155
n02480495
n03814639
n02105855
n02487347
n04553703
n04310018
n03895866
n03000247
n01796340
n03903868
n03903868
n07583066
n04192698
n02018795
n02096177
n02098286
n03970156
n03733281
n07614500
n03388043
n02110958
n01601694
n07715103
n02127052
n02325366
n03673027
n02950826
n02091467
n03110669
n03840681
n03680355
n02441942
n03485407
n02097474
n02398521
n02776631
n02701002
n02325366
n03388043
n07873807
n03763968
n04515003
n02094258
n02422699
n01667114
n04263257
n07590611
n02110185
n03899768
n03877845
n03197337
n12144580
n04152593
n02108089
n02493793
n02105855
n03481172
n04228054
n03899768
n02093754
n01737021
n02415577
n01685808
n01773157
n02101388
n03710721
n01873310
n03627232
n02708093
n02102318
n07747607
n02791124
n02870880
n03388549
n04372370
n03775071
n04347754
n03026506
n07720875
n01883070
n03690938
n03776460
n01558993
n04552348
n03457902
n07768694
n04356056
n04485082
n09288635
n07760859
n03991062
n04136333
n03938244
n02102177
n03991062
n04550184
n04127249
n01498041
n03691459
n03255030
n02417914
n02099429
n04254777
n04277352
n01855032
n01983481
n04604644
n02102973
n02790996
n02094258
n02489166
n03887697
n02443114
n04228054
n01667778
n02172182
n04133789
n03196217
n02018207
n03124170
n02841315
n02174001
n02138441
n02364673
n03874599
n02690373
n12267677
n02071294
n02396427
n02100236
n04125021
n01704323
n02281406
n02226429
n02097298
n02787622
n02086910
n02415577
n02123597
n03977966
n03743016
n02951585
n04548280
n03216828
n02096437
n02233338
n02536864
n01773157
n03657121
n02883205
n03777754
n01843065
n15075141
n04462240
n02086240
n03832673
n04026417
n04346328
n02808440
n04152593
n03017168
n03710193
n02110341
n02111500
n02117135
n02018207
n03769881
n02087394
n04286575
n02105855
n03218198
n04509417
n02749479
n01756291
n03584254
n07613480
n02437312
n04458633
n01518878
n01677366
n02797295
n07717410
n03775071
n04209133
n03425413
n04347754
n02028035
n02085936
n04317175
n04310018
n13044778
n01693334
n03047690
n03983396
n02268443
n04442312
n02109961
n04019541
n04335435
n07932039
n03743016
n02268443
n04523525
n02134418
n02860847
n02096051
n02817516
n04238763
n12620546
n02092002
n13037406
n03000134
n04228054
n02002724
n02086079
n03394916
n04265275
n04136333
n02481823
n04041544
n03272562
n02999410
n02488702
n01824575
n03967562
n02730930
n01843383
n04604644
n02177972
n01744401
n07860988
n04153751
n01491361
n03297495
n04346328
n03956157
n02325366
n02974003
n03733281
n03899768
n07717556
n02114367
n04366367
n03400231
n02808440
n01968897
n02259212
n03642806
n01955084
n03776460
n09835506
n01775062
n02979186
n02093991
n04263257
n04485082
n04482393
n03179701
n01739381
n02088238
n03991062
n13040303
n01534433
n01978455
n02480495
n02086910
n02097209
n02096294
n04209133
n09428293
n03018349
n07871810
n01986214
n01491361
n02106662
n03028079
n04179913
n04264628
n03450230
n04376876
n02129165
n02127052
n02111500
n04254680
n02951358
n03854065
n02488702
n02834397
n02128757
n03075370
n07583066
n03047690
n01829413
n03124043
n01843065
n07697537
n07734744
n02834397
n02814860
n02481823
n04356056
n03124043
n01990800
n03291819
n02487347
n03658185
n04404412
n03791053
n03866082
n02930766
n02074367
n02777292
n04458633
n02098286
n02843684
n04592741
n01641577
n03529860
n01484850
n04141076
n03485407
n03590841
n04037443
n07613480
n01688243
n04074963
n02701002
n03535780
n02090379
n02111889
n06874185
n07693725
n07802026
n07754684
n01774384
n01514668
n02028035
n04423845
n02096051
n02115641
n01774384
n02894605
n03026506
n02666196
n03690938
n02112706
n03787032
n01748264
n03733131
n03920288
n04141076
n02101006
n03944341
n12267677
n03782006
n03924679
n02437616
n02992529
n02871525
n02104029
n03376595
n04243546
n03854065
n03983396
n02104029
n01883070
n07716906
n02092002
n02114855
n03255030
n01873310
n01704323
n04192698
n03485407
n02916936
n07590611
n02869837
n03527444
n03595614
n02105412
n09835506
n04033901
n04285008
n02326432
n02104029
n07716906
n07760859
n03832673
n03492542
n02408429
n03781244
n02099849
n03840681
n02092339
n03590841
n01685808
n01694178
n07753592
n03535780
n02730930
n04270147
n02011460
n04483307
n01688243
n01737021
n02033041
n03100240
n03447447
n03584829
n02483362
n03998194
n02483362
n03481172
n01558993
n04606251
n01537544
n02808440
n03825788
n01773157
n04507155
n04141076
n02504013
n04562935
n07590611
n04357314
n01608432
n02097658
n03950228
n02814860
n01498041
n04553703
n12768682
n03032252
n02097474
n01955084
n07695742
n02483708
n02106550
n04515003
n02226429
n04370456
n03000684
n03837869
n02113799
n02102480
n03459775
n02120079
n02071294
n13054560
n04192698
n02504458
n04372370
n04251144
n02006656
n03908618
n04311174
n03018349
n13133613
n03796401
n04409515
n02102480
n02843684
n04040759
n02086646
n02948072
n07836838
n03476684
n02236044
n04296562
n02017213
n04612504
n02769748
n07717410
n07717410
n01751748
n03773504
n02085782
n04562935
n04239074
n07760859
n07768694
n03160309
n01692333
n03045698
n03272562
n04417672
n03954731
n04505470
n04154565
n03691459
n04209239
n04409515
n02363005
n07734744
n02422699
n03529860
n04235860
n04536866
n01981276
n03888257
n02276258
n03388043
n07718472
n02869837
n02006656
n03595614
n02917067
n01440764
n01855032
n03930630
n02105505
n01491361
n03345487
n04372370
n03187595
n01491361
n04264628
n04557648
n02119022
n02607072
n02396427
n07615774
n04553703
n07718472
n03530642
n02100583
n04557648
n03485407
n07745940
n01531178
n03954731
n04465501
n12768682
n04486054
n03595614
n04548362
n07753113
n02701002
n04525038
n02317335
n02443484
n02939185
n03314780
n02089078
n02859443
n02091467
n02124075
n03690938
n02091831
n02454379
n04065272
n03196217
n02655020
n04487394
n04286575
n03125729
n03854065
n03670208
n02108422
n02102480
n02988304
n02009229
n02099267
n02097209
n02948072
n02110806
n02177972
n03494278
n01737021
n13133613
n04447861
n04591713
n03495258
n02859443
n02860847
n04554684
n03637318
n04258138
n01797886
n03095699
n04041544
n03602883
n04525038
n03706229
n02093859
n02119022
n02454379
n07614500
n02276258
n07714571
n02177972
n02129604
n01601694
n04355338
n02999410
n07760859
n02165456
n02111129
n03220513
n02437616
n04465501
n03272010
n02167151
n02174001
n02607072
n04254120
n07584110
n03388549
n03063599
n02795169
n02727426
n02799071
n10565667
n02454379
n07717410
n02504013
n04266014
n04493381
n03832673
n02033041
n02447366
n03314780
n02930766
n02110806
n04033901
n02870880
n01872401
n03063689
n03814906
n01798484
n02219486
n02111129
n03124170
n03443371
n01855672
n03089624
n04239074
n03814906
n04285008
n02097474
n01819313
n02364673
n03773504
n04310018
n04398044
n13054560
n01665541
n02025239
n03976657
n04553703
n07715103
n02018795
n03794056
n03595614
n03026506
n02128925
n03717622
n03041632
n04417672
n07753275
n07718747
n01728920
n03447447
n02114548
n02769748
n01784675
n02100877
n02097658
n04523525
n02002556
n03404251
n03786901
n04162706
n02776631
n13133613
n04254777
n04355338
n02104029
n04201297
n03775071
n02093754
n03992509
n03134739
n12057211
n04116512
n02281787
n07920052
n02105641
n01943899
n03841143
n02487347
n04486054
n02281787
n02342885
n03775546
n02011460
n02089078
n03776460
n04423845
n02865351
n03089624
n04371774
n01514859
n01734418
n02328150
n09468604
n03063689
n02951585
n02095314
n03792972
n03776460
n02346627
n02894605
n01775062
n02130308
n04192698
n13044778
n01751748
n07697537
n03868242
n04525038
n02259212
n02391049
n04399382
n02667093
n01530575
n01632777
n03259280
n02840245
n04019541
n02422699
n02113712
n03930630
n02643566
n02231487
n04487394
n03937543
n03355925
n01828970
n01580077
n07932039
n02877765
n02167151
n03476991
n02825657
n01751748
n03207941
n03840681
n09288635
n01843383
n04536866
n03814906
n04429376
n04428191
n03814906
n04344873
n01693334
n03417042
n02747177
n01986214
n02277742
n03127747
n02422699
n12985857
n02672831
n02823428
n02112018
n04037443
n07695742
n02536864
n02788148
n02088364
n02105251
n02105641
n02123159
n03729826
n03125729
n04179913
n02097474
n03297495
n03042490
n04252225
n03141823
n09193705
n04149813
n02655020
n03788365
n03085013
n02037110
n01944390
n02120505
n04536866
n07695742
n02951358
n03417042
n03733131
n04325704
n03843555
n03179701
n02009229
n04523525
n02098413
n02096585
n03424325
n02105162
n04590129
n01537544
n02093991
n03394916
n01514668
n13133613
n03445924
n03873416
n01632458
n03706229
n02085782
n01632777
n04371430
n12144580
n01665541
n02102040
n02701002
n04131690
n04347754
n13040303
n01775062
n02114712
n01833805
n03759954
n02860847
n04330267
n02859443
n02138441
n01774384
n07717556
n04311004
n03908714
n02361337
n04065272
n04146614
n04179913
n01697457
n03857828
n04285008
n02089078
n01755581
n02056570
n02701002
n02483708
n02101556
n01737021
n03874599
n02107683
n03657121
n01592084
n03995372
n03788195
n02100877
n03447447
n09399592
n04350905
n04266014
n02979186
n02988304
n02879718
n03032252
n01530575
n03291819
n04131690
n02037110
n01632458
n02102177
n04367480
n01807496
n02107908
n01740131
n02096585
n04235860
n02363005
n02110958
n07711569
n03384352
n03530642
n03761084
n03602883
n01531178
n01774384
n04456115
n01985128
n01694178
n03065424
n04589890
n04049303
n07248320
n06874185
n04604644
n01775062
n02123597
n02095570
n01985128
n02115913
n01622779
n01601694
n04589890
n01560419
n01440764
n02051845
n03218198
n03047690
n03854065
n02442845
n02361337
n02835271
n01531178
n02108422
n02115913
n03141823
n02088238
n03690938
n03207941
n02510455
n01806143
n01740131
n03854065
n02488291
n04428191
n03063599
n02101556
n02087046
n02101556
n03792972
n04296562
n02101006
n02776631
n01773797
n03709823
n04458633
n02281406
n03691459
n03692522
n02089867
n03868863
n02012849
n03763968
n01944390
n01667114
n03950228
n02128385
n02319095
n04553703
n03452741
n03345487
n02672831
n03935335
n02104365
n01592084
n04149813
n03594734
n02233338
n01688243
n07718472
n03394916
n13040303
n01986214
n02510455
n04285008
n03956157
n02264363
n03127747
n03445777
n04467665
n03240683
n03065424
n04517823
n02165105
n03602883
n01753488
n04399382
n09256479
n02086910
n03956157
n03485794
n02484975
n02666196
n02097209
n03535780
n02112018
n03109150
n04590129
n01667778
n02787622
n02088364
n03388549
n02494079
n01843065
n02108551
n03929855
n03498962
n02109525
n04328186
n09256479
n04540053
n03459775
n03982430
n02444819
n01494475
n02086079
n02125311
n03529860
n01843383
n03992509
n01641577
n04099969
n04254777
n01608432
n02346627
n02397096
n02676566
n01491361
n02074367
n04252225
n04485082
n02092002
n02098286
n02727426
n03100240
n13054560
n02097298
n02123045
n02002724
n02109047
n03131574
n02692877
n02088632
n04465501
n02930766
n01843065
n03697007
n02102973
n04147183
n02117135
n07754684
n02787622
n02114548
n04515003
n01855672
n01682714
n02110063
n04127249
n03127925
n04429376
n03710193
n03796401
n02786058
n02794156
n02112018
n02423022
n02094114
n02092339
n03344393
n03888605
n02437312
n02107574
n03710637
n01491361
n04074963
n02128385
n04044716
n02093991
n02113186
n01592084
n07714990
n02174001
n02777292
n02090379
n04509417
n02486261
n02841315
n02096051
n01768244
n03895866
n03891332
n02102177
n04525038
n03777754
n07716906
n02091244
n02966687
n01981276
n02092339
n04612504
n09229709
n02099429
n04540053
n03935335
n01644373
n02088466
n04380533
n02105162
n02916936
n01944390
n02123159
n03459775
n01944390
n02100735
n01740131
n03599486
n02169497
n03888605
n04296562
n03794056
n03110669
n02356798
n03032252
n04482393
n03888605
n01748264
n02098413
n03967562
n03706229
n13052670
n04252225
n02009229
n04252225
n09421951
n01930112
n04461696
n04208210
n02443484
n03045698
n03967562
n07880968
n02177972
n01698640
n02704792
n04328186
n01828970
n04482393
n03400231
n03394916
n04467665
n04259630
n01860187
n03868863
n03000134
n02783161
n02509815
n04465501
n02417914
n04482393
n02787622
n02089867
n03240683
n02403003
n04296562
n02782093
n02892201
n03777754
n04612504
n03372029
n01756291
n03902125
n03355925
n01843383
n04579432
n02091134
n04579432
n03481172
n02841315
n07831146
n03075370
n02009912
n04201297
n02396427
n01753488
n03249569
n04090263
n01704323
n02526121
n04204347
n02777292
n03126707
n04254120
n02111277
n01582220
n02206856
n02939185
n01693334
n02641379
n04263257
n04347754
n07734744
n01990800
n04399382
n04270147
n03944341
n01773549
n03259280
n02089078
n02094433
n04525305
n04493381
n01669191
n02066245
n02841315
n03796401
n04371430
n04548362
n03944341
n01773157
n03223299
n03692522
n03594945
n02100877
n03000134
n02783161
n03345487
n02802426
n01944390
n02817516
n02102973
n03956157
n03627232
n02114712
n03837869
n02797295
n04458633
n03196217
n02963159
n02110341
n02108551
n09468604
n03452741
n02174001
n04380533
n07716358
n04037443
n03803284
n03958227
n09288635
n04442312
n03272562
n03891251
n04118776
n04532670
n01742172
n03733281
n02102177
n03026506
n02606052
n01818515
n04589890
n04428191
n02279972
n02123045
n04254120
n03000684
n01983481
n02704792
n07590611
n04162706
n02088632
n02112706
n03938244
n02112018
n02123597
n01531178
n02325366
n03000684
n02066245
n02859443
n03063599
n07753113
n02999410
n03777568
n02108089
n01872401
n02025239
n01484850
n03899768
n04162706
n02110341
n02091467
n04417672
n03000134
n04356056
n04417672
n01689811
n02412080
n02086646
n02096294
n01622779
n02089973
n02835271
n09193705
n04111531
n04456115
n09193705
n03633091
n07749582
n07697537
n02860847
n01855672
n03743016
n02077923
n07754684
n01833805
n02013706
n03976657
n03134739
n03720891
n02837789
n04355933
n03584829
n09472597
n01843065
n01749939
n03717622
n03982430
n02504458
n02127052
n03127747
n04026417
n03866082
n01872401
n02094258
n03291819
n02110627
n03982430
n02093256
n02277742
n02965783
n04428191
n01740131
n02795169
n02119789
n03535780
n03461385
n01980166
n02486410
n03720891
n04597913
n03666591
n02843684
n04252225
n10565667
n02268443
n01491361
n02098105
n03775071
n03187595
n07760859
n02259212
n03042490
n03942813
n04069434
n04120489
n01820546
n04548280
n07718472
n02417914
n02095314
n06874185
n03447447
n03983396
n04592741
n02102177
n03649909
n03594945
n02099712
n04370456
n04517823
n07875152
n03207941
n02398521
n03954731
n01796340
n01798484
n02113712
n01491361
n04423845
n03483316
n04461696
n02106550
n01773157
n13052670
n02091244
n03706229
n01560419
n03832673
n02492660
n04099969
n03982430
n04532670
n01631663
n02085782
n01728920
n03240683
n04584207
n01806567
n01729977
n01601694
n04350905
n04179913
n04592741
n02108422
n02110806
n02814533
n01773797
n02704792
n02782093
n03916031
n03467068
n03710721
n04554684
n01955084
n07717556
n02009229
n02256656
n03095699
n02094258
n02486410
n02027492
n04200800
n04371430
n03662601
n02444819
n01665541
n01614925
n02112018
n03773504
n04505470
n02951358
n02948072
n02101556
n03868242
n02093256
n01641577
n02128385
n03000684
n03874293
n03134739
n01440764
n02268853
n07584110
n04399382
n01843065
n03188531
n02086240
n04540053
n01829413
n04462240
n03018349
n03782006
n07730033
n03676483
n04275548
n03930630
n03764736
n02226429
n02007558
n04149813
n01820546
n01829413
n02110185
n02107683
n03840681
n02018207
n01833805
n03902125
n03868863
n03443371
n02113978
n03793489
n02859443
n02097047
n04192698
n07590611
n07880968
n07697537
n02342885
n02398521
n02002724
n02910353
n02442845
n02906734
n02494079
n02091831
n02823750
n04447861
n01796340
n03089624
n03924679
n01980166
n04435653
n03649909
n02107142
n02110063
n02403003
n04081281
n01735189
n01532829
n03891251
n02077923
n03977966
n03452741
n04465501
n02777292
n02113799
n04367480
n03787032
n01744401
n02667093
n03933933
n01580077
n02794156
n01796340
n02002556
n02837789
n01818515
n09835506
n04604644
n01917289
n03180011
n02102480
n03873416
n03995372
n03884397
n03657121
n02093754
n02102318
n02097658
n02108422
n01855672
n02489166
n03208938
n02116738
n07802026
n03584254
n02108000
n09256479
n02892767
n02105162
n03388549
n02870880
n02116738
n01807496
n03045698
n03717622
n03109150
n03388549
n02437616
n07930864
n03991062
n03709823
n03680355
n02033041
n02843684
n02795169
n02236044
n02509815
n04442312
n12998815
n03255030
n02111889
n03595614
n03788195
n02690373
n01756291
n01698640
n07565083
n01983481
n03445777
n03998194
n02879718
n07930864
n03255030
n02086646
n04120489
n03733281
n01667114
n03532672
n03179701
n04229816
n03733281
n09256479
n02105251
n03146219
n04330267
n06874185
n12620546
n01641577
n02106550
n02445715
n03146219
n02493793
n02509815
n02804610
n03590841
n01871265
n02483362
n02437616
n03895866
n02071294
n03291819
n13044778
n02114855
n01984695
n02500267
n06359193
n01843065
n03763968
n02643566
n04258138
n02667093
n07734744
n04153751
n02138441
n03188531
n07802026
n02100583
n07860988
n01817953
n02106166
n02483708
n03782006
n02007558
n04476259
n02835271
n03124170
n04550184
n03661043
n04204238
n03776460
n03837869
n04443257
n02486261
n01537544
n02317335
n02134418
n04557648
n01872401
n04209239
n01677366
n02100735
n02096437
n04479046
n01693334
n02965783
n01514859
n07613480
n02108422
n01914609
n03482405
n03710637
n04009552
n02106166
n01531178
n02704792
n04487394
n02834397
n02108915
n02484975
n04310018
n02095570
n03447721
n02119022
n03017168
n03697007
n03249569
n02835271
n04591713
n03347037
n02791124
n01692333
n01882714
n03196217
n02422699
n04041544
n03796401
n02028035
n02966193
n04235860
n03642806
n03838899
n02510455
n01930112
n03781244
n02091032
n02025239
n03196217
n02094114
n01978455
n04254120
n13040303
n03459775
n07716358
n03016953
n03876231
n02892767
n04069434
n02256656
n02168699
n02128757
n01986214
n02009229
n02790996
n03630383
n07718747
n02361337
n02951585
n07873807
n03223299
n07836838
n04266014
n03956157
n02002724
n02077923
n02002556
n02951358
n03259280
n02113186
n02843684
n04332243
n01775062
n02777292
n04118538
n02226429
n03908618
n02782093
n03777568
n02101556
n02701002
n02018795
n02102318
n03045698
n04254680
n02692877
n12620546
n02325366
n01560419
n02977058
n03127925
n04325704
n03483316
n02101556
n03450230
n04264628
n02101556
n03482405
n07715103
n03544143
n02395406
n01797886
n03207941
n04389033
n01978455
n01755581
n02708093
n03461385
n02342885
n01930112
n04009552
n02804610
n13037406
n02092339
n02106550
n04033995
n02395406
n03733131
n02859443
n04008634
n02841315
n02412080
n03785016
n01440764
n03100240
n01665541
n03710721
n04599235
n04370456
n02124075
n02138441
n03085013
n01744401
n04296562
n09835506
n03785016
n07754684
n04311004
n02124075
n02802426
n04239074
n02971356
n02009229
n02096177
n01695060
n03954731
n01828970
n02086240
n02447366
n03095699
n03590841
n03482405
n02107574
n02096294
n03085013
n04456115
n04486054
n04599235
n03141823
n04263257
n03877845
n04428191
n03976657
n02797295
n03637318
n03041632
n07579787
n02687172
n03201208
n04579145
n01608432
n02099849
n01667114
n04372370
n02106166
n03075370
n02138441
n03028079
n01930112
n03388183
n03825788
n13044778
n02687172
n03692522
n02391049
n04254120
n03146219
n03126707
n02025239
n07714571
n02869837
n01580077
n03594945
n02109525
n04099969
n03792972
n03623198
n01872401
n02441942
n03032252
n02687172
n02096294
n02037110
n04310018
n02280649
n03992509
n04037443
n01806567
n02325366
n03372029
n02259212
n04371430
n02391049
n01755581
n01820546
n02264363
n01494475
n03201208
n01774750
n03259280
n02687172
n04090263
n02483708
n04487081
n03218198
n02480495
n01692333
n03017168
n01843065
n03930630
n02056570
n03041632
n02799071
n03344393
n01514859
n02113978
n02027492
n01981276
n02397096
n04192698
n03134739
n02666196
n02117135
n04461696
n02231487
n09246464
n04149813
n02102040
n02086910
n04355338
n02457408
n02093428
n01689811
n03481172
n07836838
n03803284
n01910747
n04553703
n03478589
n03584829
n04254777
n04254120
n02105505
n02361337
n03992509
n02804610
n02102318
n01560419
n01773549
n03902125
n06359193
n02129165
n02120079
n02113712
n01728920
n03160309
n07871810
n04258138
n03045698
n04552348
n13044778
n03717622
n02025239
n02268443
n02108915
n04542943
n03240683
n02966687
n07754684
n03991062
n02769748
n03187595
n03271574
n02256656
n03637318
n04357314
n03207941
n01728920
n04074963
n03000684
n04118538
n03888257
n03000134
n02930766
n02437616
n01622779
n03954731
n04266014
n02108915
n01729977
n04553703
n02328150
n07715103
n03617480
n02441942
n01734418
n02229544
n02259212
n03017168
n02077923
n03871628
n02025239
n02992211
n01978287
n01755581
n04008634
n01773797
n04209239
n04584207
n02493793
n01616318
n04127249
n01877812
n02814860
n03535780
n04040759
n02879718
n02514041
n04592741
n03854065
n01614925
n04026417
n03837869
n02865351
n04239074
n06794110
n02190166
n04208210
n02088238
n02497673
n03179701
n04613696
n01693334
n02672831
n02817516
n02106662
n04392985
n03777754
n03649909
n04311004
n01664065
n04389033
n02807133
n03476991
n03141823
n03793489
n02988304
n03325584
n01871265
n09288635
n04326547
n02110063
n03220513
n02093859
n01693334
n02815834
n02107574
n04487081
n04347754
n07695742
n04086273
n04493381
n01580077
n02910353
n07754684
n04067472
n12768682
n01675722
n02437312
n04417672
n03868863
n13054560
n02100735
n03888605
n04009552
n04238763
n03876231
n03706229
n02859443
n01530575
n01824575
n02096437
n04486054
n02704792
n02110185
n01824575
n12620546
n03814906
n04154565
n02058221
n02111129
n03690938
n03857828
n01534433
n09229709
n02086910
n04507155
n02098105
n02089078
n04355933
n02930766
n03384352
n02892201
n03992509
n02109961
n04479046
n03000247
n03047690
n04258138
n04005630
n02281787
n01693334
n03379051
n01614925
n04479046
n04591713
n03920288
n02051845
n01756291
n02107312
n04435653
n03325584
n02058221
n02107683
n02111277
n03786901
n07768694
n03891332
n04204347
n03400231
n03961711
n02490219
n03347037
n04597913
n02090721
n03450230
n02112137
n03250847
n03868242
n02058221
n04141327
n03761084
n02090379
n02486261
n02095570
n01749939
n02804610
n04273569
n02777292
n03930630
n03775546
n07716906
n02916936
n02930766
n03709823
n02056570
n02412080
n02666196
n03196217
n04479046
n04509417
n01532829
n07697313
n02493793
n02058221
n04252077
n02002556
n02085936
n03063599
n04273569
n04550184
n03710193
n01742172
n02443484
n03720891
n03706229
n02643566
n03218198
n03877845
n01630670
n07714990
n02264363
n01532829
n04540053
n02113712
n04259630
n03661043
n03220513
n03445924
n07831146
n01530575
n03691459
n01773157
n06785654
n03290653
n03995372
n03866082
n02276258
n03777568
n01675722
n12985857
n02835271
n03444034
n02101006
n03637318
n03787032
n04258138
n03535780
n04065272
n02099267
n03347037
n01755581
n03908714
n02056570
n02093647
n01729977
n04344873
n01847000
n02112350
n01632458
n04562935
n03325584
n04127249
n04141076
n04554684
n07714571
n02027492
n03532672
n02992529
n02321529
n03538406
n03721384
n02013706
n04599235
n02093991
n02777292
n02123394
n07747607
n03424325
n03976657
n04209239
n02951585
n07753592
n04443257
n03388183
n10148035
n03344393
n04336792
n02120505
n01981276
n03933933
n01829413
n03916031
n02776631
n01775062
n04286575
n04209239
n07730033
n02099712
n07613480
n02100583
n03733805
n03873416
n04476259
n02113799
n02690373
n09468604
n02009912
n01980166
n02096294
n03764736
n03417042
n03000134
n10565667
n04120489
n02114855
n04039381
n04376876
n02843684
n02643566
n03924679
n03958227
n03773504
n02276258
n03776460
n03000684
n02129165
n03445924
n02108089
n04310018
n03873416
n02236044
n03483316
n02099601
n02115913
n02441942
n03967562
n04479046
n04344873
n02123597
n02229544
n03179701
n02791124
n04525305
n03976657
n04147183
n02835271
n01685808
n02280649
n01768244
n02489166
n04355338
n02279972
n03770679
n01498041
n04041544
n02085620
n02086240
n03532672
n02268853
n02978881
n02363005
n04442312
n02280649
n02108915
n04380533
n04462240
n03271574
n03930630
n02892767
n01797886
n01978287
n02437616
n03920288
n03160309
n01560419
n02666196
n03424325
n02514041
n02790996
n02397096
n01775062
n02071294
n02100583
n04380533
n01990800
n03903868
n07583066
n02013706
n02130308
n02113023
n03884397
n03000684
n04037443
n01687978
n02058221
n02704792
n07693725
n04039381
n03461385
n01950731
n03773504
n02104365
n04536866
n02328150
n07871810
n03372029
n04462240
n02133161
n02808304
n03443371
n01843065
n01914609
n01855032
n04380533
n02086646
n02363005
n04296562
n04033995
n02871525
n03742115
n02704792
n02108915
n03670208
n02093428
n04428191
n09421951
n01984695
n02128757
n01917289
n04033901
n02092002
n03840681
n03476684
n04286575
n04423845
n02951358
n03877845
n01728572
n03481172
n03208938
n02487347
n02107908
n07565083
n04479046
n03832673
n02948072
n02950826
n03929660
n04370456
n02978881
n01498041
n02783161
n03697007
n01820546
n03026506
n04584207
n02091467
n02422699
n02123045
n03793489
n03958227
n02443484
n02098286
n02788148
n04392985
n12768682
n03843555
n02894605
n04372370
n02077923
n02111889
n01770393
n02840245
n01631663
n02786058
n04462240
n02264363
n03942813
n02457408
n03476991
n02107312
n02917067
n04612504
n02100583
n04239074
n04476259
n02105855
n03929855
n02389026
n04389033
n03876231
n04041544
n01806143
n07584110
n02814533
n03868863
n02104365
n02128925
n02105251
n04447861
n04517823
n02395406
n04208210
n02091831
n04330267
n02444819
n02815834
n02264363
n01484850
n02105641
n02808440
n02116738
n01873310
n03792972
n02125311
n01855032
n02704792
n07717556
n03814906
n01667114
n03857828
n01784675
n02091032
n04409515
n01614925
n03769881
n02814533
n02093754
n07747607
n03857828
n04277352
n02104029
n04131690
n02951358
n02134084
n07749582
n03126707
n04325704
n02497673
n02105412
n01685808
n07871810
n02927161
n04380533
n04152593
n02106382
n04350905
n01795545
n03871628
n02965783
n07614500
n03884397
n03980874
n02492035
n02113712
n03417042
n04259630
n03483316
n01494475
n02088238
n07565083
n07753113
n04366367
n04120489
n04429376
n02091467
n02112350
n02699494
n03995372
n02113186
n01685808
n03347037
n02843684
n02108089
n03825788
n03773504
n02787622
n04325704
n03796401
n01698640
n03045698
n02422699
n04417672
n04141327
n04118538
n02113624
n04550184
n01728572
n04380533
n04209133
n01537544
n07920052
n04317175
n01742172
n02786058
n03417042
n03770679
n02804414
n02236044
n03085013
n04019541
n03661043
n03769881
n01773797
n02835271
n01494475
n01773797
n02097298
n01667114
n02106030
n02106030
n03146219
n01930112
n02102177
n13040303
n04357314
n04264628
n07875152
n04371774
n02099849
n03127925
n02869837
n03710193
n02097130
n07730033
n04311004
n03085013
n02102040
n04486054
n02111889
n04204238
n03792972
n03450230
n03617480
n02124075
n03495258
n03769881
n02916936
n01704323
n03063599
n01883070
n01614925
n04311004
n01692333
n03125729
n04192698
n03874293
n03496892
n04118776
n02454379
n04116512
n01677366
n01514668
n03476991
n03733805
n03942813
n03095699
n02883205
n02091467
n02817516
n06794110
n03131574
n02101388
n01978455
n02106382
n02108915
n03216828
n07615774
n07730033
n01770393
n04371430
n02123159
n01984695
n01737021
n02825657
n02099267
n03658185
n02815834
n02120079
n03908714
n04554684
n04604644
n03109150
n03866082
n03908714
n03617480
n02093647
n02510455
n04074963
n03089624
n02095314
n03218198
n02817516
n01943899
n03854065
n03891251
n04423845
n04131690
n04442312
n01537544
n03325584
n02095889
n03291819
n03042490
n02504013
n03146219
n04252077
n02328150
n01697457
n02655020
n04606251
n07720875
n02091831
n02097209
n01630670
n01950731
n01910747
n07695742
n03063689
n01871265
n03478589
n07583066
n02109525
n03982430
n04270147
n01871265
n02033041
n03476991
n01494475
n09229709
n03967562
n03902125
n02837789
n04311004
n04228054
n02087394
n04147183
n02133161
n03100240
n04204238
n02445715
n03481172
n04487394
n03796401
n02978881
n01877812
n01496331
n07717410
n02871525
n02442845
n02112706
n02879718
n03085013
n02799071
n03902125
n02965783
n02281406
n04404412
n02123159
n02747177
n04548280
n04591713
n04044716
n03742115
n02992211
n07717410
n10148035
n02099429
n02486261
n04447861
n03843555
n04263257
n04330267
n02787622
n02823750
n01740131
n04235860
n03498962
n02492660
n02437312
n07718747
n03803284
n02364673
n02906734
n07684084
n03970156
n03825788
n03814906
n07715103
n02749479
n02815834
n02877765
n02088364
n02088632
n04270147
n07248320
n01514668
n01883070
n02276258
n04554684
n02009229
n07248320
n01924916
n03376595
n03983396
n02112018
n01770393
n02403003
n02051845
n02870880
n02484975
n02113799
n03717622
n07930864
n07717410
n02730930
n03874599
n02105162
n02099712
n01530575
n03891332
n01773157
n02808440
n02177972
n03759954
n07579787
n02877765
n03958227
n03977966
n03825788
n03028079
n04501370
n02259212
n03961711
n03496892
n03706229
n04409515
n12144580
n03769881
n09193705
n02782093
n01734418
n04285008
n02120505
n02111277
n02640242
n02790996
n02099267
n07871810
n01986214
n01984695
n12985857
n04542943
n03888605
n04074963
n10565667
n04483307
n09835506
n02129165
n03538406
n01498041
n04461696
n03944341
n03259280
n01484850
n04486054
n03788195
n09193705
n03530642
n04557648
n02892201
n04509417
n03041632
n02093256
n02391049
n04479046
n03961711
n15075141
n02108915
n01847000
n02325366
n03770439
n03676483
n06794110
n01770393
n02788148
n03127925
n03710721
n02484975
n02536864
n02105855
n03733131
n04435653
n02124075
n03792782
n04465501
n01644373
n02085620
n03720891
n03814639
n03133878
n02892201
n02077923
n02992211
n02114712
n02410509
n03733131
n03843555
n02917067
n02128385
n04009552
n03888605
n03388043
n04596742
n03935335
n06785654
n02356798
n02398521
n03445924
n03041632
n03535780
n07753113
n02834397
n01824575
n07697313
n04487081
n02509815
n02106550
n01704323
n01742172
n02094433
n01817953
n03032252
n01742172
n02483362
n02096437
n02487347
n02096294
n04465501
n02948072
n03424325
n02111500
n02114367
n01537544
n01945685
n02607072
n04005630
n04127249
n07714990
n03662601
n03179701
n09468604
n01530575
n03100240
n06359193
n02510455
n02120079
n02096437
n03141823
n01484850
n04579432
n04118538
n02094433
n02086910
n01622779
n07747607
n07718747
n02106030
n02363005
n03599486
n03637318
n02101388
n03662601
n03188531
n02104029
n11939491
n04238763
n01945685
n02834397
n02099712
n01558993
n03450230
n03838899
n04243546
n02123159
n04536866
n02808304
n04120489
n03127925
n04505470
n03782006
n02281406
n04252225
n02776631
n02444819
n04005630
n03717622
n03961711
n03444034
n03970156
n01824575
n02396427
n02165456
n02226429
n02056570
n07693725
n04599235
n03944341
n02134418
n03788365
n07717410
n04264628
n03967562
n04265275
n03584254
n01614925
n07720875
n03814639
n04370456
n04037443
n03297495
n02129604
n03131574
n04243546
n02105855
n03895866
n03216828
n02317335
n02106030
n03661043
n01924916
n02165456
n04536866
n01616318
n02799071
n03788195
n02363005
n01924916
n04461696
n04270147
n02843684
n04258138
n03944341
n01737021
n01882714
n02817516
n02097298
n01843383
n04019541
n04118776
n02799071
n03967562
n03494278
n02229544
n04325704
n03967562
n13044778
n03344393
n04557648
n03447721
n09472597
n04118538
n03424325
n04599235
n01530575
n02835271
n09472597
n02092002
n02730930
n04599235
n02422699
n03657121
n01622779
n03903868
n02090721
n04443257
n01734418
n07714571
n01496331
n02264363
n03483316
n03742115
n07714990
n03590841
n03871628
n04311174
n02114548
n03255030
n02105505
n07579787
n07697313
n03400231
n06874185
n04591713
n04509417
n03255030
n03404251
n02268853
n07613480
n07768694
n02321529
n01818515
n01877812
n02895154
n03485794
n04553703
n02364673
n09229709
n02916936
n04235860
n07932039
n15075141
n02006656
n02487347
n02087394
n02480855
n04372370
n03733805
n02979186
n02033041
n10565667
n02006656
n02099267
n02108915
n03930630
n01728572
n04552348
n02090721
n02870880
n02951585
n04259630
n02328150
n04435653
n02843684
n03788195
n03887697
n04335435
n04228054
n01608432
n04355933
n02123045
n04589890
n04086273
n03832673
n02111277
n01704323
n03599486
n04254680
n02086240
n02817516
n02487347
n04592741
n03272010
n02018795
n01930112
n03223299
n03388043
n03888605
n04040759
n02169497
n02793495
n04376876
n02177972
n04485082
n07717410
n04081281
n03109150
n02090622
n03482405
n01664065
n03032252
n03355925
n01910747
n04536866
n03000247
n03527444
n02025239
n04254777
n04141975
n03793489
n02979186
n02127052
n01847000
n02328150
n02909870
n10565667
n03709823
n02992211
n02093859
n07747607
n07717410
n03249569
n01734418
n03944341
n04344873
n01677366
n02108000
n03876231
n04461696
n06596364
n09428293
n03482405
n02088094
n04136333
n04204238
n01697457
n04074963
n01514859
n02106662
n04252225
n02117135
n03476684
n01770393
n02795169
n03733131
n03676483
n04133789
n04435653
n01728920
n04033995
n04355933
n01675722
n03717622
n04428191
n03535780
n02105162
n07753275
n04483307
n02917067
n04118776
n03000684
n03000134
n02281787
n01770393
n02326432
n01753488
n02167151
n02808304
n04392985
n03197337
n03100240
n04286575
n03127925
n01945685
n02536864
n02799071
n02783161
n02346627
n02264363
n02088364
n02093754
n03617480
n02105162
n02966687
n01795545
n02091831
n01537544
n03041632
n02834397
n02699494
n03404251
n01860187
n04550184
n02992211
n02437312
n02098105
n07590611
n03527444
n07583066
n01748264
n02966687
n03803284
n04366367
n02119022
n01740131
n02099601
n01534433
n04606251
n02099601
n02488702
n04336792
n02391049
n02086646
n02086079
n02110806
n02110341
n04447861
n02119789
n04162706
n02259212
n03124043
n02101388
n03630383
n02980441
n02494079
n03602883
n01695060
n04141327
n04266014
n03047690
n02097209
n02113023
n02174001
n01669191
n01667778
n02096051
n04251144
n02112706
n02988304
n03461385
n03447447
n02077923
n03887697
n02342885
n01641577
n01616318
n02007558
n01698640
n04033995
n03804744
n02110063
n03355925
n01667114
n01914609
n03804744
n02669723
n07836838
n02412080
n03743016
n04336792
n13052670
n03791053
n03776460
n03017168
n04404412
n03777754
n04037443
n03796401
n04404412
n06596364
n02105412
n04023962
n01734418
n02328150
n02101006
n07684084
n02002556
n13133613
n07248320
n01753488
n02107908
n02123394
n04154565
n02504458
n13052670
n04008634
n02916936
n02107683
n02134084
n02443484
n07720875
n04493381
n03761084
n02102040
n03089624
n01985128
n01753488
n02137549
n09835506
n03443371
n02346627
n02002556
n04589890
n04562935
n01632777
n02317335
n01632458
n02493509
n02398521
n03970156
n02667093
n03825788
n02086646
n13044778
n02088238
n01776313
n02481823
n04423845
n03047690
n07749582
n02977058
n01796340
n02110627
n02910353
n03201208
n01728572
n02114367
n03980874
n02776631
n02165456
n02437312
n02364673
n03764736
n04041544
n12998815
n03388043
n03803284
n02113624
n02102318
n03424325
n03250847
n09288635
n03924679
n03956157
n01910747
n04560804
n07714990
n04542943
n07716906
n02128925
n04487394
n04399382
n04044716
n04465501
n03854065
n02398521
n02823750
n07583066
n02107312
n04584207
n01829413
n01833805
n02417914
n04081281
n02088364
n02113799
n04376876
n02093991
n02730930
n04133789
n02442845
n02018207
n03930630
n02910353
n02730930
n03776460
n02088364
n04264628
n07714990
n04461696
n03372029
n02090379
n01819313
n03657121
n02106662
n02109525
n02500267
n04376876
n04483307
n03843555
n13037406
n02097047
n02403003
n03290653
n02690373
n02536864
n02091467
n03843555
n04044716
n01537544
n02037110
n04146614
n04612504
n01484850
n07684084
n03220513
n04326547
n03127925
n02971356
n03476991
n01774384
n07565083
n02672831
n03967562
n03998194
n09229709
n01641577
n01682714
n04204347
n03160309
n03478589
n03792972
n04458633
n04392985
n02480855
n02099429
n07714571
n02098105
n02963159
n02777292
n03529860
n03706229
n12057211
n04612504
n04554684
n03590841
n03661043
n04065272
n01531178
n07614500
n02017213
n02859443
n04235860
n02256656
n03481172
n02110063
n02281787
n04579432
n01985128
n02363005
n04317175
n01737021
n03216828
n02095570
n07714571
n04525305
n07565083
n03494278
n04525038
n01494475
n04404412
n07718747
n03903868
n04376876
n02088632
n07720875
n02111277
n01728920
n04311004
n02877765
n06785654
n01978455
n01729977
n02906734
n01601694
n04429376
n02676566
n03733281
n02106382
n02817516
n04039381
n04356056
n01514859
n03791053
n04376876
n03630383
n04252077
n04417672
n01641577
n04141076
n02025239
n02992529
n02672831
n02088466
n01797886
n04501370
n04149813
n02172182
n04336792
n04417672
n03944341
n03961711
n04493381
n04258138
n04523525
n02423022
n02102177
n02865351
n04507155
n07930864
n02097047
n03916031
n02892201
n04254680
n01608432
n04461696
n03483316
n02500267
n02916936
n03452741
n02892201
n02113186
n03775546
n03478589
n03633091
n04599235
n03065424
n02097209
n01873310
n04604644
n04418357
n03794056
n03179701
n01440764
n01806143
n02093859
n01496331
n01669191
n04367480
n02971356
n02114548
n03249569
n01796340
n07613480
n04505470
n03804744
n02950826
n03743016
n02777292
n03089624
n02110341
n03485407
n02480855
n02356798
n02910353
n03662601
n01601694
n04141076
n03384352
n02492660
n03376595
n02776631
n02025239
n04065272
n02033041
n03417042
n09332890
n02097658
n04552348
n03447447
n03781244
n03000684
n01749939
n01677366
n02094114
n04465501
n04372370
n02281787
n03196217
n02277742
n02701002
n03290653
n03452741
n01806143
n04037443
n03825788
n04266014
n07716906
n02123597
n02110063
n02981792
n03804744
n02134418
n03970156
n02483362
n02486261
n01514668
n02134084
n03970156
n01558993
n01644373
n03692522
n03804744
n02804414
n02108551
n01560419
n02490219
n03710637
n03673027
n04552348
n02094114
n03967562
n03776460
n02447366
n03733805
n03127925
n02279972
n09428293
n03089624
n03938244
n04041544
n02113712
n03594734
n02206856
n03485794
n02256656
n02981792
n03347037
n03026506
n04356056
n09332890
n07565083
n07760859
n04286575
n02790996
n01873310
n03337140
n04483307
n02281787
n02114548
n12057211
n02971356
n04591713
n04371774
n03841143
n02229544
n02794156
n04270147
n04090263
n04592741
n02120505
n02120505
n03532672
n03062245
n03089624
n03710193
n03792972
n02085936
n01924916
n01692333
n04428191
n13044778
n06359193
n07693725
n02916936
n02488702
n02489166
n02102318
n03980874
n04265275
n04429376
n02480855
n07873807
n03478589
n02071294
n02097298
n01734418
n02123159
n02951585
n07714990
n02859443
n04447861
n02096585
n03902125
n04525038
n03028079
n03866082
n03891332
n03220513
n03207743
n04589890
n03871628
n01774750
n02125311
n02747177
n04153751
n02101556
n02095570
n01629819
n03042490
n01872401
n04311004
n04228054
n03983396
n04456115
n04070727
n02490219
n02093256
n03710193
n03742115
n03841143
n04285008
n02074367
n02526121
n02116738
n03666591
n02363005
n02910353
n02219486
n03063599
n01955084
n02104029
n02114855
n04023962
n04376876
n04275548
n01682714
n01641577
n02676566
n07892512
n01775062
n03457902
n04486054
n03457902
n02843684
n07768694
n04026417
n03355925
n02025239
n03781244
n03947888
n02280649
n03450230
n02098286
n03776460
n03594945
n07734744
n02276258
n07720875
n02988304
n03595614
n02951358
n03764736
n02939185
n02091134
n01978287
n02268443
n03127747
n03814639
n03874293
n04081281
n07768694
n07715103
n02790996
n03160309
n04525038
n02013706
n04540053
n02105056
n07715103
n01860187
n07920052
n01687978
n07590611
n03394916
n03947888
n01945685
n02110063
n04074963
n04606251
n03594945
n04254120
n03187595
n02110958
n02977058
n07930864
n02099601
n03590841
n02441942
n01806567
n02643566
n03874293
n03255030
n04487394
n07760859
n02112137
n04486054
n01496331
n03337140
n01882714
n02113978
n07615774
n02168699
n04465501
n02086910
n04136333
n04254120
n03530642
n03187595
n01770393
n02422106
n03709823
n02910353
n01855672
n02361337
n01580077
n01694178
n04120489
n04517823
n03775546
n01773157
n03775546
n03777568
n04355933
n01784675
n01498041
n02422699
n04447861
n02177972
n02319095
n03935335
n03980874
n03976657
n02442845
n02085782
n03976467
n07583066
n04461696
n04467665
n02105641
n04501370
n03777754
n04065272
n03447721
n02206856
n03459775
n03947888
n04111531
n02807133
n03481172
n01983481
n03733131
n02105641
n03841143
n03976467
n02391049
n03196217
n02422699
n04462240
n04328186
n04310018
n04417672
n03018349
n02965783
n01629819
n03207941
n04311174
n02226429
n02363005
n03041632
n04033901
n02410509
n02112137
n02747177
n02825657
n02097298
n02992529
n03032252
n01734418
n04090263
n04201297
n02094258
n04111531
n04265275
n04065272
n02676566
n03388043
n07930864
n02423022
n02108551
n03424325
n02815834
n04228054
n02097209
n02137549
n03314780
n01608432
n01820546
n02109961
n01580077
n07579787
n03788365
n02749479
n03930313
n01806567
n02927161
n04447861
n04548362
n02259212
n04252225
n02105162
n03345487
n02727426
n07584110
n04005630
n02096294
n04273569
n02422106
n03534580
n09288635
n01795545
n02397096
n02730930
n01806143
n03661043
n02807133
n02277742
n07613480
n03297495
n03761084
n03109150
n07716906
n12267677
n04204238
n04204347
n04596742
n03710637
n02481823
n02669723
n01491361
n01629819
n03982430
n02869837
n01843065
n04311174
n01820546
n01677366
n02108089
n01807496
n03710721
n03063599
n03498962
n01729322
n02769748
n02268853
n04081281
n03983396
n06359193
n02127052
n02107142
n02488702
n02006656
n07831146
n02676566
n04277352
n03527444
n03372029
n03314780
n02114712
n01978287
n03337140
n03538406
n02917067
n01756291
n01667778
n01795545
n01631663
n02088364
n02808304
n01797886
n02104029
n03201208
n01558993
n03967562
n04428191
n02494079
n04162706
n04515003
n04040759
n01774750
n01943899
n02098413
n02099601
n04270147
n02417914
n03065424
n07734744
n02007558
n02119789
n07695742
n02364673
n01689811
n02672831
n02124075
n01644900
n04335435
n02086646
n02095889
n02105251
n02391049
n01955084
n02480495
n03032252
n02808440
n03637318
n02877765
n04597913
n02112706
n04590129
n01910747
n02895154
n03062245
n03775546
n03372029
n04228054
n04258138
n04074963
n11879895
n01986214
n01943899
n02138441
n01806143
n01983481
n03478589
n04389033
n02951358
n02102318
n03763968
n03594734
n01689811
n07753113
n02074367
n01819313
n03467068
n03393912
n02056570
n04008634
n04254777
n01644900
n02106166
n03891251
n04435653
n01773549
n03729826
n01770081
n03529860
n03110669
n03841143
n02091244
n04067472
n04371430
n03796401
n03782006
n04238763
n01784675
n04019541
n02097209
n02259212
n03956157
n02112706
n02111889
n03527444
n02167151
n04442312
n07695742
n03710193
n04074963
n02099849
n02134418
n02825657
n13037406
n02085782
n02417914
n12620546
n04275548
n02804610
n04146614
n01514668
n01443537
n04509417
n02892201
n02088466
n03065424
n04254120
n03792972
n01924916
n02037110
n07697537
n03394916
n02101006
n02110806
n03146219
n02814860
n03649909
n03127747
n01980166
n02092002
n03787032
n02133161
n03874599
n04201297
n02106550
n07615774
n03710637
n03527444
n07714990
n03017168
n02111500
n01744401
n03950228
n02410509
n02483708
n07583066
n04589890
n02655020
n02259212
n01990800
n03457902
n07920052
n04505470
n02111129
n03216828
n02892767
n02095314
n02092002
n01664065
n03944341
n03495258
n01737021
n01677366
n01806567
n02097298
n04532670
n04522168
n02708093
n02066245
n02971356
n02906734
n03492542
n03930313
n02396427
n02037110
n03297495
n03017168
n01773797
n03786901
n02910353
n02102177
n02730930
n02480495
n04562935
n02109525
n02988304
n02091467
n04204238
n04476259
n01532829
n03208938
n04532106
n02165105
n01677366
n07715103
n02795169
n02127052
n02098286
n01728572
n01833805
n02445715
n02259212
n04209133
n07711569
n07860988
n09421951
n03125729
n04141076
n01742172
n03063689
n01704323
n01748264
n01770393
n01955084
n02894605
n03792972
n04141975
n02672831
n03018349
n02971356
n02859443
n07749582
n03792782
n02398521
n04254777
n02326432
n03877472
n02123045
n03623198
n02342885
n03187595
n03884397
n04330267
n04266014
n02138441
n03538406
n03000247
n02363005
n02883205
n07753592
n04371430
n03871628
n03633091
n04023962
n01740131
n04251144
n02870880
n02009912
n03461385
n02328150
n01945685
n02280649
n02012849
n02112137
n04326547
n02117135
n07930864
n04136333
n04370456
n01737021
n01817953
n03888605
n03452741
n04330267
n07932039
n02398521
n07930864
n03787032
n02112350
n12267677
n03494278
n07693725
n03857828
n02815834
n04376876
n03874293
n04371774
n03929855
n02841315
n02090721
n09468604
n02488291
n02106662
n03461385
n04485082
n03995372
n02493793
n01914609
n02002556
n07711569
n02098286
n07693725
n02422106
n02110958
n04613696
n03692522
n07920052
n02799071
n04037443
n02113978
n01530575
n10565667
n10148035
n03773504
n03347037
n09193705
n02113978
n01882714
n03527444
n02979186
n01877812
n02111129
n03417042
n03461385
n02114855
n12768682
n01950731
n02667093
n02011460
n03290653
n02108000
n04229816
n01930112
n02486261
n04542943
n04235860
n07768694
n02403003
n03786901
n02396427
n02109047
n01968897
n03388043
n04258138
n02112137
n02607072
n02134084
n03837869
n04200800
n02071294
n04141076
n02085620
n03218198
n02098286
n02099601
n04099969
n03216828
n02892767
n03482405
n03838899
n03018349
n04487394
n04141076
n02106382
n11939491
n03100240
n03908714
n07831146
n09256479
n12267677
n04152593
n02093428
n02791270
n02099429
n02105056
n03223299
n02643566
n07720875
n02124075
n02699494
n03888605
n03249569
n03584254
n02981792
n04133789
n03534580
n01518878
n02704792
n07747607
n13037406
n02488291
n03538406
n03627232
n02099429
n02704792
n07684084
n03733805
n02397096
n02114367
n02319095
n02086646
n02094433
n04133789
n04483307
n02504013
n04525038
n04265275
n04209239
n03967562
n02129165
n03777754
n09835506
n02727426
n01693334
n02457408
n02128925
n03903868
n04409515
n01950731
n06359193
n03187595
n01950731
n04041544
n02892767
n02363005
n04355338
n02277742
n04090263
n03314780
n04285008
n01847000
n02094433
n02098105
n07892512
n09229709
n03527444
n03530642
n01774384
n01773157
n04366367
n03676483
n01930112
n03933933
n03877845
n02104365
n07697537
n02444819
n13037406
n04296562
n02457408
n11879895
n04120489
n03958227
n03187595
n03930630
n02277742
n01774750
n04550184
n02837789
n04479046
n02500267
n04317175
n07875152
n01687978
n02088094
n02814533
n02109961
n02117135
n04579145
n07880968
n02190166
n02396427
n04542943
n04357314
n02114855
n03920288
n02120079
n01776313
n01847000
n04447861
n04019541
n03201208
n03857828
n03404251
n07754684
n09256479
n02442845
n06794110
n02917067
n04592741
n02389026
n03444034
n03724870
n02895154
n02165456
n03804744
n01742172
n02037110
n02087046
n02865351
n02025239
n03887697
n02814533
n04133789
n03891332
n02483708
n07714571
n03982430
n04579145
n02127052
n07932039
n04238763
n03710637
n02825657
n03977966
n02321529
n02493509
n02219486
n09193705
n01950731
n03457902
n03908714
n03980874
n02113624
n03393912
n03379051
n01688243
n02971356
n04243546
n02510455
n02092002
n02116738
n02391049
n04111531
n02128925
n02097047
n02071294
n04462240
n01748264
n02086910
n04326547
n02107908
n06874185
n03773504
n04039381
n03874293
n04482393
n04371774
n02088094
n03887697
n03452741
n07802026
n02509815
n03347037
n03983396
n01774750
n02879718
n03888257
n01796340
n07717556
n02112706
n01742172
n12998815
n03271574
n01775062
n02112706
n04153751
n04350905
n02481823
n02487347
n01950731
n02667093
n02089973
n04592741
n03393912
n02840245
n02006656
n01498041
n04548362
n02782093
n09193705
n02443114
n01773549
n02093428
n04116512
n01770393
n02128925
n02939185
n04133789
n02777292
n03976657
n03876231
n02443114
n04590129
n02114855
n04335435
n03372029
n04418357
n02109961
n02088094
n02279972
n03657121
n04482393
n04229816
n02264363
n04136333
n02027492
n03617480
n07753592
n03459775
n04154565
n03425413
n01955084
n03127925
n02017213
n02437616
n01774384
n07760859
n01818515
n03000684
n02128385
n04487081
n02105505
n03376595
n02130308
n02108000
n03042490
n02992211
n07718472
n02417914
n02701002
n02058221
n03888605
n01694178
n01855672
n02168699
n02676566
n04507155
n03777754
n01704323
n02088094
n03444034
n02883205
n02909870
n02787622
n02102973
n02514041
n03085013
n04328186
n02494079
n02093428
n01986214
n03594945
n01847000
n02110958
n04252077
n03041632
n09421951
n03776460
n03676483
n02804610
n02112350
n02096294
n02108089
n03690938
n04372370
n03877845
n02111500
n04476259
n02104029
n02085782
n03424325
n01943899
n02443114
n02865351
n02129604
n04487394
n02493509
n03026506
n04136333
n04507155
n04356056
n04039381
n03944341
n03947888
n02098105
n02133161
n02841315
n04251144
n02094114
n04505470
n01829413
n02493509
n11879895
n07875152
n01983481
n02500267
n02085620
n13040303
n03902125
n12620546
n03599486
n03891332
n02102480
n04118538
n01807496
n01860187
n03444034
n01491361
n07831146
n02666196
n02892767
n13040303
n03032252
n02125311
n02168699
n02117135
n02395406
n01537544
n07753275
n04428191
n02109961
n04235860
n02417914
n04584207
n04070727
n01873310
n02749479
n02769748
n07714571
n04367480
n02012849
n01665541
n02167151
n02088466
n03527444
n04409515
n02013706
n03325584
n02441942
n07613480
n02101006
n02088632
n02129604
n01685808
n02966687
n04367480
n03908618
n02977058
n04111531
n03042490
n03717622
n06785654
n02980441
n01968897
n01843065
n04554684
n04523525
n04417672
n01855672
n03873416
n02100877
n02105505
n03492542
n01833805
n04116512
n04487394
n02105505
n03297495
n02119022
n04392985
n02108422
n02098413
n02012849
n04487394
n01990800
n02817516
n03216828
n03187595
n07871810
n02669723
n02229544
n02966687
n02113712
n03930313
n03417042
n02389026
n03249569
n03633091
n02096294
n02110627
n03916031
n07920052
n04146614
n03207743
n02325366
n03954731
n04133789
n03788195
n03982430
n02112706
n02017213
n02492660
n03976467
n03792782
n02123159
n07754684
n03444034
n03063599
n02326432
n02009912
n04154565
n03492542
n03649909
n02101388
n02091134
n02892201
n02077923
n02168699
n04239074
n03899768
n04461696
n03124170
n09428293
n03000247
n01558993
n02104365
n02093991
n03837869
n02169497
n03492542
n03706229
n02129165
n03216828
n03662601
n02444819
n03930313
n04039381
n01601694
n04228054
n02788148
n03133878
n01983481
n02093859
n02106166
n02102973
n03982430
n02667093
n03891332
n01592084
n02172182
n03404251
n02259212
n03250847
n02817516
n07747607
n03063599
n03935335
n02085620
n02092002
n02999410
n02504458
n03100240
n04392985
n02105855
n07718747
n03721384
n02483362
n01629819
n02107683
n02951358
n07920052
n03733805
n02483362
n01798484
n04418357
n04251144
n03197337
n03908618
n01978287
n01817953
n04486054
n04127249
n01945685
n07711569
n02088238
n02105641
n02910353
n07892512
n01484850
n03657121
n02859443
n07860988
n04141327
n03868863
n01768244
n03657121
n02102973
n02111500
n01632458
n02319095
n04328186
n04311004
n01558993
n01773549
n01622779
n02442845
n07768694
n01632777
n03733805
n03133878
n02012849
n03496892
n02066245
n02094433
n03271574
n02128757
n03792782
n02018795
n01630670
n02101006
n04067472
n02100583
n04317175
n03602883
n04141327
n02102040
n07875152
n02892201
n04127249
n07753275
n04355338
n02236044
n01749939
n07717556
n02317335
n02606052
n04483307
n04435653
n04264628
n04347754
n04179913
n07583066
n04146614
n03478589
n03599486
n02676566
n02264363
n04371430
n03782006
n04604644
n03180011
n03045698
n03887697
n02085936
n07614500
n04296562
n02074367
n01729977
n02018795
n01735189
n03777568
n03775546
n02091244
n03838899
n04357314
n01945685
n03788365
n02441942
n04429376
n02119022
n01945685
n03627232
n02056570
n02437616
n03590841
n01491361
n01871265
n04442312
n01833805
n04596742
n04553703
n04487394
n03763968
n02514041
n11879895
n04525038
n02510455
n04275548
n01531178
n04162706
n03240683
n04589890
n03871628
n04443257
n02655020
n04264628
n01843383
n02138441
n02091032
n02281406
n03272010
n03775546
n03345487
n03532672
n02814860
n07714571
n02423022
n03187595
n03992509
n03933933
n03956157
n07920052
n01981276
n03710721
n04201297
n09472597
n02097130
n02111889
n03929660
n02804610
n03961711
n07613480
n01755581
n02277742
n03452741
n02396427
n01514859
n04590129
n04116512
n01631663
n07711569
n02134084
n04332243
n04517823
n01558993
n02817516
n02088632
n03457902
n01775062
n02328150
n02804610
n02077923
n02129604
n02095314
n03388183
n02536864
n03134739
n03014705
n02423022
n04254120
n03776460
n03788195
n03637318
n02112706
n03777568
n02089078
n03838899
n03661043
n02687172
n02097658
n02395406
n01820546
n03788365
n02963159
n02097298
n07717556
n02114367
n02219486
n04442312
n04536866
n02979186
n04458633
n07584110
n03633091
n04501370
n03000684
n02417914
n02093859
n04228054
n03478589
n02112137
n03642806
n02113712
n02817516
n03980874
n01644900
n11879895
n04347754
n03788195
n02825657
n02119789
n02128925
n02129604
n04523525
n04162706
n03000247
n04347754
n02447366
n02096294
n02002724
n02098413
n03467068
n01582220
n02002556
n03063689
n01855672
n02971356
n02086240
n02817516
n01930112
n02490219
n09428293
n02091467
n03710637
n02917067
n06596364
n01532829
n02056570
n04560804
n01735189
n04557648
n07711569
n06785654
n04118776
n02860847
n02007558
n02356798
n04070727
n02489166
n07714990
n02104365
n02007558
n03649909
n01667114
n01641577
n03028079
n03494278
n07880968
n03775071
n01632458
n01990800
n02442845
n02119022
n02006656
n02701002
n02483362
n03124170
n01531178
n02704792
n02099849
n01873310
n01735189
n04462240
n03065424
n04398044
n04120489
n04330267
n03967562
n02099601
n03388043
n02100583
n02093991
n09399592
n01773797
n03761084
n02342885
n02206856
n02098286
n03207743
n13040303
n01629819
n02927161
n04125021
n04554684
n02328150
n03476684
n02114367
n03793489
n03633091
n03930630
n02871525
n02097474
n02113799
n02408429
n03899768
n07831146
n04525038
n02808304
n03724870
n02033041
n02110063
n03063689
n01855672
n02395406
n04254680
n03063689
n02487347
n02640242
n03457902
n12267677
n04482393
n04009552
n02174001
n01990800
n04209133
n01950731
n02113186
n03095699
n01770081
n04127249
n02971356
n02490219
n04044716
n01667778
n03710721
n03141823
n04099969
n02325366
n04599235
n01978455
n03599486
n02090622
n03630383
n02117135
n02037110
n02219486
n03297495
n02105505
n04263257
n02442845
n04266014
n03393912
n02115641
n02883205
n01729977
n03047690
n02361337
n04560804
n02106662
n03876231
n03041632
n02098105
n01560419
n02089078
n03218198
n04153751
n02123597
n03584829
n02930766
n03781244
n02264363
n07711569
n04418357
n06596364
n03345487
n02835271
n04467665
n03450230
n03692522
n03929660
n03935335
n01630670
n02120505
n02172182
n03777754
n04209133
n01687978
n03481172
n02088094
n02112350
n03982430
n02124075
n03854065
n04141076
n06785654
n02981792
n03207941
n03028079
n13133613
n02423022
n03777568
n02328150
n02037110
n02092002
n02655020
n04443257
n02963159
n01687978
n09193705
n10148035
n03065424
n03792972
n02013706
n01494475
n07860988
n02099267
n04355933
n02457408
n01943899
n03733131
n04252077
n02978881
n03868863
n03544143
n03692522
n12768682
n02088094
n04023962
n02793495
n03840681
n01773549
n03843555
n04482393
n07753592
n03673027
n07930864
n01685808
n02037110
n02787622
n06596364
n02033041
n04204238
n12267677
n02321529
n03404251
n03000684
n07753592
n03804744
n01514668
n03594945
n02110627
n03793489
n04243546
n02490219
n02817516
n03291819
n02100877
n01440764
n04209239
n02088364
n04590129
n02110806
n09229709
n02447366
n04606251
n04562935
n02128385
n02837789
n02363005
n04133789
n02165456
n03649909
n03661043
n02107683
n01688243
n01843383
n03891251
n12620546
n03832673
n03452741
n04074963
n04228054
n03982430
n01795545
n02877765
n03196217
n04435653
n02105505
n04467665
n07695742
n02672831
n03690938
n04456115
n04125021
n15075141
n03761084
n04487394
n02108089
n07932039
n01806567
n02089078
n02028035
n03623198
n02108551
n01632458
n03445924
n01739381
n03887697
n07836838
n02364673
n03355925
n02113799
n04476259
n02437312
n03534580
n03841143
n03131574
n07697537
n01818515
n03929660
n02093647
n02892767
n03916031
n04081281
n04443257
n02441942
n01534433
n01843383
n02951358
n02089078
n03874293
n03127925
n02094258
n04366367
n03485407
n04597913
n01755581
n01795545
n01601694
n01944390
n03124170
n02395406
n03594734
n01685808
n01582220
n02110627
n03991062
n02699494
n09472597
n02500267
n03476991
n02963159
n02089867
n01697457
n03347037
n01806143
n02074367
n02699494
n04090263
n03763968
n02422699
n04070727
n01694178
n01797886
n03459775
n03977966
n01751748
n03803284
n01950731
n01532829
n02454379
n02051845
n03976657
n07248320
n07753275
n09332890
n02002556
n03602883
n12057211
n02123045
n02950826
n02219486
n02115641
n02085936
n02951585
n02111889
n02102480
n01443537
n02105162
n02794156
n04479046
n03047690
n02105412
n02692877
n01739381
n07930864
n04552348
n02835271
n01531178
n04120489
n01582220
n02840245
n02422106
n01697457
n03075370
n04136333
n03874599
n03492542
n02389026
n03207743
n02089867
n04136333
n06359193
n02106382
n02101006
n02091467
n03325584
n01616318
n02804610
n07717556
n02111500
n01608432
n02007558
n03887697
n02107142
n02641379
n07734744
n03710193
n02231487
n02028035
n04296562
n04009552
n02977058
n03710721
n03884397
n03775546
n07892512
n04254777
n07697537
n03792782
n02102480
n03000247
n02117135
n01796340
n02892201
n04254680
n04040759
n01773549
n04040759
n03124170
n02790996
n04037443
n02033041
n04509417
n01484850
n03697007
n04208210
n04209133
n02497673
n03840681
n03785016
n04086273
n02085936
n02134084
n03404251
n02098286
n07734744
n03998194
n02086910
n03250847
n03983396
n04336792
n03457902
n03026506
n03980874
n01818515
n04507155
n03933933
n13037406
n04235860
n02504013
n03297495
n02802426
n01491361
n02916936
n01755581
n02727426
n04228054
n03584254
n04317175
n01667114
n04486054
n02110341
n04465501
n02974003
n12768682
n12998815
n02111129
n11879895
n03775546
n03496892
n03791053
n01768244
n09421951
n04192698
n04517823
n02514041
n12985857
n13054560
n04330267
n03388549
n04254120
n04423845
n11879895
n02776631
n02137549
n03495258
n03355925
n02486410
n02749479
n03187595
n03388043
n04005630
n02100877
n07714990
n06359193
n02096051
n02105641
n07579787
n09472597
n04355338
n03680355
n02730930
n03874599
n02730930
n04552348
n03535780
n01753488
n02012849
n01704323
n02097209
n03908714
n04589890
n04372370
n01443537
n03457902
n04238763
n09246464
n01739381
n02488702
n04026417
n01530575
n07749582
n02102480
n04557648
n02096585
n01740131
n04389033
n03314780
n07875152
n02492660
n12057211
n04371430
n02099267
n03495258
n02096051
n02105162
n02105641
n03016953
n02808440
n03598930
n04542943
n01855672
n03733281
n07717410
n02504013
n02091831
n04133789
n04356056
n02879718
n03891251
n03379051
n02113978
n09288635
n02444819
n01945685
n03980874
n02526121
n02101556
n04040759
n02009229
n03837869
n04311174
n07583066
n02777292
n03950228
n02129165
n02114548
n02100735
n04590129
n03400231
n03868242
n02074367
n06874185
n04141327
n01833805
n09288635
n04070727
n02795169
n03944341
n01560419
n03187595
n02092339
n03388043
n03255030
n04532670
n02120505
n02894605
n02101388
n01608432
n03995372
n02259212
n03908618
n03223299
n02107683
n07932039
n03063689
n01629819
n03982430
n03188531
n01748264
n03877472
n02115913
n01748264
n04350905
n04070727
n02643566
n02966193
n01770393
n02672831
n02494079
n02930766
n03259280
n02442845
n03903868
n03710721
n02690373
n01531178
n01496331
n03710721
n02088094
n07717556
n03920288
n02089078
n02109525
n02808304
n03447447
n04548280
n02906734
n07716358
n01774384
n03637318
n02909870
n03788195
n02699494
n04355338
n02095889
n02606052
n03623198
n01641577
n01669191
n02457408
n03627232
n02769748
n04311004
n03584254
n03220513
n03530642
n04285008
n01644373
n09421951
n03733281
n03047690
n02808304
n03720891
n02437616
n07684084
n01749939
n04409515
n02494079
n02948072
n02110806
n02077923
n01924916
n01496331
n04604644
n02667093
n02107142
n01692333
n04277352
n04254777
n02676566
n12144580
n03630383
n02095889
n03666591
n03937543
n01498041
n03272562
n09472597
n03223299
n04456115
n02099601
n03000134
n02951585
n03717622
n01910747
n06596364
n01820546
n02018795
n04264628
n02096177
n01944390
n01978287
n01818515
n03125729
n02093256
n01855032
n02009912
n02097047
n02113712
n01883070
n01774750
n01665541
n02093428
n01980166
n04392985
n03947888
n02690373
n02090721
n04023962
n03476684
n04389033
n03729826
n02910353
n01632458
n02167151
n02676566
n03045698
n01770081
n04238763
n10148035
n04344873
n02481823
n04467665
n02013706
n02088238
n02877765
n01833805
n07718747
n02091467
n03627232
n04141076
n04209239
n01950731
n04467665
n03976657
n03729826
n04398044
n07754684
n04465501
n01776313
n02111129
n03207743
n03201208
n01847000
n02085936
n03710721
n04599235
n02817516
n02807133
n04389033
n02840245
n04423845
n07718472
n02356798
n02167151
n02966687
n02790996
n02840245
n02342885
n02437312
n07716906
n02233338
n03379051
n01990800
n02443114
n01498041
n03337140
n02165105
n04525305
n02226429
n01558993
n02110341
n04069434
n01644900
n02096177
n04347754
n03127747
n02106382
n01608432
n02412080
n02134084
n04486054
n04026417
n02437616
n04081281
n04417672
n02018207
n03018349
n03595614
n02120079
n03388183
n03902125
n02403003
n03933933
n09193705
n01872401
n03534580
n02129165
n03710193
n01981276
n02259212
n07873807
n01843065
n02457408
n02837789
n02177972
n02951585
n02101006
n02965783
n04482393
n01616318
n04465501
n03485407
n02086646
n02085620
n02361337
n01753488
n04579145
n01682714
n02105641
n04065272
n01968897
n02102973
n12144580
n04372370
n02127052
n02690373
n02895154
n04049303
n03676483
n02268443
n02869837
n02206856
n04201297
n02091244
n02101556
n02843684
n04380533
n07753275
n01534433
n02027492
n02971356
n04118538
n03384352
n03444034
n03676483
n03495258
n02666196
n01756291
n03482405
n02098413
n04355933
n03841143
n02120079
n02417914
n03857828
n02114712
n01729977
n01770081
n03733131
n03793489
n03590841
n02088364
n01847000
n11939491
n03724870
n02025239
n07717556
n02119789
n03016953
n02129165
n04033901
n02790996
n02012849
n02099429
n03691459
n04330267
n10148035
n03888257
n07584110
n02096437
n04515003
n02804610
n02096437
n04418357
n02033041
n02092339
n12620546
n01669191
n03160309
n02112137
n02172182
n03110669
n04380533
n03673027
n03347037
n04201297
n02492660
n02110958
n02783161
n02483708
n02110958
n04120489
n03908618
n02423022
n04350905
n04153751
n02444819
n02114548
n07747607
n07614500
n04070727
n04074963
n01616318
n02112706
n02096437
n04228054
n01644900
n01756291
n02442845
n03980874
n02441942
n04149813
n03950228
n01843383
n02910353
n03207743
n04263257
n02099429
n04486054
n02606052
n04238763
n02099601
n02177972
n03584829
n04356056
n03673027
n02086646
n04485082
n02692877
n03761084
n03249569
n04252077
n02092339
n01770081
n02877765
n02129604
n03032252
n13044778
n02607072
n03498962
n02120505
n01534433
n01491361
n07730033
n02098413
n02793495
n02017213
n02100877
n02948072
n02398521
n03498962
n02494079
n04026417
n03259280
n04209133
n02094258
n02028035
n03627232
n03529860
n02077923
n03843555
n03873416
n02116738
n03995372
n02104365
n04347754
n04590129
n03657121
n01774384
n03937543
n07836838
n04127249
n02391049
n04296562
n02492035
n04254120
n04201297
n02115641
n02094258
n03729826
n02090379
n02165456
n02107142
n01518878
n03649909
n01558993
n01843383
n01695060
n02134084
n02101556
n02123045
n03929855
n02110185
n03291819
n02099601
n04443257
n02487347
n01795545
n04458633
n02229544
n03325584
n04086273
n03017168
n01729977
n03388043
n01675722
n02009229
n03126707
n02117135
n03873416
n04332243
n02486410
n03394916
n02480855
n02837789
n03018349
n03998194
n04317175
n01819313
n03291819
n01664065
n02128385
n02417914
n04040759
n01440764
n09468604
n03240683
n07248320
n11939491
n02971356
n02096437
n02101556
n04467665
n03983396
n04146614
n04252077
n03476684
n02777292
n03617480
n04004767
n02102177
n02088632
n07749582
n04264628
n04487081
n02808440
n04399382
n03961711
n04229816
n03977966
n03133878
n03877845
n03995372
n04131690
n02093754
n02110806
n01872401
n02106662
n07836838
n04553703
n02095314
n12620546
n02231487
n02277742
n04456115
n02643566
n02317335
n04008634
n04476259
n04550184
n02107908
n02125311
n03355925
n03769881
n07615774
n02443114
n02167151
n04590129
n12620546
n02177972
n03866082
n07718472
n02102318
n07697313
n03384352
n04330267
n03874293
n03895866
n02444819
n03908714
n02395406
n04355933
n03220513
n04147183
n02099267
n01983481
n01770081
n02095570
n01695060
n02115641
n04355338
n07584110
n02843684
n04023962
n02102480
n04116512
n02094258
n04326547
n02951358
n01784675
n03494278
n03935335
n02106662
n02256656
n03944341
n02105641
n02666196
n03982430
n02814533
n04204238
n07730033
n01807496
n03042490
n02963159
n02504458
n03535780
n04355933
n02009229
n02423022
n01582220
n07614500
n02321529
n03272562
n03642806
n04251144
n02115913
n02107312
n03924679
n02699494
n03908714
n04522168
n09246464
n03617480
n02231487
n02127052
n04335435
n02804610
n02437616
n03249569
n01682714
n02790996
n03742115
n02112350
n02837789
n04371774
n03443371
n02992529
n01688243
n03733281
n07875152
n02105641
n02110958
n02018795
n04482393
n03063689
n02328150
n02109525
n02071294
n02808304
n03530642
n03970156
n01860187
n02102973
n03220513
n03032252
n01797886
n03792782
n02085936
n04487394
n02790996
n01773157
n04367480
n03290653
n03478589
n04542943
n07579787
n02190166
n06785654
n02002724
n01740131
n04033995
n01978287
n02011460
n03937543
n02096437
n01534433
n02978881
n03445924
n07716358
n02093428
n01776313
n02704792
n01687978
n04550184
n02102973
n02165456
n03347037
n01755581
n02111889
n03967562
n01491361
n02437616
n02089078
n02123597
n04507155
n03110669
n03868242
n03874599
n02120505
n03930313
n02165105
n04604644
n03445777
n02099712
n02009229
n04389033
n04371774
n02437616
n04243546
n03794056
n03775071
n04479046
n03796401
n02892767
n03929660
n02133161
n03944341
n03884397
n04589890
n03590841
n02071294
n04263257
n01768244
n02410509
n04465501
n02098286
n02747177
n02105162
n01667114
n02999410
n01560419
n07749582
n01968897
n02130308
n02110806
n02106382
n07590611
n07697537
n04591157
n04462240
n02988304
n03126707
n02727426
n04127249
n02843684
n03179701
n02443484
n04344873
n02280649
n03216828
n12985857
n04548280
n03602883
n03447721
n01694178
n02415577
n02699494
n03085013
n02895154
n04371774
n03495258
n03791053
n02641379
n02980441
n02950826
n02110063
n03788195
n01693334
n02606052
n07742313
n02113624
n03874293
n04209239
n03388043
n02927161
n03944341
n04579432
n03759954
n02101388
n01978287
n03443371
n02129604
n01693334
n07742313
n01770393
n06785654
n03126707
n02058221
n03721384
n02093647
n07684084
n03775546
n03494278
n03131574
n02823428
n02111889
n04208210
n02190166
n04228054
n03888257
n02169497
n01770081
n02974003
n03637318
n02089078
n02117135
n02457408
n02606052
n03877845
n02776631
n01882714
n03325584
n02095314
n02102973
n02236044
n02090622
n02797295
n01775062
n02098286
n03498962
n02128385
n02783161
n07768694
n03337140
n01751748
n04447861
n02172182
n03743016
n03599486
n04380533
n07892512
n03598930
n02085782
n01685808
n02879718
n01491361
n04273569
n02441942
n04553703
n03649909
n03141823
n02115641
n04372370
n04265275
n04493381
n06596364
n02825657
n02480495
n02097298
n03532672
n01531178
n03843555
n03770679
n02346627
n02127052
n03297495
n02869837
n02106166
n01440764
n02510455
n02095570
n02177972
n03347037
n01978455
n02488702
n02791124
n04229816
n01675722
n03630383
n01930112
n04005630
n04039381
n03950228
n04592741
n01914609
n02129165
n01871265
n03902125
n01689811
n03534580
n01945685
n01773549
n02089867
n03788195
n02788148
n02113023
n03534580
n04592741
n02797295
n03017168
n04355933
n02097209
n02167151
n04026417
n03271574
n02105251
n04004767
n02108000
n04350905
n02106662
n03201208
n03126707
n01443537
n02837789
n02165456
n03796401
n02870880
n02641379
n01622779
n02113023
n07880968
n02165456
n03840681
n03372029
n04044716
n03840681
n03692522
n03992509
n02085620
n03530642
n02113186
n02086079
n07614500
n09468604
n03602883
n09468604
n04270147
n04146614
n02892201
n03958227
n03832673
n02268443
n02236044
n01494475
n02009912
n01532829
n02093754
n03404251
n03770439
n07734744
n04252077
n07714571
n02120079
n01665541
n02123394
n03240683
n04264628
n02457408
n07614500
n02124075
n03425413
n03133878
n07930864
n03160309
n02484975
n02086240
n02978881
n04404412
n02643566
n02494079
n02749479
n02114855
n02106166
n02114712
n03662601
n07583066
n02396427
n02108089
n04335435
n03017168
n02113186
n04493381
n02909870
n03075370
n03627232
n03794056
n01734418
n02951358
n02457408
n02883205
n02917067
n03250847
n02804610
n02110958
n02088364
n03891251
n02641379
n02098105
n02113624
n02027492
n02066245
n02168699
n06359193
n03627232
n09229709
n02749479
n04355338
n04252225
n02939185
n01632777
n02395406
n02219486
n02988304
n01518878
n03891332
n02114548
n02892767
n01491361
n03933933
n02795169
n09472597
n07579787
n03032252
n02093754
n13054560
n03891251
n02105505
n02132136
n07873807
n02640242
n04461696
n04613696
n09468604
n02113186
n02493509
n04553703
n01968897
n04296562
n03467068
n03763968
n04209239
n02219486
n03888257
n01871265
n03325584
n03272562
n03854065
n01558993
n03670208
n01665541
n03325584
n01695060
n02457408
n02797295
n02950826
n02099429
n03291819
n02939185
n03976467
n02120079
n02879718
n04579145
n04120489
n01632458
n02009912
n04328186
n06874185
n02398521
n02488291
n02107312
n03026506
n02119022
n01843383
n03657121
n03062245
n07584110
n02091032
n03476991
n02013706
n02607072
n02113712
n03788365
n04355338
n04428191
n04442312
n01753488
n12620546
n03417042
n02108089
n07871810
n03930313
n04019541
n04074963
n02408429
n02817516
n01955084
n02747177
n09472597
n03866082
n02099267
n03782006
n03998194
n02823428
n04487081
n03956157
n03854065
n02002556
n01440764
n02093256
n02229544
n02109047
n03160309
n02825657
n02423022
n03016953
n04179913
n01860187
n02107574
n06359193
n02088094
n04065272
n02088632
n02130308
n03769881
n02966193
n06794110
n07590611
n03924679
n04153751
n02112706
n02509815
n04335435
n04579432
n02815834
n02361337
n02123159
n03133878
n02457408
n02092002
n04347754
n03775071
n03498962
n02101388
n03447447
n02443114
n04039381
n02791124
n02104365
n01776313
n04442312
n03584254
n02094258
n02086646
n04370456
n01797886
n03724870
n01775062
n02687172
n02091244
n03124043
n01632777
n02787622
n01930112
n01664065
n01734418
n02110063
n01818515
n04336792
n03793489
n02097298
n02017213
n04273569
n03485794
n02002724
n04507155
n11879895
n02087046
n02486410
n04033995
n03345487
n03692522
n04347754
n01986214
n03873416
n03483316
n02101556
n03425413
n03000684
n02114367
n02113712
n03535780
n02454379
n03788195
n02086240
n02095889
n02422699
n03400231
n03690938
n01494475
n02099601
n04612504
n07753275
n03814639
n02165105
n03314780
n03478589
n01796340
n02105641
n01847000
n01877812
n02447366
n03929660
n02992529
n02088094
n07745940
n04522168
n04069434
n12620546
n03673027
n03998194
n03028079
n04252225
n02033041
n01843065
n07720875
n02099712
n02939185
n02098413
n04296562
n03796401
n01729977
n02859443
n02105251
n02860847
n04209133
n02108000
n04235860
n02782093
n02814533
n01614925
n01484850
n01669191
n04525305
n07716906
n02119022
n03721384
n02259212
n03976657
n02415577
n04392985
n04023962
n02793495
n04592741
n02233338
n02777292
n01514859
n03127747
n04548362
n03947888
n03792782
n03445777
n04592741
n02165105
n02105056
n04525038
n02395406
n02129604
n09399592
n09229709
n06785654
n03045698
n04380533
n02835271
n07715103
n03692522
n02950826
n02259212
n03773504
n04560804
n04355933
n02167151
n01695060
n02091635
n07745940
n03958227
n03642806
n01537544
n03733131
n02028035
n02667093
n03617480
n02443484
n04532106
n06874185
n02730930
n01632458
n04067472
n09246464
n02264363
n09229709
n02708093
n03804744
n03042490
n03347037
n02120079
n02098105
n02092339
n03017168
n02099429
n03160309
n12267677
n03642806
n07579787
n02817516
n01770393
n01667114
n04417672
n04515003
n02091134
n02090721
n04428191
n02086646
n04536866
n03000684
n01692333
n04591157
n03967562
n03743016
n04579145
n02110063
n04040759
n02074367
n03100240
n04552348
n02916936
n03485407
n02489166
n03271574
n01677366
n02457408
n02966193
n04152593
n01491361
n01748264
n03530642
n03840681
n01768244
n02226429
n03642806
n02002556
n03598930
n01631663
n03787032
n03954731
n04462240
n03680355
n02013706
n03271574
n04357314
n02397096
n01697457
n02441942
n03661043
n01985128
n03658185
n02099267
n04522168
n13037406
n02108422
n04111531
n01728920
n02085620
n01644373
n02101388
n02795169
n02100877
n04509417
n02088466
n02769748
n02965783
n03649909
n03179701
n01742172
n01877812
n03769881
n03000247
n02106662
n03888605
n03937543
n04346328
n03976467
n03187595
n15075141
n03062245
n03710721
n04009552
n02447366
n02107574
n03970156
n03991062
n02098413
n07892512
n03529860
n03935335
n01531178
n02835271
n03787032
n02101388
n02085620
n02701002
n11939491
n01698640
n02233338
n11879895
n02101556
n07753592
n02441942
n07871810
n01914609
n02132136
n02097658
n07720875
n02259212
n01560419
n02510455
n04200800
n04254777
n01616318
n04522168
n02100236
n04356056
n07615774
n03160309
n02666196
n02169497
n03207941
n07831146
n04131690
n04136333
n02895154
n02002556
n04311174
n04243546
n13052670
n02895154
n03527444
n02090622
n04429376
n01667778
n01871265
n01608432
n03424325
n02111129
n02094114
n03706229
n02883205
n07590611
n02948072
n01770393
n03290653
n02128925
n02110185
n02110341
n01796340
n02342885
n02487347
n04310018
n02091635
n02708093
n03016953
n02264363
n04372370
n03272562
n02089078
n03764736
n02963159
n03874599
n02641379
n01984695
n02802426
n02346627
n03773504
n04273569
n02111889
n03498962
n03141823
n04350905
n02095314
n04335435
n03388183
n01537544
n03947888
n02106662
n03854065
n01484850
n02086079
n07714571
n01768244
n04070727
n03494278
n03584829
n03837869
n01945685
n03733281
n04429376
n02099601
n04554684
n04509417
n01943899
n07565083
n04515003
n03777754
n03594734
n03777568
n03840681
n02536864
n04442312
n03127747
n03445777
n04579432
n03063599
n02113978
n03787032
n01742172
n02487347
n04486054
n02093859
n04162706
n02328150
n03482405
n04517823
n07615774
n04192698
n02808304
n02037110
n04254120
n02490219
n07684084
n02094258
n02814533
n02174001
n07753275
n04033901
n02481823
n03770679
n03134739
n01560419
n04275548
n01667778
n01737021
n01806567
n04456115
n07613480
n01737021
n03761084
n07753592
n04461696
n04336792
n02137549
n02100735
n04005630
n02112706
n12144580
n03785016
n03372029
n04486054
n02117135
n01667778
n02927161
n07760859
n03924679
n04040759
n07742313
n02106030
n03388549
n03950228
n01768244
n07734744
n04479046
n02791124
n01807496
n04357314
n01484850
n03888605
n04277352
n04326547
n03876231
n07584110
n02092002
n01667778
n01682714
n02091831
n02108089
n02951585
n02219486
n02090379
n01950731
n02089867
n01828970
n03837869
n01978287
n02092002
n02814533
n01664065
n12768682
n07930864
n04357314
n02802426
n02089867
n03063689
n03535780
n04591713
n03796401
n02877765
n02823428
n07717410
n04612504
n03642806
n04033995
n02095889
n04074963
n01855032
n04270147
n03110669
n03255030
n03530642
n10148035
n07745940
n02490219
n02074367
n02097130
n02106662
n03891332
n02089973
n04209239
n04548280
n04154565
n02037110
n02113978
n02115913
n02018795
n02823428
n02091032
n03874293
n04146614
n04560804
n04522168
n07717556
n04311004
n02105855
n02109961
n02134084
n02930766
n01855032
n02480495
n02509815
n02100877
n02795169
n02125311
n01734418
n03124043
n02165105
n02840245
n03759954
n01622779
n02442845
n04328186
n04152593
n04554684
n02965783
n02510455
n03445777
n07615774
n12998815
n07717410
n03742115
n04264628
n02165456
n04074963
n02098105
n02132136
n01872401
n02441942
n04560804
n02422699
n02802426
n07768694
n01518878
n02096051
n02786058
n02483708
n02099601
n04435653
n01630670
n02177972
n13052670
n02028035
n01978455
n13054560
n02165105
n04317175
n01739381
n02168699
n02483362
n02342885
n02007558
n01798484
n04579145
n02361337
n02643566
n04147183
n04208210
n01798484
n02488291
n03773504
n03662601
n02483708
n01986214
n04005630
n02165105
n02009229
n03814639
n04462240
n02090379
n03786901
n01734418
n01770081
n02814533
n03445777
n03196217
n02747177
n02493793
n03970156
n02165105
n03930313
n02169497
n04204347
n02113712
n02979186
n02085782
n04265275
n01694178
n09229709
n04317175
n07760859
n02865351
n03841143
n01601694
n02128925
n03908714
n01775062
n01770393
n02877765
n03902125
n01744401
n02094114
n03271574
n04372370
n07697313
n04229816
n02692877
n01537544
n04153751
n02490219
n09193705
n02951585
n01986214
n02865351
n02105855
n04392985
n03825788
n04265275
n12267677
n03787032
n02088632
n04507155
n03481172
n03868242
n02797295
n02500267
n02480855
n03956157
n02948072
n03792782
n03478589
n04590129
n01729322
n02105056
n02837789
n03393912
n02319095
n02100735
n02093256
n03782006
n03388043
n03891251
n02391049
n02167151
n03045698
n01534433
n04067472
n02105641
n04423845
n01983481
n03160309
n02802426
n09428293
n02106382
n04325704
n02444819
n01755581
n02895154
n02129604
n02910353
n07873807
n07716358
n03325584
n02104029
n01883070
n02408429
n02992529
n02111277
n04141327
n02098105
n12998815
n04133789
n02837789
n02321529
n04041544
n03131574
n01968897
n03721384
n09428293
n03637318
n04536866
n01641577
n01828970
n02794156
n02105855
n02825657
n02100735
n02487347
n02281406
n04550184
n02804414
n03594734
n01806143
n09256479
n04204238
n03544143
n04350905
n04380533
n03459775
n04509417
n02480495
n04204347
n03967562
n03666591
n03481172
n03179701
n01728920
n09835506
n02509815
n11939491
n02125311
n01774750
n01924916
n04380533
n03496892
n02510455
n02808304
n04328186
n04009552
n02105505
n02454379
n04507155
n01592084
n04118538
n01644373
n02965783
n03742115
n07715103
n03733281
n02268853
n03967562
n02107574
n04597913
n01798484
n04562935
n04584207
n07717556
n02110958
n04597913
n07693725
n02086910
n04136333
n01843383
n02794156
n02101556
n04192698
n02389026
n03250847
n01817953
n01682714
n01491361
n06874185
n02093647
n02483362
n04435653
n01667778
n04548280
n03133878
n02840245
n01950731
n04229816
n01817953
n04346328
n07871810
n04493381
n03476684
n01882714
n03100240
n02105505
n03623198
n02128925
n07749582
n03124170
n03042490
n01531178
n03180011
n02276258
n03538406
n01843383
n01833805
n02109047
n01735189
n01514859
n02396427
n01537544
n07920052
n02077923
n03661043
n03445924
n01514859
n04418357
n01630670
n02256656
n02980441
n01985128
n03787032
n09399592
n02096177
n03095699
n02791270
n02002556
n02099429
n02687172
n04487081
n03775071
n04120489
n02100877
n04131690
n02111277
n04008634
n03796401
n03690938
n03496892
n02487347
n02098286
n04398044
n02281787
n02641379
n03179701
n03110669
n03314780
n03388549
n02441942
n02091831
n03933933
n07584110
n02510455
n02437312
n02417914
n02110806
n02667093
n03384352
n03529860
n04209239
n04254120
n04310018
n07615774
n01984695
n03188531
n02701002
n01749939
n03494278
n04317175
n02480855
n04553703
n04591713
n02093991
n03496892
n03498962
n02870880
n07734744
n02090622
n02095889
n03089624
n03814906
n01443537
n03775546
n03895866
n04254680
n02093991
n02094433
n03709823
n04133789
n04356056
n09421951
n03781244
n03970156
n03709823
n03873416
n03950228
n03425413
n09229709
n03141823
n03290653
n01675722
n04259630
n04613696
n03838899
n01443537
n03617480
n02112350
n01774384
n02108915
n03876231
n02099429
n02226429
n01770393
n01694178
n06794110
n03220513
n11879895
n03124043
n02105855
n02486410
n04004767
n09835506
n07745940
n02097047
n03721384
n03133878
n02093647
n06794110
n04317175
n02134418
n02692877
n02128757
n03794056
n02727426
n01484850
n02514041
n02106382
n02097298
n04613696
n02701002
n03770439
n01855672
n02328150
n03944341
n09468604
n02281787
n04554684
n02098105
n03179701
n02174001
n02109961
n03742115
n04562935
n03729826
n04133789
n04086273
n01514859
n04597913
n04476259
n01914609
n02095889
n03125729
n04366367
n02443114
n02098413
n03599486
n01614925
n04483307
n02105412
n01631663
n02500267
n02095889
n04264628
n07753592
n02123597
n03884397
n04579432
n03938244
n07831146
n02101006
n02092002
n02006656
n02106166
n04596742
n03770679
n04149813
n04599235
n04332243
n03379051
n01776313
n01806567
n09468604
n04554684
n02747177
n04243546
n03838899
n01855032
n01917289
n02226429
n03706229
n03843555
n07615774
n02268853
n04141975
n01728920
n01531178
n03838899
n09472597
n01847000
n13133613
n04522168
n02088466
n09193705
n03445924
n02092002
n02640242
n07742313
n04612504
n01986214
n09229709
n02488291
n02643566
n03891251
n09468604
n01983481
n07920052
n03770679
n02097130
n03769881
n03498962
n07697537
n02422699
n04254777
n03452741
n04152593
n01616318
n02259212
n03690938
n04501370
n04355933
n01498041
n04023962
n02488702
n04443257
n02091134
n02978881
n02091244
n01756291
n04120489
n04141327
n02504458
n01667778
n02108089
n03843555
n02951358
n01807496
n02102318
n07745940
n06794110
n02363005
n07753113
n01644900
n02363005
n01484850
n02105056
n02107312
n03482405
n01945685
n02823750
n02090622
n03710193
n03379051
n07873807
n04263257
n03062245
n02088632
n04208210
n04141327
n07932039
n02951358
n02790996
n02777292
n02804414
n03970156
n04501370
n02641379
n01774750
n01498041
n04116512
n02233338
n03706229
n02097047
n07697537
n02444819
n04153751
n02398521
n03908714
n02088632
n02113712
n02132136
n04258138
n03425413
n02397096
n02443484
n06785654
n04367480
n03717622
n03721384
n02981792
n01955084
n02090721
n02879718
n02113712
n02417914
n02093859
n02009912
n02006656
n01770393
n02701002
n01818515
n12998815
n03532672
n03666591
n06794110
n03110669
n03220513
n03976467
n02396427
n03888257
n02514041
n02837789
n07711569
n07613480
n03075370
n07684084
n02708093
n02099267
n03131574
n01843383
n02091032
n03796401
n04243546
n04389033
n03014705
n03868863
n01883070
n01744401
n12267677
n03876231
n01847000
n02219486
n01955084
n03089624
n04350905
n02119022
n04004767
n02793495
n03404251
n03014705
n01677366
n03690938
n04162706
n04552348
n01985128
n07873807
n02526121
n07932039
n02102973
n02108000
n04493381
n02097130
n04086273
n03832673
n02088364
n02119789
n02113712
n07716906
n03792972
n02097658
n02226429
n09428293
n02116738
n07753113
n02777292
n02017213
n04209239
n02077923
n02509815
n07716906
n02843684
n02417914
n07920052
n09288635
n01980166
n09193705
n03124043
n03944341
n02219486
n02127052
n04147183
n02106550
n04550184
n01728572
n02102480
n04371430
n03983396
n02815834
n04264628
n04356056
n02096294
n02106382
n07579787
n02536864
n03630383
n02114367
n03781244
n03271574
n01739381
n04008634
n03594734
n03201208
n02058221
n02134418
n10148035
n01631663
n02526121
n02002556
n02095314
n02098105
n04509417
n04612504
n02497673
n01580077
n01697457
n03109150
n09468604
n03874293
n02109961
n02110627
n02892201
n02088364
n03100240
n03532672
n02892767
n07860988
n03337140
n02951358
n03691459
n03134739
n02422106
n02788148
n03814906
n02444819
n06785654
n04612504
n02123394
n03042490
n04116512
n03527444
n09288635
n01983481
n09332890
n07715103
n01828970
n04037443
n03089624
n02504458
n01917289
n03223299
n02119022
n02206856
n04252077
n02012849
n02037110
n01751748
n07930864
n04131690
n07697313
n02841315
n03950228
n04254680
n04141975
n03983396
n02124075
n12998815
n03709823
n01689811
n02966687
n03590841
n02002556
n01770393
n04532106
n02109961
n04286575
n02910353
n03785016
n04125021
n04370456
n02115641
n03874293
n13054560
n02480855
n02105855
n01773157
n02108915
n02108000
n03764736
n02231487
n04507155
n01744401
n04325704
n02526121
n04371774
n01582220
n02088094
n12267677
n07880968
n04266014
n02417914
n04270147
n07684084
n01443537
n03866082
n04179913
n02422106
n07697537
n02687172
n03803284
n01692333
n04192698
n02481823
n02115913
n03404251
n02138441
n02999410
n03388183
n02317335
n03759954
n04335435
n03814906
n03692522
n13052670
n03729826
n02790996
n02012849
n03935335
n01667114
n07836838
n01580077
n07615774
n03535780
n02226429
n03903868
n02999410
n03532672
n03498962
n01531178
n03868242
n02128757
n03793489
n01755581
n09332890
n02087394
n03920288
n02128385
n03495258
n02114712
n03976467
n04259630
n02794156
n01774384
n02091467
n04467665
n02091635
n04579432
n03599486
n02328150
n04147183
n02486410
n04252077
n02395406
n07584110
n03075370
n02138441
n02105505
n04311004
n04086273
n04435653
n04467665
n04201297
n01689811
n03345487
n02090379
n02776631
n04023962
n02114367
n13044778
n02917067
n07711569
n03452741
n01734418
n03272010
n01744401
n09399592
n02114855
n03594734
n02860847
n04141076
n02133161
n03804744
n01924916
n04532106
n01770081
n02096177
n02797295
n03188531
n04204347
n03063689
n02841315
n02276258
n02086646
n03775071
n03947888
n02137549
n03063599
n02074367
n02051845
n03832673
n03982430
n01776313
n02102177
n02106550
n03929855
n04201297
n01592084
n02906734
n03124043
n03598930
n07590611
n02091635
n02128757
n04204347
n01698640
n01955084
n03891251
n02823428
n03417042
n03666591
n03958227
n03895866
n02690373
n01667778
n02692877
n03532672
n07920052
n03924679
n03085013
n07697313
n02444819
n02992211
n07248320
n02950826
n02077923
n03786901
n03016953
n02111889
n02892201
n02786058
n02106382
n02877765
n02687172
n02747177
n02105412
n07753113
n03207743
n04418357
n02009912
n01580077
n01616318
n04273569
n01945685
n03706229
n04326547
n02105056
n13037406
n03459775
n02526121
n02837789
n04346328
n01819313
n02321529
n03916031
n03026506
n02105251
n04599235
n01518878
n02110627
n01984695
n01943899
n04069434
n02113023
n01531178
n03947888
n03733805
n03873416
n02087394
n04273569
n03690938
n02281787
n04515003
n01630670
n03445924
n04317175
n02395406
n02018207
n02128385
n03255030
n02169497
n03717622
n03602883
n02488291
n01622779
n03992509
n02877765
n03873416
n01855672
n03478589
n03404251
n07584110
n03980874
n03476684
n02138441
n02977058
n02105162
n03485407
n01616318
n02051845
n03793489
n01768244
n04209239
n03930630
n04532106
n03259280
n02841315
n02966193
n03980874
n04532106
n02981792
n01776313
n04355338
n02110341
n03697007
n02454379
n02655020
n03841143
n07584110
n02123394
n03255030
n07711569
n03724870
n03110669
n03133878
n01641577
n01644373
n04049303
n07768694
n03075370
n02823428
n02640242
n02104365
n04009552
n02129604
n03733805
n02281787
n04208210
n04067472
n01514859
n03384352
n03544143
n03355925
n01694178
n03950228
n07717556
n02317335
n02113799
n07583066
n02999410
n07760859
n02410509
n02013706
n04285008
n04296562
n03196217
n03000134
n02110627
n04442312
n02787622
n02443484
n02137549
n03337140
n03594734
n02879718
n02415577
n02092339
n03450230
n02102040
n07747607
n03085013
n03026506
n06874185
n02493793
n03532672
n01644900
n03792782
n04004767
n02966193
n01784675
n13037406
n03481172
n03775546
n04033995
n02101556
n03666591
n04317175
n01882714
n02640242
n03063689
n04560804
n01860187
n04376876
n04523525
n01833805
n02169497
n03314780
n02988304
n02168699
n04044716
n02109961
n01770393
n01531178
n04152593
n02106662
n04389033
n01735189
n07871810
n04277352
n02077923
n03347037
n02111500
n02088238
n03534580
n03314780
n02791270
n04548280
n03109150
n03944341
n02137549
n04523525
n04592741
n04266014
n01978455
n02091032
n04398044
n02113624
n02408429
n04417672
n04009552
n02231487
n04599235
n07248320
n04086273
n04606251
n03532672
n02112137
n09256479
n04523525
n01697457
n03662601
n04070727
n02098286
n02017213
n02177972
n01689811
n03697007
n03874599
n02110185
n04417672
n04310018
n02130308
n04252077
n03534580
n01860187
n03814906
n02442845
n04487394
n02090379
n01930112
n07860988
n02869837
n02231487
n03956157
n03482405
n02489166
n02107683
n01677366
n01806143
n03775071
n02825657
n02783161
n01622779
n02268853
n04044716
n04540053
n02107142
n04487394
n03376595
n01496331
n02815834
n02099267
n04229816
n07615774
n03272562
n01855672
n02804414
n01818515
n02704792
n02483708
n01629819
n03393912
n03794056
n01644373
n02951585
n02497673
n02415577
n01871265
n07718747
n02966193
n03017168
n01530575
n02319095
n02090379
n03297495
n03388183
n03825788
n01798484
n03814906
n02027492
n02111889
n04118538
n02356798
n01983481
n01986214
n02808440
n02486261
n01751748
n03777568
n04335435
n07720875
n03633091
n03534580
n04141975
n04162706
n03998194
n07579787
n02676566
n03483316
n01693334
n04238763
n02071294
n04493381
n07875152
n01753488
n02091635
n03314780
n03291819
n03924679
n12768682
n06794110
n03291819
n03544143
n01698640
n06785654
n03782006
n04154565
n02012849
n07930864
n03017168
n04133789
n02138441
n03769881
n03773504
n07930864
n04589890
n01806143
n03207743
n02097474
n01582220
n02939185
n02640242
n02981792
n03657121
n02106166
n02666196
n01751748
n03188531
n01768244
n04429376
n02690373
n01806567
n02319095
n02107683
n04550184
n04350905
n01797886
n04447861
n04485082
n03443371
n04229816
n03443371
n04579145
n03125729
n03942813
n03649909
n02119022
n02105251
n12144580
n02992529
n01518878
n02977058
n01968897
n02233338
n03642806
n01833805
n09421951
n01985128
n01824575
n04286575
n04330267
n02106166
n07875152
n02094258
n02123394
n01537544
n04493381
n02102480
n02086240
n02085782
n03786901
n04254680
n03721384
n04311174
n04487394
n02099267
n03207941
n02883205
n02672831
n04008634
n03868863
n04251144
n03529860
n01608432
n02093647
n02028035
n03982430
n01687978
n01632458
n03125729
n02389026
n02085782
n06359193
n03459775
n01773797
n02093754
n04275548
n02120505
n03450230
n03854065
n02096177
n02112706
n02089867
n02138441
n02504458
n02865351
n04479046
n03180011
n03223299
n02804414
n02134418
n01751748
n02483708
n01692333
n02992211
n03404251
n07716906
n01924916
n07695742
n02112137
n02692877
n02423022
n02860847
n01877812
n04326547
n02051845
n01855672
n02667093
n01829413
n07760859
n01630670
n02869837
n02086910
n01740131
n02398521
n03016953
n02091134
n02096585
n02093647
n03220513
n07716906
n03188531
n03627232
n03690938
n02788148
n04254680
n02493509
n02098413
n03532672
n02111889
n01843065
n02666196
n02457408
n03785016
n02097474
n02704792
n03868863
n04540053
n03529860
n04238763
n03658185
n03970156
n04285008
n02526121
n02096585
n03814639
n03180011
n02480855
n03594945
n02101006
n04517823
n12985857
n02104029
n04111531
n01729322
n03773504
n01580077
n02098413
n04065272
n02085936
n02093859
n02104365
n09472597
n02865351
n04254680
n02951358
n02281787
n01496331
n02093256
n01910747
n04509417
n02417914
n02389026
n03666591
n06794110
n03786901
n07695742
n02133161
n04540053
n02782093
n01871265
n03690938
n02028035
n02106550
n02494079
n07831146
n01498041
n02130308
n04483307
n01820546
n02105056
n04487081
n09332890
n02437312
n03692522
n02871525
n02326432
n07749582
n02992211
n02497673
n03544143
n13052670
n13133613
n07714571
n03868863
n02606052
n02111129
n03874293
n02190166
n02226429
n02363005
n02443484
n04579145
n03425413
n03018349
n03452741
n02791124
n02346627
n02128757
n03998194
n03530642
n01592084
n01917289
n03764736
n07615774
n03977966
n02877765
n02089973
n01986214
n01872401
n03942813
n01689811
n02834397
n07714990
n02486261
n02397096
n04467665
n02909870
n04517823
n04131690
n01728572
n01729322
n01797886
n02108551
n03866082
n01677366
n02979186
n03710637
n03933933
n03930313
n03899768
n03763968
n02326432
n02107142
n02066245
n04099969
n07860988
n07695742
n01924916
n03895866
n03788365
n01632777
n02787622
n01768244
n01768244
n03146219
n06785654
n02110341
n03400231
n02123045
n02025239
n03670208
n01784675
n03982430
n04485082
n03208938
n01990800
n03930313
n02708093
n04597913
n01796340
n02100236
n01608432
n01828970
n01614925
n03400231
n01631663
n03759954
n01872401
n01917289
n02690373
n01664065
n03016953
n04376876
n01664065
n02950826
n04557648
n02793495
n02111129
n01968897
n03781244
n07871810
n02641379
n02097209
n02109047
n03065424
n03838899
n04501370
n01753488
n04049303
n02097047
n04311004
n03538406
n03666591
n02017213
n02093647
n04409515
n03207743
n01843065
n03697007
n03291819
n03197337
n03000247
n02443484
n03891251
n02085782
n04033901
n03658185
n01819313
n03388549
n02606052
n04612504
n01582220
n02883205
n04467665
n03535780
n04326547
n03895866
n02095889
n02123045
n03777568
n01631663
n02999410
n07717410
n02837789
n04461696
n07720875
n03141823
n03216828
n04589890
n02105641
n03196217
n01797886
n07742313
n02396427
n04532106
n02655020
n02437312
n03028079
n02037110
n03788365
n01978455
n02483362
n02444819
n01580077
n04347754
n01728572
n03063689
n02106662
n02672831
n03895866
n04560804
n04540053
n02233338
n03777754
n02788148
n09472597
n02484975
n04404412
n02087046
n02089078
n03255030
n03095699
n07714990
n02641379
n03218198
n02481823
n01514859
n03337140
n04399382
n02641379
n02129604
n03982430
n04127249
n04125021
n01774384
n01740131
n02325366
n04041544
n02667093
n07836838
n01739381
n02108000
n02277742
n01950731
n03777754
n04310018
n02917067
n02835271
n04515003
n02119789
n02966687
n03085013
n12144580
n02071294
n12998815
n04162706
n03028079
n03218198
n02895154
n04562935
n07613480
n02128925
n03649909
n01629819
n01883070
n02098413
n02002724
n02106382
n01530575
n02113978
n02124075
n04332243
n02655020
n04239074
n01910747
n09399592
n02096051
n03930630
n07693725
n03933933
n03187595
n02281787
n02892201
n02108000
n01687978
n03803284
n07892512
n02074367
n03891251
n03384352
n04409515
n02107574
n01860187
n03529860
n02280649
n02860847
n03325584
n04409515
n03692522
n02089973
n02782093
n03208938
n02980441
n01693334
n01773157
n01729977
n03063689
n02865351
n03459775
n03637318
n04263257
n04604644
n04311004
n02120079
n02112018
n03196217
n01871265
n02804610
n07892512
n03124043
n02219486
n02089973
n02109047
n04040759
n07711569
n04458633
n07720875
n02277742
n01675722
n02119022
n02106030
n03763968
n02105412
n03017168
n03857828
n04346328
n04005630
n03492542
n02480495
n02090622
n03814906
n04004767
n02992529
n02692877
n09332890
n02979186
n01770393
n02129165
n02391049
n07871810
n03355925
n04398044
n07860988
n03961711
n02089973
n03404251
n02395406
n03063689
n04070727
n04552348
n02112137
n02110958
n01753488
n07697537
n04389033
n02783161
n07693725
n04286575
n07753113
n07716358
n03394916
n02093256
n01737021
n07836838
n02268853
n02130308
n02906734
n02134418
n02108000
n01560419
n03131574
n02133161
n03000247
n02279972
n02951585
n03733805
n01677366
n03976467
n03535780
n03938244
n01644373
n02109525
n03649909
n02190166
n01692333
n02910353
n01807496
n03982430
n02974003
n03950228
n01978287
n03720891
n02892767
n02504013
n01855032
n02483362
n02025239
n03868242
n02094114
n02109047
n07749582
n01669191
n03785016
n04041544
n02087046
n03272010
n03447447
n02783161
n03976657
n02087394
n04548280
n01860187
n01689811
n04584207
n04251144
n02113023
n03977966
n03792972
n13054560
n06785654
n07734744
n02115641
n04606251
n02277742
n02794156
n02137549
n04479046
n01753488
n04485082
n02100735
n02869837
n03534580
n02879718
n04525305
n01829413
n03792782
n02109961
n03443371
n02009229
n01744401
n01728572
n02098413
n04311004
n03272010
n02095570
n01632458
n02783161
n01644900
n01601694
n01608432
n04335435
n02086910
n04418357
n02097658
n03124170
n04228054
n02494079
n07754684
n02493793
n02165105
n02133161
n01847000
n03394916
n02105162
n01950731
n03970156
n02233338
n03045698
n02099601
n11939491
n04467665
n04346328
n04347754
n03063689
n03100240
n02127052
n03887697
n09428293
n02361337
n02606052
n04590129
n02692877
n03796401
n04532106
n03538406
n07747607
n01978455
n07717556
n02894605
n03134739
n04243546
n03903868
n02879718
n01824575
n01877812
n01770081
n04525305
n01773549
n02099712
n01774384
n02823428
n01860187
n03461385
n04366367
n02167151
n02454379
n03777568
n01833805
n03761084
n04542943
n02504458
n02033041
n02095314
n03527444
n02280649
n02123045
n01644373
n12998815
n03792972
n02480495
n03417042
n02091467
n02415577
n12985857
n03544143
n04370456
n02110806
n03676483
n03602883
n03538406
n04201297
n03929855
n02504013
n10565667
n02097130
n03950228
n01675722
n04523525
n02966687
n02504458
n02089973
n01641577
n04330267
n04146614
n01631663
n02978881
n07802026
n04039381
n03485794
n03825788
n04265275
n03141823
n04033995
n03179701
n01986214
n04604644
n02730930
n03920288
n02799071
n04399382
n04023962
n02951358
n02114367
n02074367
n03992509
n03000134
n01824575
n04525305
n02119789
n03899768
n03617480
n02012849
n03814639
n04347754
n04597913
n02113799
n04562935
n03777754
n02687172
n02066245
n02704792
n01751748
n02090622
n03857828
n03777754
n02130308
n02606052
n03483316
n02808440
n02114712
n01774384
n09468604
n03045698
n02107574
n02112706
n03777754
n04209239
n07745940
n02690373
n07584110
n03388549
n03977966
n04584207
n02279972
n02443114
n02493509
n02494079
n03063599
n01774750
n01968897
n01695060
n04380533
n02128757
n09256479
n02909870
n04501370
n03935335
n07693725
n04591713
n03787032
n01498041
n03042490
n02086910
n01855672
n04596742
n02445715
n02859443
n02804610
n03709823
n02488291
n02410509
n03393912
n03498962
n03131574
n03791053
n03763968
n02097130
n03042490
n01641577
n01677366
n01828970
n02096051
n03888605
n02094114
n02892201
n02486261
n03983396
n02133161
n03602883
n03065424
n02749479
n02791124
n01968897
n02797295
n02877765
n01843065
n02892201
n03786901
n02174001
n03133878
n02107908
n04136333
n02437616
n04592741
n04044716
n01773157
n02130308
n02325366
n04591713
n04090263
n03902125
n03670208
n07753113
n03866082
n04201297
n02093859
n02410509
n02823750
n01740131
n03417042
n03874293
n03710193
n02871525
n02091467
n04254120
n02109525
n04404412
n02094433
n11939491
n02107683
n04356056
n02002556
n02168699
n01945685
n04376876
n04033901
n01530575
n03838899
n01776313
n03028079
n03658185
n04310018
n02090379
n02109525
n04376876
n04418357
n04409515
n07583066
n03841143
n02837789
n03494278
n03457902
n02497673
n02504013
n02110063
n02835271
n01491361
n02807133
n02085782
n02088364
n02607072
n02120505
n07718472
n03781244
n02389026
n03026506
n02769748
n02096177
n02840245
n02606052
n03857828
n03837869
n01735189
n02093256
n02112706
n02749479
n04525038
n03982430
n02510455
n02410509
n03680355
n02105505
n03017168
n02120079
n03532672
n03992509
n02009229
n02106166
n02105056
n02422699
n03770439
n03794056
n03777568
n02110806
n01950731
n04371430
n03417042
n03743016
n01729977
n02669723
n02094433
n04251144
n02119022
n01697457
n01682714
n07614500
n02127052
n03042490
n02113799
n04399382
n03794056
n02963159
n02730930
n01592084
n04067472
n02815834
n07753592
n13052670
n07875152
n06785654
n04509417
n03977966
n03345487
n03223299
n04277352
n06794110
n02389026
n07920052
n02100877
n04435653
n04239074
n04069434
n03617480
n01494475
n02672831
n07831146
n02097047
n03814639
n02514041
n02091635
n01687978
n02116738
n01630670
n01695060
n04204238
n04090263
n04081281
n01819313
n02132136
n03787032
n04044716
n15075141
n03954731
n04389033
n02002556
n04591157
n04133789
n04277352
n02641379
n03733805
n04417672
n02403003
n01580077
n03920288
n03673027
n07697537
n07836838
n04243546
n02977058
n07684084
n07697537
n02132136
n03131574
n02093647
n03443371
n03134739
n04550184
n03891251
n02087394
n07697537
n07583066
n04522168
n04493381
n04065272
n02097130
n04467665
n01614925
n03961711
n02802426
n02089078
n02018207
n03947888
n01748264
n02280649
n02002556
n03709823
n01494475
n03485794
n04479046
n02108551
n03325584
n03188531
n02091032
n02259212
n02033041
n03290653
n04033995
n07614500
n02169497
n04553703
n02268443
n09288635
n01843383
n04428191
n03717622
n02268853
n02012849
n02894605
n02134418
n01751748
n02823750
n02177972
n03424325
n02397096
n07753275
n02417914
n03379051
n02096585
n03814639
n03355925
n03127747
n02264363
n03733131
n02481823
n03447447
n04409515
n02066245
n02102318
n03028079
n02107574
n04026417
n02058221
n02106662
n02607072
n01641577
n03376595
n07892512
n11939491
n02488702
n09421951
n01910747
n02364673
n07248320
n03908714
n02939185
n02099601
n03680355
n02095889
n02917067
n04380533
n01592084
n02109525
n02123394
n02236044
n02346627
n12057211
n12620546
n04346328
n01531178
n01735189
n04152593
n04487394
n02123597
n01768244
n02129604
n09193705
n04131690
n02085936
n02088238
n03538406
n03131574
n02110185
n03124043
n03000247
n02107574
n02110958
n03018349
n02930766
n02229544
n02483362
n03887697
n01773797
n02264363
n02088364
n04127249
n02113023
n03146219
n02114855
n04536866
n03770679
n01796340
n03866082
n04380533
n03764736
n07749582
n03658185
n04579145
n01784675
n01644373
n02110063
n02971356
n02494079
n02361337
n02490219
n03803284
n02113624
n02106550
n03814906
n03180011
n01872401
n02730930
n04548280
n02814860
n02105162
n03676483
n01871265
n07716358
n04476259
n03887697
n07697537
n02514041
n04004767
n04371774
n01855032
n01518878
n09835506
n01943899
n03908714
n03400231
n02129604
n02492035
n04252225
n02107312
n03443371
n02950826
n03814639
n02951585
n04265275
n01806567
n03482405
n01882714
n01580077
n02091831
n04266014
n02895154
n04532106
n02999410
n03729826
n03345487
n02105162
n02690373
n04597913
n04325704
n03461385
n01695060
n01818515
n09472597
n01806567
n07754684
n04326547
n02093859
n04049303
n02641379
n03196217
n02088466
n04376876
n02009229
n03929855
n02025239
n03814906
n03291819
n04612504
n03000134
n02837789
n07718747
n03459775
n02281406
n01693334
n02219486
n04266014
n04399382
n01774750
n02980441
n03062245
n04418357
n02841315
n04239074
n02117135
n03908714
n04429376
n02089867
n01641577
n02444819
n04277352
n01443537
n04522168
n02137549
n03770439
n03697007
n07248320
n04523525
n04141975
n04442312
n02979186
n03929855
n03160309
n07613480
n04154565
n03452741
n03063689
n01983481
n03884397
n02687172
n01622779
n01774750
n02096051
n04074963
n03207941
n02107908
n03180011
n04557648
n01491361
n04209239
n02091467
n03930313
n03417042
n02395406
n02112350
n02108915
n02123597
n04125021
n03777754
n09288635
n02066245
n03196217
n04118538
n03733281
n02106550
n02111889
n03720891
n04604644
n03016953
n03249569
n04039381
n02100735
n01582220
n02423022
n03764736
n03109150
n02028035
n02510455
n01735189
n02666196
n02992211
n04356056
n03240683
n01978455
n04579145
n02963159
n09288635
n02442845
n04606251
n02087046
n03344393
n01883070
n03697007
n03891251
n03662601
n02138441
n01753488
n04613696
n01950731
n03485794
n02110341
n02892767
n02492035
n04273569
n04008634
n02095314
n03794056
n09472597
n02802426
n07716906
n03792972
n01872401
n03673027
n02279972
n02910353
n03933933
n03938244
n01558993
n03908714
n01914609
n02101006
n02672831
n04067472
n02526121
n07836838
n02817516
n07742313
n01828970
n04286575
n03649909
n02107683
n02988304
n02165456
n04560804
n01629819
n03814906
n03782006
n02264363
n02909870
n09246464
n02328150
n02730930
n04596742
n03095699
n03146219
n01824575
n03977966
n01807496
n02500267
n02098105
n01796340
n02113978
n02948072
n03089624
n04550184
n07565083
n03529860
n03544143
n02791270
n03775071
n03710721
n13044778
n02504458
n02514041
n03743016
n03483316
n12985857
n03709823
n04465501
n03028079
n04209239
n01807496
n02859443
n04398044
n03337140
n02783161
n02500267
n01644373
n07711569
n03888257
n02655020
n09399592
n03197337
n02007558
n03961711
n04542943
n02116738
n01580077
n02088632
n02096294
n03388183
n02099267
n03445924
n04133789
n04332243
n03201208
n03032252
n02504458
n02979186
n04584207
n03535780
n02229544
n02111500
n04525305
n03197337
n02398521
n02088238
n02364673
n04146614
n02113186
n02391049
n02098286
n04548362
n02009229
n07802026
n07716906
n02111889
n02730930
n01632777
n02099601
n02981792
n03637318
n01735189
n04049303
n02129165
n02443484
n03770679
n04149813
n01622779
n03110669
n01945685
n03937543
n02977058
n02457408
n03041632
n01694178
n03095699
n02085936
n04252077
n03529860
n01978455
n01768244
n06359193
n02107908
n04162706
n03494278
n02009912
n01740131
n03717622
n13054560
n03014705
n02087394
n02093991
n03063689
n02113023
n03733131
n04493381
n03825788
n02643566
n03495258
n06794110
n02280649
n04065272
n02110958
n03452741
n03314780
n01828970
n02871525
n04447861
n02815834
n04417672
n04328186
n02134418
n03788365
n03877845
n04487081
n02500267
n03372029
n03837869
n01968897
n03443371
n12768682
n01685808
n03584829
n02814860
n03485407
n03670208
n01817953
n03026506
n01440764
n01685808
n03691459
n04141076
n04179913
n03670208
n01755581
n03958227
n03388043
n03223299
n02504013
n01773549
n01694178
n02112018
n01739381
n01695060
n01980166
n03788365
n03187595
n02277742
n01669191
n02892201
n02123045
n07747607
n04604644
n04149813
n04074963
n02111277
n02101006
n03961711
n01978287
n03127747
n02129604
n07717410
n02264363
n07802026
n02089973
n02096585
n04243546
n01688243
n02817516
n04596742
n03673027
n02797295
n07753113
n01685808
n02871525
n02093991
n01984695
n07760859
n03032252
n07711569
n02280649
n03761084
n03160309
n03891332
n02883205
n04372370
n04041544
n04552348
n04264628
n04041544
n01910747
n03950228
n02666196
n04204347
n01560419
n04204238
n02236044
n03131574
n04487081
n02018795
n02843684
n03000684
n01667778
n02115641
n04548362
n01943899
n02100877
n02093256
n02018207
n02112137
n03141823
n02093754
n02174001
n04476259
n02480495
n03887697
n02769748
n02002724
n02113978
n02110627
n03874293
n02107574
n02109047
n01855032
n02794156
n03134739
n07742313
n03124043
n02486261
n02992529
n01734418
n02321529
n03047690
n02879718
n02025239
n03131574
n04347754
n03216828
n02264363
n03041632
n02071294
n01914609
n02497673
n02172182
n01667778
n02106550
n02814860
n01773549
n01986214
n02236044
n02009912
n02487347
n01755581
n03623198
n02445715
n06794110
n02085620
n04482393
n01820546
n04579145
n02326432
n07754684
n04111531
n03724870
n02093256
n07711569
n02017213
n01688243
n01669191
n01664065
n02092339
n02108551
n04525305
n03950228
n03929660
n03956157
n03891332
n04493381
n02102973
n03255030
n01990800
n02500267
n02281406
n01824575
n03032252
n02129165
n02356798
n03538406
n02009229
n02097658
n03095699
n03786901
n03743016
n02980441
n07742313
n02106166
n03314780
n02097209
n04037443
n04086273
n03394916
n02037110
n02112018
n03379051
n02951585
n04501370
n04355338
n03874293
n04153751
n07930864
n02930766
n01496331
n04265275
n02256656
n01667114
n03630383
n04591713
n02704792
n03207743
n03854065
n03720891
n07873807
n02120505
n02099849
n04152593
n02100877
n04560804
n03792972
n03733131
n13133613
n02114548
n03000247
n04146614
n04398044
n02325366
n03633091
n09256479
n03617480
n01530575
n03633091
n03018349
n01768244
n02871525
n04040759
n03658185
n03272562
n02447366
n04392985
n02797295
n03903868
n04548362
n07714571
n03884397
n03888605
n02105505
n03666591
n03063599
n03530642
n02097474
n04483307
n04554684
n02978881
n02492660
n03692522
n04589890
n04579432
n02127052
n02112706
n02804610
n02190166
n11939491
n03000134
n01697457
n12620546
n02256656
n01968897
n02950826
n03127925
n02939185
n06596364
n02091134
n03877472
n02113799
n02102973
n02027492
n03498962
n02834397
n07248320
n04286575
n01735189
n02417914
n03690938
n03404251
n01739381
n02099267
n02219486
n02108089
n02206856
n03208938
n03127747
n02279972
n02281406
n02113023
n01601694
n07715103
n02107908
n02120079
n02102318
n02096051
n01990800
n02917067
n03372029
n03538406
n12267677
n03314780
n03903868
n02009229
n02100236
n03759954
n02277742
n03804744
n02966687
n02102318
n09835506
n01484850
n02097047
n02795169
n03673027
n02169497
n03532672
n04067472
n01944390
n02786058
n04019541
n01665541
n04162706
n01695060
n04116512
n03680355
n04548280
n04517823
n02883205
n02869837
n01871265
n01737021
n01496331
n01773797
n04562935
n03617480
n03930630
n04033901
n04270147
n03388183
n02823428
n02090622
n02504013
n04356056
n02510455
n01860187
n02492660
n02879718
n02669723
n15075141
n04263257
n02422106
n04350905
n02105056
n02102973
n03776460
n03857828
n02120505
n02105412
n02643566
n03291819
n04447861
n03938244
n07717556
n02423022
n03450230
n01770393
n04254680
n03530642
n03476991
n03710721
n04116512
n04398044
n02930766
n04370456
n02231487
n04019541
n03476991
n04366367
n02930766
n01728920
n03908618
n07615774
n06794110
n01744401
n04153751
n03187595
n02009912
n02096437
n02018207
n02363005
n07717410
n02939185
n03495258
n03787032
n03920288
n04392985
n02109961
n04325704
n03240683
n01773157
n02317335
n03929660
n02493509
n03920288
n03447721
n02486261
n04562935
n01829413
n01930112
n02104365
n02992211
n04033901
n03710193
n02797295
n01847000
n02100583
n04483307
n03874599
n04275548
n04540053
n01558993
n04560804
n04542943
n01773549
n04317175
n03935335
n07717410
n02165456
n03832673
n01692333
n03788195
n07831146
n03590841
n03840681
n02277742
n09472597
n07614500
n04548280
n03443371
n04532670
n01774750
n04486054
n03127747
n03676483
n02669723
n02017213
n01945685
n02219486
n04599235
n03530642
n04254777
n02111500
n03125729
n01631663
n07880968
n02111277
n01817953
n03776460
n01622779
n03240683
n02906734
n02391049
n01695060
n04023962
n01514668
n04133789
n02871525
n02277742
n02090721
n01693334
n04074963
n07693725
n01873310
n02279972
n02971356
n02071294
n03991062
n02088238
n03538406
n04552348
n02112706
n04229816
n03126707
n01518878
n03903868
n13054560
n04149813
n01828970
n03197337
n02443114
n03255030
n01558993
n03529860
n04069434
n02396427
n03197337
n02356798
n02504013
n02641379
n02017213
n01882714
n01514859
n04429376
n04366367
n04443257
n03075370
n03782006
n02927161
n03899768
n07715103
n03980874
n01514668
n03761084
n01773797
n02120079
n04131690
n07248320
n02133161
n02096051
n13052670
n02979186
n02113023
n03594945
n02123045
n02120505
n02119022
n02493793
n01728572
n03482405
n01980166
n07745940
n01773549
n02123394
n02093754
n03534580
n02174001
n02641379
n01693334
n01983481
n02793495
n04456115
n04141327
n02096585
n01855672
n03223299
n03544143
n02321529
n09193705
n04409515
n02105162
n03775546
n01990800
n02128757
n03769881
n03314780
n03598930
n03452741
n03388183
n03958227
n02236044
n04208210
n07693725
n01945685
n04579432
n02486410
n02791270
n02099429
n02074367
n04208210
n01981276
n03240683
n03425413
n02115913
n03124043
n02002724
n02667093
n03724870
n07730033
n03733281
n04522168
n07717556
n03977966
n03788365
n01484850
n03482405
n03623198
n07892512
n07711569
n03710637
n03376595
n04141975
n02981792
n03804744
n02107312
n03733131
n01739381
n04252077
n03445924
n04599235
n02422699
n03637318
n03673027
n03425413
n02442845
n02325366
n02410509
n02641379
n02165105
n02769748
n02859443
n01806567
n03527444
n02099601
n07715103
n01531178
n04599235
n07697313
n02091244
n04317175
n02823428
n02096437
n02236044
n02190166
n02948072
n01728920
n01728572
n03000684
n03133878
n02017213
n01978287
n03775071
n04479046
n07720875
n06785654
n01843383
n02108089
n02606052
n02794156
n02100583
n12620546
n02412080
n01677366
n03710637
n07753275
n02417914
n04019541
n01697457
n01806143
n03759954
n02115913
n12985857
n03530642
n02133161
n02086240
n02782093
n02259212
n02110806
n03733131
n02096294
n04229816
n06794110
n02699494
n03761084
n01592084
n07695742
n01631663
n03017168
n04350905
n02256656
n04285008
n01984695
n04275548
n01883070
n03047690
n02445715
n02088094
n03223299
n01729322
n03837869
n02102480
n02088364
n02102177
n04265275
n02319095
n02229544
n03759954
n02869837
n04209133
n03291819
n04371774
n02138441
n02417914
n02128757
n02098286
n04591157
n03443371
n03902125
n02422106
n04423845
n04465501
n13052670
n02087394
n04367480
n07742313
n03538406
n03492542
n03868863
n02088632
n01582220
n03876231
n03770439
n02977058
n03457902
n03874293
n03902125
n03929855
n02391049
n03180011
n03956157
n02790996
n02099712
n01980166
n04041544
n02033041
n03976657
n01751748
n02127052
n01494475
n02128385
n04204347
n03690938
n03759954
n02412080
n04204238
n03662601
n02114855
n03788365
n02104029
n02101556
n01737021
n09288635
n02096177
n02492035
n04238763
n03393912
n04149813
n02398521
n01742172
n02130308
n01534433
n04404412
n02107683
n02708093
n04209239
n07715103
n07718747
n04462240
n02510455
n02098105
n02277742
n02096437
n02802426
n02486261
n02091134
n03272010
n01491361
n04604644
n02640242
n03692522
n02229544
n07720875
n04606251
n04201297
n11939491
n02088364
n02655020
n03657121
n02112350
n02326432
n03445777
n02028035
n04326547
n03400231
n02091032
n03710193
n01742172
n01806567
n03485407
n03450230
n01735189
n02319095
n03467068
n04458633
n03394916
n02500267
n04525038
n02112137
n02107908
n12768682
n02119789
n03662601
n07860988
n04584207
n07932039
n03062245
n07745940
n03085013
n04465501
n02483708
n03379051
n01631663
n01773157
n02364673
n02917067
n02488702
n02105412
n02423022
n03868242
n02018207
n02113624
n04041544
n04548280
n03483316
n03444034
n02125311
n02281406
n04041544
n03223299
n03602883
n12144580
n04192698
n07831146
n01748264
n02096177
n01798484
n03075370
n01807496
n04479046
n03457902
n02504013
n02097047
n07583066
n02979186
n03595614
n04286575
n09246464
n02981792
n03220513
n02090379
n02037110
n02009912
n07860988
n04435653
n02486261
n02129604
n01491361
n04579432
n02165456
n03259280
n01860187
n03796401
n02356798
n01828970
n02206856
n03983396
n02783161
n03134739
n02823428
n04371430
n04118776
n02106166
n02988304
n01770081
n04465501
n03447447
n03976467
n02977058
n02058221
n02280649
n03445777
n03884397
n01797886
n03240683
n03485794
n02974003
n04548280
n02168699
n07716906
n02002556
n01632777
n02111129
n02492035
n02123159
n03424325
n02231487
n01641577
n07873807
n02363005
n02100877
n03777568
n01530575
n03998194
n01829413
n02480855
n09288635
n02321529
n02509815
n03482405
n04493381
n02319095
n03223299
n03388549
n02113186
n02093859
n07718747
n01855032
n10148035
n07753113
n04154565
n02423022
n04179913
n02486410
n02106382
n02033041
n02483708
n01537544
n02123597
n03240683
n04026417
n02108422
n09399592
n02104365
n03794056
n01776313
n02787622
n03854065
n01729977
n02127052
n03942813
n02109047
n03133878
n03775071
n02268443
n04118776
n02009912
n02111889
n04542943
n03759954
n03633091
n03124043
n03016953
n02133161
n02106030
n01773797
n03887697
n04501370
n04120489
n02096051
n01682714
n03133878
n02992211
n01795545
n02033041
n04285008
n02113978
n02006656
n01768244
n02837789
n01622779
n02091831
n02992529
n03929660
n02493793
n03447447
n02013706
n03478589
n07615774
n03530642
n02410509
n01968897
n04252077
n03976467
n07871810
n01697457
n04200800
n01806567
n03998194
n03721384
n02107683
n02950826
n02834397
n02978881
n02106166
n02098413
n04204238
n04328186
n01943899
n03494278
n01798484
n07714990
n02105056
n04033995
n03207743
n03459775
n02704792
n03379051
n04372370
n01855032
n03124170
n04039381
n04355338
n01774384
n03016953
n02486261
n01632777
n02319095
n02106550
n03476684
n01644900
n03729826
n03047690
n04179913
n02437312
n03769881
n01664065
n02107683
n09835506
n01784675
n02483362
n02089867
n04356056
n03666591
n06359193
n02277742
n04456115
n02099267
n03657121
n04149813
n07579787
n04372370
n02095314
n03496892
n02483708
n04417672
n04447861
n02804610
n03126707
n01704323
n09332890
n02090379
n03837869
n11939491
n03866082
n03733131
n02165456
n04443257
n02281787
n02398521
n07718472
n02106382
n02066245
n04428191
n03527444
n03085013
n02112350
n02094433
n03942813
n02398521
n02865351
n03908618
n02229544
n01981276
n03208938
n02236044
n04542943
n02804610
n02843684
n01687978
n02447366
n02099849
n03017168
n02999410
n02013706
n02102040
n02825657
n02091831
n01833805
n02117135
n01910747
n03724870
n04209133
n04328186
n03761084
n04509417
n04612504
n01537544
n01748264
n04542943
n02892767
n04332243
n04591713
n02116738
n07714990
n03782006
n07697313
n03692522
n02776631
n03197337
n06874185
n02089867
n02790996
n02979186
n03938244
n03028079
n02823428
n04133789
n02794156
n02815834
n03063599
n10148035
n02486261
n04435653
n01943899
n02391049
n02090622
n04542943
n02058221
n02089867
n02115641
n03930313
n02105412
n03691459
n03781244
n03721384
n01484850
n03201208
n03710721
n03384352
n02410509
n03787032
n03970156
n02105251
n03958227
n02690373
n01729322
n01518878
n04254680
n02988304
n03670208
n04033901
n02018795
n02749479
n03447721
n02093428
n02099712
n02094114
n02814860
n02167151
n04525305
n02483362
n02105251
n02817516
n04125021
n02979186
n01829413
n02097658
n02909870
n01558993
n03216828
n02280649
n02051845
n02115913
n03938244
n04522168
n01632458
n02106382
n02939185
n04111531
n01693334
n02268853
n02109525
n02125311
n03617480
n02437616
n04146614
n03832673
n02870880
n04554684
n02071294
n02971356
n03775071
n04326547
n11879895
n01531178
n02667093
n04317175
n02027492
n02002556
n02206856
n03527444
n04557648
n04467665
n01742172
n02100236
n02096437
n13054560
n02389026
n02098105
n07871810
n02488291
n04251144
n12057211
n04483307
n01917289
n03637318
n01950731
n01955084
n02869837
n04037443
n02099267
n04254120
n02493793
n12144580
n01968897
n03770679
n02910353
n04146614
n04154565
n02128757
n04380533
n03530642
n02640242
n01530575
n04325704
n04562935
n03838899
n02692877
n03692522
n03916031
n02486261
n03724870
n02099267
n03207941
n02128925
n03461385
n01950731
n02492660
n02102973
n07749582
n04310018
n02110806
n02105056
n09428293
n02087394
n15075141
n03141823
n03709823
n03930630
n02280649
n04069434
n07718747
n02480495
n07754684
n12985857
n03602883
n01665541
n04465501
n02788148
n02114548
n07753275
n03788195
n02814860
n02090379
n03425413
n01751748
n04311174
n01796340
n07613480
n03445777
n04404412
n03124170
n02364673
n01829413
n03134739
n07730033
n03379051
n04485082
n03250847
n07730033
n07714571
n02790996
n03160309
n02268443
n02093859
n13052670
n02086910
n01632458
n04259630
n01806567
n02094433
n02093647
n02111500
n03876231
n01883070
n02098286
n04483307
n03344393
n01592084
n04579432
n04152593
n04579145
n03998194
n02093256
n01616318
n03085013
n03527444
n04116512
n02514041
n03627232
n03376595
n04443257
n03095699
n02403003
n04589890
n01910747
n02978881
n02727426
n01985128
n03482405
n02132136
n04277352
n13133613
n02033041
n02100877
n01806143
n03733805
n01748264
n02483362
n03776460
n02105412
n03887697
n01773157
n02056570
n02808440
n02007558
n04146614
n02097130
n03888605
n02412080
n01806567
n02457408
n03935335
n03775071
n07697313
n01774750
n07873807
n07749582
n02091134
n02871525
n02117135
n03657121
n03661043
n02088632
n03776460
n02120505
n02165456
n03089624
n03485794
n01534433
n02835271
n03240683
n04251144
n02086910
n03447447
n04200800
n01582220
n02655020
n04458633
n04371430
n02097047
n03970156
n04418357
n04243546
n02098413
n02992529
n03384352
n02640242
n02894605
n03920288
n03250847
n02607072
n04326547
n04485082
n03868863
n09472597
n02027492
n02692877
n03388549
n03874599
n02096051
n01847000
n02328150
n01534433
n02910353
n01829413
n02107142
n03977966
n02090622
n03444034
n04418357
n04254680
n02692877
n02002724
n03535780
n02108551
n02112350
n15075141
n04141975
n04507155
n04509417
n11939491
n02112706
n02110627
n03125729
n03680355
n01644373
n01644373
n01756291
n01753488
n02098105
n02342885
n03759954
n02110958
n02797295
n02006656
n02111500
n04033901
n01784675
n04277352
n02489166
n02481823
n02398521
n01739381
n02823428
n02939185
n12985857
n04275548
n04127249
n02087394
n03920288
n04482393
n03100240
n03000684
n07248320
n02454379
n02361337
n03218198
n02106030
n03544143
n04456115
n02165105
n03188531
n01641577
n07742313
n03761084
n01518878
n04376876
n03782006
n02422699
n01773797
n02106550
n04590129
n03902125
n02823750
n03393912
n04090263
n01737021
n02129165
n01498041
n03792782
n02966687
n02504458
n03838899
n01689811
n04347754
n01608432
n01817953
n02536864
n01729977
n02096437
n03924679
n02096437
n01798484
n02869837
n04336792
n03485407
n03868863
n04376876
n03602883
n02128925
n02102973
n02447366
n07716358
n03857828
n04517823
n03837869
n07749582
n02105162
n02281787
n02769748
n02085620
n01751748
n02093647
n04423845
n02488702
n03485794
n03908714
n01498041
n02231487
n02108551
n03179701
n02786058
n01855032
n04147183
n04254680
n04557648
n01728572
n04325704
n07860988
n01847000
n13044778
n03445777
n03447447
n02169497
n03290653
n03376595
n02094114
n03854065
n02422699
n01796340
n03459775
n02091244
n04399382
n03476684
n02951585
n03207941
n02174001
n03445777
n01950731
n04562935
n01728572
n02089973
n01945685
n02791270
n04090263
n01665541
n02264363
n04228054
n03345487
n03947888
n01944390
n04153751
n01664065
n03223299
n02930766
n04404412
n03992509
n01877812
n02977058
n09835506
n12267677
n03127747
n01980166
n09835506
n07753113
n02860847
n02840245
n01748264
n03891251
n02484975
n02095314
n03063689
n04372370
n11879895
n02447366
n01795545
n03201208
n01797886
n04548362
n03028079
n03201208
n02109047
n03804744
n03417042
n02111500
n02109047
n02415577
n04456115
n02486410
n03976657
n02109525
n03602883
n03937543
n02492660
n02127052
n02641379
n03146219
n02091635
n02110185
n04389033
n04330267
n02165456
n04152593
n04548362
n02094433
n04372370
n03208938
n02356798
n02666196
n02279972
n03661043
n03187595
n03131574
n07742313
n02104029
n02172182
n02090622
n02085782
n02123159
n02105855
n02422106
n01667114
n01943899
n03692522
n03788195
n07718472
n03146219
n04553703
n09472597
n04447861
n02790996
n03673027
n02102040
n07565083
n01532829
n02276258
n04141327
n01817953
n04118538
n01990800
n02123597
n01751748
n02025239
n01644373
n03355925
n02177972
n04286575
n04009552
n03899768
n03857828
n04613696
n02120079
n02007558
n04311174
n03594945
n04355338
n03325584
n07590611
n07831146
n03899768
n02165105
n06359193
n06874185
n03657121
n02056570
n09428293
n04597913
n02114855
n04548280
n03065424
n01986214
n03623198
n04485082
n03888605
n02114855
n02917067
n04067472
n03457902
n03775071
n07579787
n02509815
n04458633
n03347037
n02098105
n12985857
n03691459
n04525305
n01817953
n03393912
n04251144
n02088364
n02526121
n02444819
n02088238
n02051845
n01667114
n04487394
n04125021
n02883205
n04162706
n02085936
n02807133
n02978881
n04350905
n01843383
n02906734
n01608432
n02950826
n04131690
n02823428
n02106030
n01818515
n03840681
n03443371
n03447447
n02492660
n11879895
n02981792
n01514668
n02701002
n04192698
n02106030
n07717410
n03492542
n06794110
n03977966
n04008634
n07768694
n04515003
n02111889
n02363005
n01930112
n04447861
n07684084
n01883070
n03250847
n02825657
n03793489
n01616318
n02110341
n06596364
n04456115
n01749939
n03180011
n02690373
n02088094
n01984695
n02493793
n09428293
n03888605
n09229709
n02128757
n04239074
n04040759
n03062245
n02168699
n02977058
n01773157
n02101388
n03459775
n04532106
n04026417
n02870880
n04179913
n02115913
n04525038
n11939491
n02165105
n04258138
n09472597
n01491361
n03706229
n03937543
n01855672
n03673027
n02443484
n03706229
n04149813
n03599486
n03272562
n01704323
n01537544
n03424325
n02085782
n02190166
n04592741
n02504458
n04086273
n07754684
n02443484
n02086910
n01756291
n01873310
n02096437
n02870880
n02106166
n07613480
n03018349
n03447721
n04335435
n02114855
n07760859
n03825788
n02107142
n02095570
n01697457
n03837869
n02018795
n02113624
n03781244
n03942813
n02445715
n02111129
n04372370
n02115641
n07802026
n02137549
n02099429
n03998194
n04162706
n03208938
n02486410
n02536864
n02437616
n02128757
n04604644
n03016953
n04404412
n02096585
n01494475
n03657121
n04259630
n04423845
n03388549
n02640242
n02988304
n02165456
n03924679
n04086273
n02492660
n02113624
n02093859
n02089867
n04192698
n01944390
n01632777
n02966687
n02107908
n02098286
n07831146
n02007558
n04536866
n02808304
n07718472
n03930630
n07754684
n01774750
n03980874
n03384352
n02104029
n02769748
n02058221
n01695060
n03929660
n13040303
n03089624
n04443257
n04428191
n03775546
n04517823
n01945685
n03216828
n02965783
n02088466
n04133789
n03838899
n02123597
n02128385
n02486410
n03124170
n03530642
n02500267
n12768682
n02128385
n01592084
n02526121
n04356056
n02137549
n03854065
n07684084
n01855032
n02992211
n02484975
n02106030
n09421951
n04367480
n09256479
n02119022
n02493509
n03803284
n01685808
n07697537
n01807496
n03733281
n03417042
n02219486
n09229709
n02526121
n03908714
n04204347
n03527444
n01740131
n02492035
n02094258
n03769881
n03026506
n02804414
n02489166
n02883205
n03482405
n04366367
n03868863
n03891332
n01797886
n03447447
n04399382
n04146614
n02423022
n02268443
n03250847
n07753592
n01984695
n03709823
n03884397
n03630383
n03814639
n02834397
n01737021
n03786901
n01775062
n01883070
n09428293
n03977966
n07754684
n03384352
n02794156
n13054560
n02132136
n02769748
n07718747
n02950826
n01930112
n02086240
n02125311
n03947888
n02840245
n03220513
n03720891
n02791270
n02802426
n03866082
n03825788
n02487347
n02169497
n02860847
n01728920
n03535780
n03710193
n02091467
n04243546
n01616318
n03942813
n02128757
n04049303
n04417672
n02127052
n03838899
n03729826
n02909870
n09421951
n04515003
n02165105
n03146219
n04423845
n03602883
n01930112
n04208210
n03887697
n03761084
n02268853
n04392985
n03649909
n03447721
n02692877
n12267677
n07715103
n04392985
n04509417
n04041544
n03538406
n01664065
n03179701
n01820546
n04204347
n03929660
n02102973
n03903868
n01742172
n01770081
n03109150
n04273569
n02123045
n07590611
n13037406
n02102177
n03000247
n02410509
n02088632
n07768694
n06785654
n03393912
n03496892
n04275548
n03854065
n04355933
n01807496
n07720875
n04584207
n03792782
n03208938
n02666196
n04149813
n02107683
n04049303
n04118538
n04418357
n02877765
n01883070
n02509815
n10565667
n02497673
n02115913
n03837869
n02190166
n04592741
n04285008
n04606251
n03075370
n04125021
n03796401
n02091134
n03792972
n01824575
n02086079
n01855032
n07742313
n03393912
n03958227
n02137549
n02113978
n02356798
n02808440
n02105412
n01797886
n04204347
n03837869
n02111277
n02777292
n02129604
n07930864
n02489166
n03459775
n01644900
n04149813
n03854065
n03125729
n04141076
n04505470
n02089973
n02172182
n04266014
n04606251
n07768694
n09472597
n02134418
n03623198
n02793495
n01484850
n02276258
n02095889
n03733281
n03535780
n03983396
n02640242
n01818515
n02051845
n03544143
n02092002
n02906734
n01518878
n03769881
n02087046
n03891332
n04392985
n03485794
n03445777
n02115913
n02321529
n03633091
n01984695
n04590129
n02268443
n02676566
n02134084
n03658185
n02091134
n03733805
n02488702
n02869837
n02640242
n03160309
n02443484
n02441942
n01775062
n02825657
n12144580
n04591713
n02783161
n01882714
n02815834
n02814860
n02102177
n02988304
n03376595
n02165105
n04081281
n03495258
n09193705
n04493381
n02815834
n11939491
n02883205
n03063689
n02095570
n04033901
n03937543
n02107908
n07742313
n02114712
n02971356
n02906734
n02814860
n01692333
n02808440
n03706229
n04335435
n03791053
n03742115
n02099429
n02877765
n02321529
n03814639
n01592084
n03272562
n02786058
n01667114
n03947888
n02100735
n04409515
n01601694
n03777568
n12620546
n06794110
n02483708
n03666591
n03759954
n01871265
n02790996
n01955084
n03868863
n03026506
n04070727
n02233338
n01983481
n02640242
n01819313
n02794156
n03017168
n02486261
n04118776
n02769748
n03250847
n02113799
n02105056
n02108422
n01806567
n04229816
n09256479
n04141327
n01692333
n01644373
n02493509
n02892201
n02346627
n07747607
n04120489
n03032252
n04081281
n09468604
n02108422
n07753113
n02441942
n03775071
n02319095
n04579145
n02097474
n03697007
n02769748
n02129604
n04141076
n04476259
n02442845
n04442312
n02012849
n01806567
n03337140
n02097209
n03207941
n01632458
n01818515
n02233338
n02088094
n02727426
n04239074
n03095699
n04606251
n03902125
n02099267
n02086240
n03337140
n02085782
n02412080
n03637318
n01734418
n02113023
n04251144
n03764736
n02114855
n02799071
n01675722
n02843684
n01756291
n04417672
n02835271
n04141076
n04389033
n04482393
n02087394
n02115641
n03017168
n01753488
n02514041
n04509417
n02089973
n03075370
n01644373
n03791053
n04265275
n02111500
n02097209
n04458633
n07802026
n04141076
n04597913
n02281787
n12057211
n02277742
n07716906
n03920288
n04326547
n03127747
n03404251
n02108915
n02127052
n02391049
n04229816
n02837789
n03314780
n02089973
n04296562
n02791270
n03000134
n01644900
n04209133
n01669191
n02107142
n03908714
n03045698
n03485794
n02108551
n02807133
n02892767
n04525305
n02493509
n10148035
n03201208
n03690938
n04505470
n02206856
n02098105
n03478589
n02123597
n02783161
n01667114
n02106550
n03733805
n03424325
n01882714
n01855672
n01855672
n01983481
n01695060
n01847000
n02799071
n04428191
n03223299
n13052670
n02101556
n04265275
n03016953
n01775062
n04033901
n01753488
n03146219
n04235860
n03759954
n03788195
n07749582
n01829413
n02093256
n02231487
n04536866
n03146219
n04004767
n02493793
n04371774
n02395406
n02114712
n02747177
n01560419
n03814906
n04141327
n01833805
n03825788
n02128925
n02120079
n03658185
n03935335
n03530642
n01968897
n02114548
n03873416
n01985128
n01514859
n02669723
n04311174
n03141823
n01872401
n03920288
n02927161
n02397096
n04357314
n03535780
n03127925
n01807496
n02895154
n02794156
n03666591
n04004767
n04039381
n04179913
n01828970
n02128385
n02095570
n04592741
n02793495
n02096177
n01631663
n02111500
n12057211
n04356056
n02894605
n02226429
n04482393
n01950731
n03452741
n01632777
n03197337
n04505470
n04599235
n01484850
n04501370
n02095570
n02276258
n02410509
n04037443
n02276258
n04418357
n02892767
n02099267
n03791053
n04599235
n03642806
n03530642
n07718472
n07693725
n11939491
n02793495
n02988304
n02096051
n01514668
n01616318
n04243546
n02808440
n04270147
n02106030
n04344873
n07930864
n03444034
n07860988
n02119022
n02108000
n04562935
n02105162
n02492035
n02823750
n03481172
n02108000
n04310018
n02107142
n02226429
n02074367
n03785016
n04553703
n03495258
n07579787
n07745940
n02111277
n04476259
n03476684
n04487081
n02091134
n07714571
n02105251
n04404412
n04398044
n01924916
n02487347
n12620546
n03255030
n04325704
n02093647
n02814533
n03125729
n03000247
n02492035
n01530575
n02108915
n02114367
n01796340
n13044778
n04522168
n02443114
n04589890
n04201297
n03733805
n02168699
n01616318
n03594945
n04479046
n02391049
n02892201
n04447861
n02134084
n02096294
n01484850
n03930630
n02090721
n04118538
n02445715
n06596364
n03599486
n04579145
n09468604
n01986214
n01820546
n02526121
n02408429
n03854065
n01855032
n03272562
n09288635
n02106550
n02095314
n01667778
n02137549
n02483708
n02804610
n04125021
n03769881
n02814533
n07718472
n04263257
n03877472
n02107312
n03042490
n01697457
n09468604
n03146219
n02799071
n03764736
n02493793
n03787032
n02808304
n03485407
n01740131
n04589890
n01914609
n02883205
n04254680
n03777568
n02280649
n02102040
n02823750
n04147183
n02091467
n04069434
n01729977
n01818515
n04023962
n03584254
n02095314
n03983396
n03956157
n02097209
n02095314
n02825657
n02107142
n02219486
n03796401
n01687978
n03944341
n02097658
n07718747
n04552348
n04263257
n03942813
n02037110
n03787032
n03642806
n01689811
n02102973
n02480495
n07684084
n02408429
n04356056
n02117135
n07584110
n04265275
n02493793
n01682714
n01981276
n04592741
n03976467
n02948072
n04086273
n04277352
n13054560
n02480495
n01983481
n02085782
n03598930
n03345487
n02017213
n03179701
n01984695
n04296562
n04507155
n04328186
n01534433
n02494079
n03916031
n04376876
n02093428
n01843383
n01924916
n03207743
n07747607
n03785016
n03388549
n02113624
n03961711
n02086646
n02134084
n04606251
n04493381
n02096585
n02992529
n03891332
n01616318
n01496331
n01694178
n01695060
n04026417
n01695060
n02117135
n03584254
n04336792
n01698640
n02177972
n04532670
n02859443
n02095889
n01682714
n11879895
n02114855
n02484975
n02097047
n04204238
n04604644
n01775062
n03775071
n01773549
n03956157
n03792972
n04404412
n09835506
n07717556
n02037110
n02361337
n02105412
n04447861
n02835271
n03240683
n07613480
n02422699
n02488702
n01776313
n04579432
n04116512
n03857828
n02676566
n03063599
n02397096
n02977058
n02089867
n04429376
n03018349
n13037406
n03998194
n01693334
n01770081
n03991062
n03141823
n03691459
n04039381
n02894605
n02096177
n02093256
n02917067
n03791053
n03976467
n02795169
n02112706
n01692333
n02111129
n03110669
n03803284
n01592084
n02514041
n02104365
n02089867
n07860988
n02093256
n02403003
n04522168
n02837789
n01855032
n02793495
n02093991
n02437312
n02980441
n04116512
n02120079
n04371774
n02104365
n04153751
n02091635
n01775062
n04310018
n03529860
n02105162
n02814860
n02088364
n02116738
n03630383
n02229544
n04111531
n01882714
n01917289
n03877472
n02346627
n03476991
n02115641
n03110669
n02799071
n03272562
n01729322
n03599486
n03445777
n04099969
n02536864
n03026506
n03899768
n04485082
n01440764
n04370456
n04125021
n07565083
n02012849
n02437616
n02281406
n03141823
n01440764
n04548362
n03584254
n04366367
n04069434
n02108551
n07697313
n02916936
n03124043
n01697457
n02095570
n03016953
n02441942
n02106382
n01833805
n03045698
n04404412
n03888605
n04259630
n03075370
n03124170
n03534580
n04277352
n03717622
n02526121
n01797886
n04133789
n02105855
n03530642
n02130308
n01980166
n04192698
n04336792
n07742313
n01692333
n02279972
n04371430
n01592084
n09332890
n04332243
n04392985
n07720875
n03478589
n03291819
n04560804
n02106030
n04049303
n02927161
n07753113
n04065272
n02835271
n03047690
n03538406
n01582220
n02113624
n03792782
n04116512
n02093859
n03961711
n02109047
n07831146
n02825657
n13054560
n02951585
n02442845
n02817516
n03874599
n02093859
n01755581
n02860847
n02167151
n01537544
n02099601
n02111500
n03670208
n03179701
n02093647
n03444034
n03131574
n02111500
n04069434
n01744401
n03220513
n03393912
n02486261
n03372029
n01728572
n02422106
n01833805
n03594734
n13044778
n02074367
n02391049
n07873807
n09468604
n02799071
n03832673
n02361337
n02111277
n04204238
n02172182
n04562935
n02100735
n02007558
n03630383
n01484850
n02484975
n02096051
n02206856
n03770679
n04265275
n09246464
n09835506
n07614500
n09472597
n03379051
n03457902
n01855032
n04201297
n02951585
n13133613
n03770439
n02172182
n03992509
n03617480
n02802426
n02676566
n01687978
n07711569
n03690938
n02869837
n03942813
n04332243
n01491361
n12768682
n01910747
n04179913
n03627232
n13037406
n07745940
n04152593
n01806143
n07565083
n03627232
n12267677
n03837869
n02094433
n04238763
n03496892
n04612504
n02807133
n02106166
n02484975
n03208938
n04065272
n02107574
n07715103
n04517823
n10565667
n02807133
n03717622
n04557648
n04591157
n02326432
n06874185
n04442312
n03042490
n03188531
n04487394
n02006656
n01729322
n03929660
n03425413
n03216828
n02346627
n02526121
n02089078
n01669191
n10565667
n04376876
n04258138
n02489166
n02493793
n03584829
n03379051
n02094114
n01514668
n03770439
n02231487
n01855032
n03180011
n04606251
n03916031
n01774750
n02087394
n03297495
n01968897
n02105056
n01491361
n02114712
n02097130
n02692877
n04125021
n03476684
n03658185
n02966687
n02259212
n03355925
n13133613
n03394916
n02107312
n02788148
n02109961
n01440764
n03124043
n06359193
n04133789
n02500267
n04209133
n03344393
n03494278
n02977058
n03710637
n01622779
n09421951
n02790996
n02089078
n02256656
n01531178
n04479046
n04141327
n03000134
n02504013
n03627232
n02114712
n03325584
n03773504
n04004767
n04266014
n02977058
n02125311
n02281406
n03291819
n01675722
n02138441
n03804744
n03000684
n02114367
n03187595
n01943899
n02125311
n02113624
n02823428
n02233338
n03110669
n02500267
n03594734
n03347037
n01990800
n02074367
n02396427
n03954731
n02687172
n02883205
n03127925
n02111500
n07718747
n02447366
n04286575
n02930766
n01664065
n04153751
n01687978
n02422699
n02791270
n02835271
n02504458
n01917289
n04252077
n04548280
n03089624
n07590611
n07754684
n01739381
n04483307
n01914609
n02087046
n03697007
n04039381
n01820546
n04355338
n02100735
n03032252
n02091467
n01728572
n02002556
n03874599
n02859443
n04146614
n03534580
n04532106
n01981276
n03814639
n01689811
n06359193
n01675722
n03888605
n07714990
n04476259
n02536864
n02492035
n04265275
n02948072
n03804744
n04380533
n01518878
n04005630
n07590611
n04417672
n03709823
n02105412
n02363005
n01494475
n03680355
n02951358
n04597913
n03998194
n01855032
n02018795
n03271574
n02167151
n02009912
n03825788
n04482393
n01774750
n02500267
n01514859
n03908618
n03761084
n03633091
n02096177
n03729826
n07717556
n03670208
n01773797
n04554684
n01697457
n03691459
n02138441
n03764736
n02123394
n04192698
n04120489
n07615774
n03929855
n02494079
n01669191
n01498041
n03250847
n03924679
n02356798
n02823750
n03447721
n02058221
n07930864
n01530575
n04428191
n04372370
n03840681
n02027492
n01498041
n07718472
n03954731
n04099969
n03954731
n01770081
n03445924
n03045698
n03527444
n02840245
n04201297
n01735189
n01986214
n02002724
n02113978
n02177972
n03908714
n03888257
n02100236
n02437312
n02236044
n07871810
n03775071
n03947888
n03933933
n02066245
n02128385
n01491361
n02493509
n07717556
n02865351
n03187595
n02666196
n01917289
n01770081
n02788148
n03661043
n02481823
n02085620
n02799071
n03590841
n01749939
n01614925
n02950826
n02088632
n01498041
n02105162
n01737021
n02690373
n03584254
n02791124
n02088238
n04328186
n01582220
n02231487
n03717622
n01751748
n03721384
n02108422
n01669191
n02980441
n04243546
n03982430
n02422106
n03014705
n04371774
n04125021
n02090622
n01930112
n04552348
n03764736
n01582220
n02056570
n02089973
n09399592
n03450230
n03770679
n03445924
n02007558
n02268443
n02396427
n01440764
n03062245
n02134418
n03594734
n02094433
n04264628
n02992211
n02093428
n02100735
n04367480
n03764736
n03041632
n01443537
n03476684
n09229709
n04355338
n02128385
n04550184
n01806567
n02098413
n04086273
n02090379
n03958227
n02091467
n02108000
n03658185
n02843684
n01440764
n02981792
n07892512
n03297495
n03692522
n03937543
n03691459
n03240683
n02977058
n07730033
n04591713
n11939491
n03902125
n02783161
n04355338
n02281406
n03538406
n01608432
n03935335
n01983481
n02730930
n01968897
n03769881
n04493381
n02112018
n02391049
n04389033
n03775546
n02172182
n09399592
n02093991
n01806143
n02226429
n01669191
n04125021
n02113712
n02860847
n02074367
n02447366
n02783161
n02454379
n01984695
n03721384
n03633091
n03376595
n02120505
n02105505
n04517823
n03372029
n03527444
n03786901
n03478589
n02066245
n07892512
n01491361
n02108089
n03325584
n03717622
n03773504
n01582220
n03676483
n04540053
n07248320
n04118538
n02095314
n12267677
n03602883
n02815834
n03379051
n02172182
n02107142
n06874185
n01776313
n07714571
n01775062
n03452741
n03916031
n04118538
n01580077
n02497673
n01518878
n03673027
n02101388
n03187595
n04350905
n02408429
n03417042
n02514041
n02116738
n03476684
n02497673
n04285008
n03126707
n03544143
n04147183
n03481172
n04041544
n02268443
n09472597
n02085782
n03400231
n03954731
n04074963
n03782006
n02281787
n04023962
n04008634
n07875152
n07716906
n02109525
n03995372
n02096177
n01981276
n03884397
n02509815
n03529860
n03584829
n02268853
n04141975
n04599235
n03759954
n02894605
n02454379
n03014705
n02786058
n04505470
n02172182
n02979186
n02091635
n02007558
n02797295
n02817516
n02233338
n04099969
n03250847
n02950826
n02124075
n01484850
n02096294
n02965783
n01943899
n02028035
n04486054
n02417914
n03445777
n04009552
n02125311
n03770439
n02018207
n02219486
n04111531
n09288635
n03825788
n03223299
n04606251
n02396427
n07717410
n02111277
n04515003
n02643566
n03733131
n02093428
n01807496
n02480855
n03527444
n02099849
n04482393
n02361337
n02107574
n04201297
n03633091
n04033995
n02641379
n02790996
n02190166
n03127747
n02483362
n03126707
n03590841
n07717410
n04033901
n02676566
n07875152
n02100236
n04584207
n01737021
n02493509
n02105251
n03930630
n03873416
n02396427
n02493793
n03250847
n02088466
n02814533
n02108000
n01443537
n02988304
n01944390
n04285008
n04356056
n01930112
n03630383
n02281406
n02346627
n04493381
n03709823
n01755581
n02018795
n07802026
n11939491
n07836838
n04429376
n03967562
n02113023
n03724870
n03792972
n01753488
n07875152
n07753592
n04357314
n03642806
n04131690
n04258138
n01667114
n02782093
n02493509
n04465501
n07583066
n02256656
n01532829
n01872401
n07684084
n03763968
n04579145
n03492542
n04417672
n04350905
n04069434
n03866082
n04311174
n01756291
n02797295
n03642806
n03676483
n03697007
n02087046
n03207941
n04201297
n02074367
n01608432
n02111500
n03633091
n02804610
n04562935
n02093859
n03935335
n02051845
n01990800
n02799071
n04228054
n02100877
n01755581
n02129604
n02727426
n01860187
n04326547
n03776460
n02206856
n02093256
n01968897
n02326432
n03770679
n02509815
n02978881
n03018349
n03394916
n02977058
n03891332
n01665541
n04141327
n02233338
n02092339
n03388549
n04548362
n04296562
n04067472
n03014705
n02747177
n02441942
n04081281
n03290653
n02066245
n01983481
n02085936
n01518878
n02085620
n04346328
n01601694
n01532829
n03992509
n01694178
n02437616
n04612504
n02666196
n03950228
n02093754
n02123597
n01817953
n02190166
n04067472
n03933933
n02398521
n02097130
n03444034
n03792972
n04418357
n01871265
n03208938
n01768244
n02174001
n02219486
n01774384
n07742313
n04355933
n02129165
n07742313
n01697457
n04310018
n02669723
n04367480
n01592084
n02105251
n02113799
n07565083
n02091032
n02011460
n03773504
n02445715
n04275548
n02112018
n01632458
n02486261
n07714990
n02106550
n03478589
n02963159
n03743016
n04146614
n03970156
n03874293
n07749582
n06874185
n01950731
n01498041
n04090263
n02077923
n02106662
n02786058
n04591157
n03481172
n03924679
n02500267
n04258138
n04540053
n03160309
n02087394
n03494278
n04325704
n01669191
n02108551
n01980166
n03314780
n02808440
n04447861
n02281787
n02095889
n02489166
n02114367
n04344873
n02058221
n02444819
n02988304
n03495258
n02002556
n03874293
n02085782
n01695060
n02870880
n01608432
n02948072
n04067472
n02098286
n02093428
n04009552
n12267677
n02085782
n03376595
n04335435
n03891332
n03733281
n02264363
n02132136
n04263257
n01698640
n01753488
n07714990
n03417042
n03259280
n01737021
n04118538
n01773797
n03124170
n03874293
n09421951
n02747177
n09288635
n04136333
n03956157
n02093256
n03729826
n03538406
n01774384
n04355338
n02105251
n02403003
n01697457
n01828970
n02892767
n02018207
n02134084
n03733805
n07930864
n02097474
n04507155
n04344873
n02950826
n03721384
n01943899
n07920052
n02319095
n04149813
n02364673
n01742172
n04428191
n03450230
n09399592
n01689811
n01978287
n07716358
n02074367
n04557648
n03062245
n02105251
n07716906
n03623198
n03125729
n03876231
n04509417
n03041632
n04347754
n06359193
n04118538
n01806143
n07749582
n02105855
n13052670
n02094114
n03775071
n01873310
n03788195
n04311004
n03018349
n03089624
n02087046
n03379051
n04493381
n07714990
n03895866
n15075141
n07684084
n01755581
n07715103
n04285008
n03476991
n04049303
n03496892
n03041632
n02403003
n03832673
n04131690
n04479046
n04479046
n02259212
n01734418
n02002556
n03179701
n03992509
n07932039
n04467665
n02099712
n04456115
n03690938
n04367480
n01729322
n03961711
n03841143
n02963159
n03476991
n04074963
n02077923
n01532829
n02865351
n02966687
n01694178
n03017168
n04429376
n03935335
n09246464
n04004767
n03208938
n04111531
n04389033
n07760859
n04326547
n04209239
n07697537
n03785016
n04367480
n04037443
n04311174
n02814533
n02113799
n02825657
n02672831
n02114855
n02090622
n09399592
n04482393
n01910747
n04417672
n04162706
n02098413
n07717556
n01580077
n02092002
n03014705
n04370456
n02835271
n03047690
n03944341
n07613480
n02361337
n02356798
n02835271
n02011460
n02096051
n01843065
n03498962
n07583066
n07734744
n04277352
n02088632
n09835506
n04141327
n01820546
n03218198
n03825788
n04310018
n02099849
n02025239
n07753275
n03876231
n02099267
n03794056
n07590611
n01740131
n02091032
n04200800
n01770081
n02869837
n03379051
n01833805
n03929855
n02749479
n01644900
n03445777
n02110627
n01630670
n04273569
n04483307
n02138441
n07892512
n01983481
n02108422
n02948072
n02094258
n03141823
n01632458
n04517823
n04380533
n09472597
n02165456
n01930112
n03018349
n02268853
n01770081
n04141975
n03998194
n03384352
n04147183
n03045698
n03791053
n03944341
n02536864
n01829413
n02088466
n01694178
n02106382
n01748264
n03759954
n12985857
n04254680
n04465501
n02795169
n02096177
n02444819
n01558993
n02115641
n03445924
n02701002
n06359193
n01773549
n03637318
n02437312
n04332243
n02865351
n02088632
n04067472
n02092002
n03956157
n04326547
n02786058
n01784675
n01847000
n04146614
n03666591
n04310018
n01914609
n07695742
n03404251
n03891251
n06874185
n03062245
n03355925
n12267677
n04254120
n07714990
n02233338
n02804414
n03062245
n02018795
n07720875
n03075370
n03530642
n01980166
n01667114
n04553703
n09468604
n06794110
n04367480
n02963159
n03710193
n01980166
n03000134
n03938244
n02231487
n02493509
n03447721
n07583066
n09472597
n03877845
n04147183
n04229816
n12998815
n03877472
n07718472
n03063599
n01665541
n02111889
n06596364
n02094433
n01817953
n02091635
n01755581
n01740131
n01592084
n03673027
n03467068
n03924679
n04467665
n03733805
n01833805
n03089624
n02091635
n02489166
n02112350
n04192698
n02102040
n02823428
n04074963
n01872401
n04579145
n03788365
n04086273
n02009229
n07753113
n02504458
n02002724
n02097474
n07754684
n03134739
n02113978
n02403003
n03998194
n01688243
n03891332
n04133789
n02111500
n02916936
n07248320
n04404412
n04209239
n07590611
n03673027
n04008634
n03272010
n13040303
n09399592
n02007558
n02488291
n07716906
n04009552
n02111889
n03658185
n01980166
n04367480
n02892201
n04423845
n03131574
n04041544
n04266014
n03825788
n02033041
n02002724
n01871265
n04099969
n02321529
n02666196
n01698640
n03709823
n02356798
n03089624
n03873416
n02097130
n02108089
n04258138
n01667778
n04456115
n03492542
n02363005
n01871265
n01950731
n04153751
n01984695
n01614925
n02110958
n01824575
n01981276
n15075141
n03814906
n03874599
n04118776
n01675722
n02939185
n03742115
n01697457
n02326432
n02090622
n04532106
n03983396
n02415577
n02412080
n02102480
n03459775
n04380533
n04254777
n01631663
n03404251
n07871810
n02123045
n02226429
n01871265
n01820546
n01688243
n02825657
n01689811
n02095570
n04019541
n03777754
n01748264
n02123045
n02129604
n02105056
n02125311
n02089973
n03649909
n04540053
n03670208
n02097209
n01819313
n03110669
n02124075
n02437616
n01843383
n03935335
n02782093
n07753113
n03791053
n02111129
n07614500
n03761084
n03676483
n01978455
n03857828
n02488702
n02165456
n07734744
n03991062
n02860847
n03954731
n03045698
n03944341
n02111129
n02092002
n03891251
n02130308
n01945685
n03188531
n02457408
n03085013
n03796401
n13052670
n02398521
n03743016
n02229544
n03160309
n02276258
n02276258
n02504013
n02281406
n02877765
n03649909
n07697313
n02058221
n02077923
n03394916
n02256656
n04328186
n02009229
n03476684
n03388549
n07714571
n09193705
n02396427
n01806567
n02090379
n02100583
n04483307
n02120079
n01914609
n01630670
n04259630
n07695742
n02106030
n02883205
n02398521
n03995372
n07590611
n04099969
n02110063
n03785016
n02669723
n03125729
n04442312
n07920052
n02497673
n02454379
n02091831
n02454379
n02088632
n02115641
n03761084
n02606052
n02264363
n01843065
n03623198
n03445777
n02481823
n01773157
n03109150
n04458633
n02165456
n02190166
n04111531
n03197337
n04542943
n04507155
n02089867
n02342885
n02099601
n03787032
n03483316
n02454379
n04041544
n02086079
n04485082
n07831146
n02106030
n03445777
n02398521
n02666196
n02009912
n01534433
n03126707
n12057211
n04355933
n02025239
n04336792
n02906734
n02002556
n04487394
n03291819
n01614925
n04235860
n04270147
n03291819
n03837869
n04192698
n04120489
n02930766
n02128385
n02837789
n02105505
n01704323
n02481823
n03384352
n02167151
n07753592
n07614500
n02134084
n04515003
n01729322
n04033901
n02134418
n01514668
n03942813
n02101556
n03642806
n03733131
n03290653
n02174001
n01784675
n03777754
n03942813
n02802426
n04049303
n03535780
n02492035
n04070727
n03075370
n04372370
n07860988
n04367480
n03786901
n04562935
n07590611
n02102973
n07248320
n03095699
n04009552
n07614500
n09288635
n03724870
n04258138
n01698640
n07753113
n04263257
n01755581
n04447861
n02666196
n03733281
n02051845
n02058221
n03958227
n02403003
n02097474
n02099429
n02484975
n07836838
n10565667
n07720875
n02486261
n02321529
n01755581
n03100240
n03063599
n01664065
n02783161
n03803284
n03110669
n02086240
n02487347
n02097209
n04310018
n02012849
n04120489
n03482405
n02447366
n01749939
n03478589
n02963159
n04428191
n04285008
n01530575
n02111129
n03109150
n07697313
n02802426
n03690938
n01914609
n02481823
n02259212
n03538406
n15075141
n03649909
n04483307
n04613696
n10565667
n02488702
n02094258
n02096585
n02127052
n02391049
n01734418
n09332890
n03379051
n02133161
n12144580
n02099429
n04447861
n04120489
n07860988
n02129604
n03065424
n02095314
n04154565
n02655020
n02165105
n04275548
n02415577
n02786058
n02091467
n03444034
n01498041
n07590611
n04554684
n02109047
n04552348
n03814639
n03125729
n03888257
n03950228
n02089973
n03967562
n02749479
n03729826
n02018207
n04487081
n03017168
n03976657
n03938244
n02769748
n07836838
n02002724
n03100240
n03598930
n04479046
n01644373
n02708093
n02134418
n13054560
n09332890
n03133878
n04554684
n03041632
n02869837
n03014705
n02510455
n03954731
n02788148
n02859443
n02640242
n02087046
n03891332
n02124075
n03476684
n04270147
n04542943
n03916031
n02051845
n02104029
n04270147
n02422106
n03692522
n02115641
n02447366
n03710721
n02112018
n03000134
n02105162
n02097047
n02356798
n04037443
n02071294
n07892512
n03924679
n01687978
n02098286
n03345487
n04254777
n03680355
n02963159
n01582220
n04090263
n03761084
n04604644
n02097209
n03109150
n02088632
n03937543
n01943899
n02093647
n02093428
n03461385
n04270147
n04389033
n03534580
n09468604
n02107312
n01797886
n02090379
n02871525
n01667778
n01773549
n01755581
n02093991
n04350905
n03995372
n02280649
n03933933
n02226429
n03207941
n09399592
n02106030
n03590841
n02966193
n03787032
n02115913
n04099969
n04273569
n02037110
n01917289
n04254777
n03888257
n02807133
n04589890
n02091032
n01685808
n07714571
n03777568
n03379051
n03028079
n04275548
n02395406
n04040759
n02109961
n01872401
n03825788
n02112706
n03692522
n02086910
n02321529
n03131574
n04311004
n03929855
n01514859
n03804744
n03417042
n02794156
n07730033
n04120489
n02342885
n04041544
n04366367
n02116738
n02992211
n02276258
n02895154
n01984695
n03661043
n03207941
n02025239
n02123045
n02117135
n02107908
n02815834
n04355933
n03598930
n07742313
n03876231
n02259212
n01775062
n03617480
n03840681
n03902125
n02930766
n03633091
n04404412
n03825788
n03337140
n02018795
n02447366
n07613480
n02493793
n01694178
n12620546
n06874185
n02443484
n04209133
n04515003
n04540053
n01796340
n03623198
n02108551
n03763968
n02410509
n11879895
n03832673
n03930630
n02490219
n03937543
n02111889
n02096437
n04154565
n02971356
n02865351
n03776460
n02777292
n02190166
n04612504
n04081281
n02747177
n03777754
n02445715
n03857828
n11939491
n01981276
n04041544
n04458633
n03447721
n02106030
n02834397
n02097474
n01877812
n02085936
n02096051
n03272562
n03793489
n02099849
n03649909
n01882714
n02860847
n04039381
n04264628
n02484975
n02167151
n02074367
n01773549
n04367480
n07718747
n02841315
n02910353
n02106550
n03602883
n04153751
n03992509
n09468604
n02129604
n09229709
n02056570
n03594734
n02111277
n07590611
n02704792
n03868863
n02115641
n02444819
n02808304
n04355338
n02281787
n02138441
n03814906
n04409515
n01739381
n03495258
n03627232
n02085620
n02190166
n03355925
n03188531
n02100735
n03961711
n02823428
n07860988
n01740131
n09229709
n03777568
n03908618
n02108551
n02177972
n09288635
n01693334
n02106382
n04026417
n03388183
n02002724
n03208938
n04517823
n04336792
n03658185
n02097474
n02690373
n13044778
n02281787
n02641379
n02130308
n02704792
n01582220
n02027492
n04525305
n02119789
n13054560
n03724870
n02488291
n07697313
n02132136
n04336792
n03983396
n03944341
n01774384
n02027492
n02091134
n07860988
n02106550
n04357314
n03662601
n03868242
n03804744
n02112350
n01774750
n02088238
n07718472
n01742172
n02992529
n04404412
n02089867
n03345487
n02437312
n02930766
n13133613
n02206856
n02486410
n03843555
n04476259
n02094433
n01843065
n07714571
n02389026
n04099969
n01843065
n03180011
n09472597
n03670208
n01751748
n01807496
n02229544
n02101006
n03188531
n03290653
n02403003
n02699494
n04266014
n02708093
n04399382
n02804414
n07747607
n02749479
n03424325
n04522168
n01843065
n01682714
n02138441
n11879895
n04355338
n03662601
n03658185
n03483316
n07718747
n03476684
n02110958
n04040759
n03814906
n04461696
n02492660
n04044716
n04596742
n01770081
n01806143
n04589890
n03016953
n02493793
n01983481
n01484850
n02981792
n03710637
n02104029
n01498041
n03976657
n04009552
n02790996
n04235860
n04447861
n01910747
n03481172
n04090263
n03929660
n07248320
n03271574
n03661043
n03954731
n03016953
n07614500
n03920288
n02091244
n02676566
n13044778
n03843555
n07871810
n03832673
n04252225
n02174001
n03832673
n10148035
n02280649
n09229709
n06874185
n02823428
n02692877
n02823428
n07753592
n02782093
n03459775
n09288635
n04204347
n02483708
n04461696
n02791124
n03710193
n12768682
n04435653
n04204347
n02669723
n03657121
n01518878
n04026417
n02319095
n03791053
n02110063
n02281787
n03197337
n04152593
n02025239
n03633091
n02259212
n02423022
n03891332
n03874293
n02071294
n01773797
n07711569
n02007558
n13133613
n02017213
n04270147
n02113624
n02916936
n01675722
n07614500
n03673027
n02109961
n02950826
n02966193
n01685808
n02804610
n02095314
n03929855
n10565667
n02013706
n02123394
n03590841
n07711569
n02113799
n07860988
n04367480
n07873807
n02096585
n02002724
n02134418
n02398521
n04033901
n02110063
n09468604
n01990800
n04423845
n02177972
n04447861
n02096585
n02442845
n04265275
n04317175
n01807496
n04366367
n03814906
n12998815
n03482405
n03884397
n03673027
n03673027
n03793489
n02443114
n02988304
n02422106
n04326547
n02992529
n01860187
n03895866
n03180011
n04118776
n03461385
n04275548
n15075141
n03761084
n01944390
n04317175
n04152593
n02927161
n03956157
n02085620
n02727426
n01667114
n04493381
n01729322
n04081281
n01484850
n03124043
n02841315
n02108089
n03345487
n02892201
n07875152
n02093991
n03697007
n02119789
n01739381
n02319095
n02361337
n01883070
n02492035
n02107312
n07715103
n04264628
n01843065
n07860988
n01795545
n01592084
n03676483
n04254120
n03223299
n03220513
n02108915
n03873416
n02128925
n02389026
n01698640
n15075141
n03028079
n01644900
n01694178
n03761084
n03873416
n03710637
n03924679
n03627232
n04542943
n03095699
n02100236
n01784675
n01744401
n04153751
n03770439
n02107142
n03297495
n07753275
n04008634
n07615774
n04550184
n02110806
n04404412
n03976467
n07715103
n04525038
n02776631
n02099267
n02095314
n03028079
n02100236
n03930630
n03188531
n02094258
n04554684
n03887697
n02116738
n02007558
n02102973
n02130308
n04328186
n04141076
n03220513
n02444819
n04458633
n01735189
n02701002
n02071294
n01498041
n04070727
n04423845
n02089973
n04141975
n01729322
n01824575
n04251144
n01692333
n01484850
n04208210
n01667114
n04458633
n04141076
n02058221
n02088466
n07760859
n04560804
n02099267
n03000134
n02481823
n02788148
n02097047
n04487081
n04286575
n02233338
n04344873
n02490219
n02123159
n02120079
n02114855
n02088238
n01775062
n04136333
n03344393
n03535780
n02074367
n03782006
n02487347
n02134418
n02500267
n03208938
n04162706
n02410509
n02091635
n04417672
n01537544
n02951358
n02116738
n03594734
n03775071
n03594945
n04532670
n01695060
n02277742
n02123597
n02883205
n07932039
n02497673
n07754684
n02112018
n03538406
n03895866
n01494475
n02177972
n03197337
n02105641
n02992529
n04070727
n02109525
n02125311
n04456115
n02980441
n03841143
n03938244
n03661043
n01756291
n03794056
n02018207
n03126707
n01614925
n03992509
n03127925
n02115913
n03773504
n02776631
n09472597
n02177972
n03532672
n04476259
n04517823
n13052670
n07753275
n01685808
n04120489
n02120079
n02123159
n02087046
n03598930
n02487347
n03065424
n04517823
n02797295
n02804414
n02843684
n02018795
n03976657
n04005630
n02699494
n03814906
n09332890
n02493793
n04442312
n02100877
n04532670
n03047690
n02077923
n03733281
n04266014
n09835506
n02492660
n04330267
n07716358
n01601694
n04579432
n04380533
n01749939
n03444034
n03400231
n03584254
n03710721
n03895866
n04591713
n03903868
n02088364
n04141975
n01774384
n02112018
n04485082
n04259630
n03041632
n02097130
n03775546
n02093991
n01742172
n09193705
n01984695
n01924916
n02190166
n03706229
n13037406
n04604644
n03602883
n02504458
n03467068
n04536866
n04398044
n01986214
n03777754
n02066245
n02346627
n04370456
n02108551
n04204238
n04371430
n03792972
n02441942
n02096294
n02699494
n04589890
n02085936
n02105056
n02415577
n07734744
n02098286
n02113186
n02096294
n02871525
n03873416
n01784675
n02788148
n02051845
n07930864
n01692333
n02111889
n03662601
n02097474
n02165456
n03595614
n03452741
n04606251
n03796401
n03452741
n07693725
n02112018
n03388549
n04562935
n13133613
n04461696
n01796340
n04270147
n03187595
n03666591
n04120489
n04522168
n02111500
n03976467
n01729322
n02364673
n04356056
n02797295
n02114855
n02749479
n04357314
n07565083
n02676566
n02088466
n02823750
n02093256
n02256656
n02119022
n02883205
n03584254
n03775071
n01682714
n03124170
n04201297
n04044716
n01629819
n12998815
n07584110
n04532106
n03825788
n04501370
n01560419
n03065424
n02106030
n04229816
n03623198
n02280649
n06785654
n02342885
n02488291
n02606052
n03271574
n04070727
n03717622
n02447366
n03065424
n03527444
n01943899
n02095889
n02132136
n04204347
n03026506
n01749939
n03742115
n02105162
n03733281
n02006656
n04552348
n02493793
n02992211
n02089867
n04111531
n04590129
n03982430
n03495258
n02640242
n02099429
n02132136
n02444819
n02056570
n03494278
n01773157
n02137549
n01534433
n02018795
n03630383
n02281787
n04120489
n02104029
n02098413
n02488702
n03379051
n02807133
n04591713
n02110185
n04209239
n01558993
n04325704
n04264628
n03291819
n02793495
n02133161
n03908714
n03584254
n02091831
n02099429
n09835506
n01798484
n03041632
n02808304
n04136333
n09428293
n04465501
n01688243
n02093428
n02129165
n07749582
n03197337
n04392985
n04367480
n02484975
n02607072
n03089624
n04116512
n04286575
n02233338
n04118538
n04254777
n02410509
n02091244
n03016953
n03026506
n02113978
n02091032
n02096585
n04179913
n01775062
n03903868
n04277352
n02841315
n04597913
n01614925
n04067472
n03876231
n02095889
n02100877
n03444034
n01484850
n02490219
n03272010
n12057211
n03980874
n02097474
n04270147
n04429376
n04111531
n09399592
n04005630
n03595614
n02123045
n03657121
n07892512
n03840681
n04296562
n02807133
n01806567
n04258138
n02114367
n01675722
n02794156
n01698640
n04296562
n07717556
n03476991
n04005630
n02099712
n02099429
n03721384
n04277352
n03127925
n02256656
n03201208
n02088466
n02086079
n01632458
n04376876
n03998194
n01440764
n02704792
n01855032
n03095699
n04355933
n04465501
n03841143
n04501370
n01558993
n03042490
n01950731
n03935335
n04584207
n01984695
n02747177
n03775546
n04525038
n01632777
n04485082
n04116512
n02486410
n02096585
n02096051
n02110627
n03272010
n03775546
n02123597
n02992529
n01632458
n02089078
n03954731
n02437616
n02120505
n04507155
n02114712
n03532672
n03983396
n02108000
n01514859
n07802026
n02951358
n01882714
n04505470
n02231487
n03388043
n04482393
n02112018
n04008634
n02606052
n04273569
n03594734
n04532670
n01855032
n02342885
n03950228
n02093859
n02841315
n02025239
n03930630
n01797886
n03240683
n01775062
n02321529
n02342885
n02108551
n03216828
n02281406
n03710721
n04201297
n01950731
n03216828
n07880968
n04208210
n02514041
n02123597
n04517823
n04553703
n03482405
n07697313
n03690938
n02444819
n04049303
n03085013
n01843065
n03709823
n02117135
n02787622
n07579787
n02099601
n04229816
n03776460
n01644900
n07579787
n03733281
n09472597
n01797886
n07802026
n01806567
n02108551
n02093754
n02132136
n04254120
n03877472
n02480855
n04285008
n15075141
n04325704
n09332890
n03947888
n01828970
n02106030
n04501370
n07730033
n02113186
n03026506
n04266014
n11939491
n04270147
n03777754
n04522168
n01860187
n02443484
n02835271
n04125021
n02794156
n06596364
n04265275
n04136333
n10565667
n04483307
n02277742
n02094433
n07716906
n01514859
n02397096
n02102318
n04442312
n03680355
n02086240
n02174001
n02277742
n03832673
n01768244
n01739381
n02361337
n02607072
n01843383
n02091467
n02090721
n01756291
n02099429
n01806567
n02966687
n02094258
n01986214
n07697537
n02909870
n03967562
n04296562
n03388043
n04482393
n09421951
n07614500
n02865351
n02089973
n04557648
n01537544
n01819313
n03929855
n04136333
n03977966
n04099969
n01675722
n03832673
n02643566
n07749582
n04275548
n04005630
n02074367
n03623198
n03495258
n04296562
n02437312
n02113799
n03874599
n02454379
n02877765
n02109525
n04270147
n01729977
n02950826
n02110063
n03216828
n01484850
n03062245
n02128385
n04228054
n03179701
n01796340
n01694178
n02088094
n03942813
n02869837
n03770439
n02097658
n03047690
n03742115
n03724870
n02966687
n02098286
n01687978
n02100236
n01616318
n04442312
n02396427
n03998194
n01773549
n07747607
n01944390
n03891332
n03045698
n03877472
n03207941
n02494079
n01819313
n02093754
n02088238
n02168699
n04515003
n01675722
n02018207
n02690373
n03777568
n03026506
n02342885
n02102040
n07583066
n03961711
n02916936
n03958227
n01698640
n07714990
n02483708
n03680355
n04141975
n02085936
n07930864
n03691459
n02892767
n03770679
n03450230
n02165456
n04560804
n01614925
n04458633
n02500267
n02190166
n04380533
n02950826
n07860988
n02346627
n03814906
n02494079
n01817953
n09421951
n03041632
n04371430
n04371430
n03743016
n01630670
n04074963
n04326547
n02894605
n02086910
n03935335
n04461696
n03476991
n03697007
n01818515
n04263257
n02088238
n07697313
n02110806
n07747607
n02108422
n02641379
n04507155
n02124075
n12985857
n02342885
n07697537
n03742115
n12998815
n04591713
n03450230
n02110185
n02091831
n03424325
n01795545
n04507155
n01616318
n01704323
n03887697
n02128925
n01824575
n02099712
n03498962
n04273569
n04090263
n01775062
n03970156
n02480855
n02730930
n02326432
n04355933
n03355925
n01734418
n02107908
n01978287
n03874599
n03478589
n03788365
n02325366
n02445715
n03180011
n03792782
n01667778
n02490219
n01882714
n04005630
n04118538
n03775071
n03792782
n02123045
n02264363
n02776631
n01773157
n01614925
n04548362
n02009912
n02487347
n03272562
n01685808
n02835271
n02110063
n04153751
n02123045
n02417914
n04208210
n03476684
n01768244
n07697313
n02100583
n02504013
n04040759
n04067472
n01798484
n07248320
n02094258
n02483708
n04557648
n01828970
n02172182
n03658185
n02493509
n03991062
n03494278
n03291819
n02410509
n03733805
n04579432
n03124043
n02966193
n02190166
n02526121
n07753592
n07753592
n07768694
n09246464
n07711569
n02018795
n02105056
n01669191
n02268853
n02488291
n02793495
n02101556
n04476259
n07584110
n04542943
n03670208
n03929855
n04204347
n02094433
n09472597
n04479046
n01667778
n03459775
n02056570
n12620546
n04286575
n02795169
n04209239
n02101556
n04532670
n02009229
n04584207
n02795169
n02112350
n01667778
n02939185
n03908618
n01753488
n02841315
n03388183
n03218198
n02776631
n02363005
n02130308
n06596364
n02814860
n02110063
n02117135
n07684084
n04254680
n03109150
n02408429
n04389033
n04483307
n01797886
n02095889
n03958227
n04548280
n02410509
n03837869
n03720891
n04435653
n01498041
n02749479
n07718747
n04461696
n03388043
n02133161
n02165105
n02817516
n04532670
n02013706
n01682714
n02102177
n03290653
n04086273
n02090379
n01797886
n01440764
n01818515
n04562935
n02782093
n03793489
n11879895
n02814860
n02669723
n02974003
n07693725
n02104029
n03372029
n03045698
n03100240
n02127052
n07579787
n03874599
n02504458
n02132136
n03692522
n04517823
n03223299
n04418357
n02110806
n01728572
n04259630
n03930313
n02321529
n02105251
n04317175
n01491361
n07753275
n02028035
n04476259
n03742115
n03032252
n02328150
n04591713
n02088094
n02190166
n04067472
n03134739
n02102318
n03026506
n04371430
n03535780
n01614925
n02111889
n03977966
n03131574
n02071294
n02110627
n02109961
n02412080
n01580077
n06359193
n04209133
n03775546
n03630383
n01753488
n02672831
n02092339
n01644900
n07730033
n03124043
n04065272
n03697007
n01616318
n01558993
n02107683
n04044716
n03877472
n02786058
n02087046
n07717410
n04019541
n01622779
n03337140
n02978881
n04131690
n03887697
n01582220
n02536864
n04065272
n02977058
n03825788
n01687978
n01756291
n04486054
n01737021
n01968897
n03047690
n02106166
n02259212
n02326432
n04476259
n02115913
n02006656
n04254120
n02871525
n03220513
n03769881
n03692522
n02730930
n04235860
n02112018
n02107142
n02834397
n04008634
n02100583
n01729977
n07714571
n01629819
n02028035
n03724870
n04355933
n01614925
n07714571
n07584110
n02870880
n13054560
n02727426
n03877472
n04263257
n04127249
n03630383
n01978287
n13044778
n02509815
n04251144
n04141327
n12620546
n03388043
n02951358
n02412080
n03110669
n03937543
n04044716
n02101388
n07716358
n04462240
n03933933
n02840245
n03485407
n03461385
n02119789
n01944390
n01924916
n04127249
n04209239
n03908618
n03133878
n03992509
n02410509
n03796401
n01798484
n04557648
n02088632
n03000247
n02971356
n03840681
n01776313
n01773157
n04366367
n03325584
n03873416
n01807496
n02790996
n09421951
n07734744
n03000247
n04597913
n04332243
n02408429
n01677366
n02229544
n03891251
n02110063
n03532672
n03937543
n01558993
n04540053
n12057211
n03388183
n02841315
n09399592
n03933933
n02823428
n02102040
n02690373
n02895154
n02085936
n04458633
n02415577
n04579432
n04557648
n03630383
n02009912
n02113978
n03000247
n09246464
n03498962
n02992211
n03249569
n03930313
n01632458
n02086910
n02097209
n03032252
n01496331
n04118538
n03272010
n02095314
n02930766
n02112137
n03697007
n04127249
n04141076
n03376595
n07613480
n04023962
n03958227
n04515003
n04596742
n02108000
n03874599
n01776313
n02088238
n01950731
n02086910
n03384352
n02093859
n02088632
n02749479
n01631663
n01955084
n04275548
n02493793
n03690938
n02802426
n02110341
n02906734
n02124075
n03991062
n03584254
n03444034
n02979186
n03888605
n01534433
n02129165
n01614925
n02397096
n12985857
n02123159
n01984695
n02097047
n01616318
n02117135
n01682714
n03814906
n02105251
n01877812
n04367480
n01770081
n02099849
n02328150
n07590611
n07734744
n03673027
n02129165
n02111500
n04090263
n02129604
n02894605
n02128757
n04238763
n03720891
n03793489
n03424325
n07716358
n02493509
n02099849
n02091244
n02097658
n02138441
n03047690
n02093647
n02108915
n04263257
n02129165
n04335435
n07760859
n02091831
n03445924
n02280649
n02640242
n04613696
n03527444
n01798484
n03995372
n01728572
n04004767
n02099267
n07920052
n03709823
n02095570
n02018795
n03642806
n04074963
n04141327
n01917289
n04131690
n03250847
n02104365
n03602883
n02093428
n03109150
n03240683
n02086079
n02114712
n02093256
n02102040
n03495258
n04584207
n02870880
n02916936
n07875152
n07583066
n02730930
n04019541
n04254120
n02666196
n03141823
n03063689
n06596364
n02906734
n03445777
n02971356
n03891332
n07892512
n02442845
n03527444
n02667093
n01806143
n03902125
n02457408
n01693334
n02799071
n02814533
n06874185
n02088466
n03825788
n01484850
n03355925
n02095889
n02086646
n03942813
n03425413
n04550184
n02817516
n04049303
n04483307
n02097209
n03388549
n02815834
n02487347
n02074367
n02113186
n02536864
n02114855
n07697313
n03938244
n02492035
n02085620
n02085620
n03223299
n04273569
n03496892
n03866082
n03065424
n03877845
n02871525
n03404251
n04462240
n02113799
n02093859
n03742115
n02123045
n04487081
n02107312
n03938244
n02966687
n02342885
n03781244
n02493509
n02134084
n02749479
n07749582
n12144580
n02114548
n13052670
n07753113
n03777754
n07615774
n02483708
n01784675
n01978287
n02536864
n02443484
n03877472
n04074963
n01632777
n02815834
n01669191
n02104029
n02093859
n01883070
n01774750
n01667778
n01728920
n02219486
n03124170
n02123394
n01740131
n04228054
n01592084
n02128925
n02281787
n02093647
n01667778
n02128925
n01978287
n02130308
n03065424
n12620546
n13052670
n02480855
n03376595
n07734744
n04019541
n02536864
n04350905
n01773549
n03782006
n02111129
n01806567
n07753275
n02256656
n01984695
n04443257
n02410509
n02092339
n02115913
n01806143
n02815834
n03908618
n02279972
n03691459
n03216828
n04370456
n02676566
n03710721
n01629819
n03967562
n03482405
n04487081
n01744401
n02454379
n02007558
n03201208
n03793489
n03902125
n02672831
n03447447
n02749479
n01440764
n03538406
n03794056
n02097130
n04332243
n02814860
n02488291
n03032252
n02137549
n02281406
n01494475
n02749479
n04458633
n01847000
n03825788
n01819313
n01847000
n03908618
n03444034
n02483362
n04254680
n02123597
n03838899
n02104029
n03633091
n03775546
n01807496
n03692522
n03721384
n04208210
n02892767
n02086240
n02492660
n04049303
n04238763
n03793489
n02107574
n02364673
n02134084
n02092339
n02906734
n04371774
n02097658
n02102040
n01968897
n02090622
n03916031
n03658185
n02536864
n03697007
n03924679
n02325366
n03337140
n02999410
n01983481
n03141823
n03662601
n01729322
n02676566
n02992211
n03089624
n01632777
n02443484
n03534580
n01847000
n02102318
n01855032
n03961711
n03895866
n02892767
n01601694
n02443484
n03930313
n03062245
n02988304
n02090622
n02107908
n03290653
n04542943
n04296562
n01986214
n02233338
n02093991
n03482405
n02966193
n03786901
n02027492
n04392985
n03376595
n07714990
n02504013
n04606251
n03724870
n02093991
n03933933
n02804414
n03063599
n01698640
n03498962
n04252225
n02013706
n03026506
n03787032
n04536866
n02100583
n01582220
n02500267
n03388183
n07693725
n02033041
n03908714
n02219486
n02730930
n03710193
n02108915
n01749939
n02817516
n01729977
n02086910
n02107908
n03450230
n07565083
n02128385
n03141823
n04259630
n01914609
n07697537
n04447861
n02099849
n03126707
n01943899
n04118776
n02791124
n03763968
n03492542
n02094433
n04366367
n01614925
n02007558
n02128757
n04019541
n04612504
n02841315
n13044778
n04147183
n03933933
n02110627
n02226429
n01631663
n03676483
n02487347
n04507155
n03216828
n07718472
n02058221
n03127747
n07745940
n02102177
n02113712
n02965783
n03840681
n04310018
n01774384
n02177972
n03063599
n01697457
n03759954
n02085620
n07753113
n03393912
n02692877
n03868242
n02403003
n03249569
n03884397
n02396427
n03457902
n07718747
n02167151
n04154565
n04147183
n04118538
n03124043
n04372370
n01667114
n03998194
n03995372
n10565667
n01798484
n04591157
n03127747
n02105641
n03485407
n02102177
n04461696
n01824575
n02066245
n04317175
n02107312
n06874185
n04465501
n02939185
n04019541
n03459775
n04548280
n03047690
n04325704
n07871810
n01819313
n03782006
n02086079
n03584254
n03929660
n02492035
n03670208
n02412080
n02109525
n02397096
n01582220
n03188531
n02105641
n02033041
n03992509
n02328150
n03000684
n03126707
n07590611
n02102480
n07684084
n07590611
n09421951
n04285008
n02930766
n04604644
n03584829
n03447721
n01693334
n02910353
n03532672
n04127249
n04154565
n03014705
n13052670
n03483316
n02817516
n03759954
n03733805
n04204238
n02110341
n04147183
n02007558
n02268443
n03133878
n03255030
n02442845
n02018207
n04069434
n02667093
n03866082
n02113978
n02108000
n03832673
n04039381
n01677366
n01955084
n02113023
n04371430
n03134739
n03840681
n07714571
n01955084
n03785016
n03924679
n04443257
n03709823
n04204347
n02086079
n02361337
n04317175
n09229709
n04270147
n01518878
n02105412
n07720875
n02177972
n02098105
n03534580
n02492660
n03954731
n03874599
n04243546
n04344873
n04252077
n02009229
n01774384
n03843555
n02988304
n02422699
n03045698
n03775071
n02098105
n04099969
n01582220
n03026506
n02099849
n02814860
n02980441
n07875152
n01873310
n02117135
n02510455
n02108422
n04599235
n03450230
n02105505
n04239074
n04131690
n04033995
n03445924
n01558993
n02791270
n03770679
n02480855
n02134084
n02098286
n03478589
n01744401
n04532670
n02105412
n03874599
n04125021
n01682714
n02747177
n02992211
n03710193
n01514859
n01687978
n04418357
n02017213
n01677366
n02281406
n02138441
n03594945
n02106030
n03017168
n02105251
n04273569
n02488291
n09332890
n03873416
n02895154
n02494079
n02437616
n01692333
n04311004
n03218198
n02110185
n02256656
n07880968
n02666196
n03337140
n04399382
n04265275
n04254120
n01798484
n03602883
n03825788
n01833805
n02704792
n01734418
n03594734
n02701002
n02085620
n01582220
n03623198
n03000134
n02992211
n03691459
n02526121
n03998194
n01990800
n03933933
n02950826
n01748264
n15075141
n10565667
n15075141
n02116738
n02643566
n02837789
n04005630
n02091134
n02071294
n10148035
n02951358
n04127249
n03866082
n04579145
n04239074
n02492035
n02107683
n04239074
n04004767
n04550184
n03961711
n03201208
n03207941
n03134739
n02892767
n03394916
n02398521
n03868863
n02486410
n04487394
n03394916
n01496331
n04418357
n02168699
n02097209
n01537544
n01687978
n02799071
n04009552
n03345487
n04346328
n12057211
n03485794
n02443484
n02229544
n02840245
n02415577
n02104029
n03792782
n03888605
n02128925
n03045698
n03837869
n02749479
n04033995
n02422106
n03404251
n04208210
n02113712
n03459775
n02514041
n04371430
n01644373
n03447721
n13052670
n03492542
n04366367
n01968897
n02033041
n02114712
n02804414
n01796340
n04009552
n04597913
n03141823
n04612504
n01729322
n02492660
n03792972
n02130308
n03400231
n01632777
n03085013
n01729322
n02095570
n03970156
n04009552
n03950228
n02086646
n02108000
n03196217
n01580077
n04275548
n04599235
n01774750
n03498962
n03457902
n03930630
n04590129
n01968897
n04462240
n04554684
n02840245
n02804414
n07614500
n03482405
n02871525
n04192698
n02699494
n03388183
n04153751
n03733281
n01797886
n01689811
n02777292
n02389026
n03788365
n01514859
n02102480
n03942813
n02111129
n03017168
n02105855
n04328186
n02115641
n02093647
n02415577
n02536864
n13044778
n02113712
n02123394
n01735189
n03085013
n03127747
n02105641
n04606251
n02814533
n02980441
n02910353
n02098105
n04380533
n02098286
n02018795
n02788148
n01807496
n03908714
n03388549
n02100877
n03982430
n01986214
n04201297
n03347037
n04008634
n04557648
n03445924
n02980441
n03131574
n02948072
n01797886
n04005630
n02111889
n02325366
n01728920
n02129165
n02168699
n04465501
n01728572
n02105641
n01774384
n04418357
n02325366
n03888605
n04149813
n02281406
n03599486
n03124170
n02100583
n03956157
n03788195
n04286575
n04136333
n04344873
n03743016
n01494475
n01910747
n02787622
n04562935
n02909870
n02974003
n02111500
n03388549
n04550184
n07745940
n03673027
n02727426
n03207743
n04487081
n04009552
n02130308
n02105412
n03476991
n01632458
n02790996
n04505470
n04380533
n02108422
n07920052
n03467068
n03249569
n03633091
n02124075
n03763968
n03710637
n03100240
n02256656
n03461385
n02869837
n02948072
n03991062
n02091244
n04476259
n02099429
n02346627
n02782093
n02457408
n02009229
n02910353
n02087046
n01877812
n03787032
n02281406
n04461696
n03782006
n01924916
n03223299
n01768244
n04023962
n07717410
n03062245
n07875152
n03393912
n02364673
n03937543
n02101388
n04548280
n12620546
n03584829
n04606251
n02776631
n04443257
n02788148
n03838899
n02051845
n07768694
n03498962
n02100583
n02102177
n07716358
n04589890
n02128757
n02489166
n03417042
n03355925
n02111889
n03297495
n03180011
n03196217
n02859443
n02321529
n04443257
n03089624
n07730033
n03874293
n03594945
n02423022
n11879895
n02104029
n02916936
n02403003
n03709823
n04467665
n01833805
n02119022
n02687172
n02492660
n02877765
n02099429
n03942813
n02105855
n02168699
n07565083
n03895866
n03126707
n02346627
n02606052
n03670208
n02114548
n02109047
n03916031
n01871265
n04523525
n02690373
n03014705
n02356798
n02128385
n02133161
n03884397
n02108915
n03759954
n03630383
n02106382
n02256656
n02085936
n03197337
n03661043
n04590129
n03958227
n04525038
n02037110
n03956157
n03717622
n02326432
n03249569
n01631663
n01687978
n12144580
n02277742
n03692522
n04507155
n04389033
n04548280
n01914609
n01776313
n03125729
n02096051
n02769748
n04131690
n02669723
n04376876
n01818515
n02091244
n03207743
n03134739
n03838899
n02641379
n02666196
n02397096
n02009229
n02410509
n02276258
n03062245
n02097130
n02093754
n02123045
n04357314
n03089624
n02091244
n01685808
n02412080
n03841143
n01807496
n02098286
n02124075
n02086646
n03627232
n09468604
n01768244
n07920052
n03976467
n03534580
n03617480
n04467665
n07584110
n04040759
n02090379
n03393912
n01945685
n04482393
n01537544
n02231487
n02137549
n03045698
n04346328
n04597913
n02114367
n07613480
n02892767
n04209133
n02097047
n02100877
n02480855
n03259280
n03272010
n07684084
n03743016
n01773549
n02708093
n02939185
n03617480
n01753488
n07880968
n03218198
n02871525
n02093256
n01798484
n02417914
n02108915
n04125021
n03126707
n04285008
n02526121
n04111531
n02089078
n02927161
n02971356
n04553703
n02442845
n01945685
n01491361
n04347754
n04371774
n09428293
n04370456
n01682714
n01664065
n02085620
n02114855
n03255030
n02130308
n04200800
n02447366
n04127249
n02110185
n02793495
n03944341
n03196217
n02096294
n04133789
n07754684
n03384352
n03459775
n04579145
n01682714
n03041632
n07860988
n06596364
n04296562
n04152593
n01698640
n03792972
n04067472
n03394916
n01728920
n04597913
n04090263
n03445777
n13040303
n07717556
n01914609
n07730033
n02108089
n04597913
n02786058
n06785654
n03956157
n04584207
n03697007
n02114712
n02749479
n07248320
n03673027
n02090379
n04501370
n01917289
n04265275
n04515003
n03710721
n03495258
n04532670
n04040759
n01829413
n02840245
n02699494
n02106550
n03089624
n02105056
n02860847
n02487347
n02085782
n03888257
n03691459
n02398521
n04398044
n01687978
n04371774
n02777292
n01664065
n04476259
n04548280
n12144580
n02669723
n02095314
n02877765
n04429376
n03400231
n03729826
n02825657
n02802426
n03733281
n03124043
n07871810
n02169497
n04263257
n01689811
n04485082
n04099969
n03902125
n04371430
n02091635
n03344393
n02815834
n13044778
n02100877
n02130308
n09246464
n02843684
n01735189
n06874185
n02100583
n02100877
n15075141
n02109525
n02486410
n02950826
n01871265
n02823750
n07583066
n02051845
n01751748
n02483362
n03908618
n02977058
n02111889
n04447861
n02114855
n02095314
n02804414
n02489166
n04277352
n02236044
n02408429
n02655020
n01693334
n03447721
n02093647
n02791124
n02077923
n04536866
n03291819
n02093859
n02115641
n04254680
n04501370
n04019541
n02795169
n03459775
n04209133
n07860988
n04553703
n02484975
n03530642
n02906734
n04325704
n04008634
n12057211
n02342885
n04344873
n03794056
n02107142
n04090263
n02009229
n02971356
n02504458
n04273569
n09399592
n03272562
n02277742
n02279972
n07930864
n02917067
n04004767
n04392985
n07718747
n02089078
n03903868
n03208938
n02133161
n03376595
n02978881
n03201208
n02834397
n02443484
n02085620
n02111889
n03532672
n04263257
n03661043
n15075141
n04200800
n03786901
n01873310
n04423845
n01737021
n02951358
n02116738
n01798484
n03980874
n02834397
n02398521
n01531178
n07734744
n01847000
n03841143
n02110185
n13044778
n02727426
n02799071
n02107908
n01806143
n03770679
n03967562
n02086646
n02892767
n01855032
n02165105
n01514859
n04037443
n03877472
n03729826
n01728920
n02676566
n03627232
n04069434
n04192698
n02486261
n02795169
n04033901
n01824575
n02105641
n02444819
n01824575
n03908714
n04239074
n02102480
n02264363
n01498041
n02930766
n04355933
n04125021
n03481172
n02123159
n02099712
n04209239
n02111889
n02002556
n03690938
n04429376
n03814906
n04525305
n02107908
n01692333
n04127249
n01914609
n04201297
n02807133
n01985128
n02979186
n02088238
n03594945
n03388043
n09468604
n03729826
n02704792
n07930864
n03355925
n04554684
n04131690
n04026417
n02437616
n03769881
n04330267
n02091831
n01797886
n02687172
n02906734
n02091635
n02814533
n02114712
n03770439
n04099969
n04033995
n02085936
n01644900
n02930766
n01917289
n01704323
n04515003
n01950731
n03888257
n07836838
n02687172
n02102318
n02106030
n02676566
n01749939
n03314780
n03690938
n02823750
n03344393
n03666591
n04458633
n04398044
n01440764
n04482393
n03075370
n02701002
n04023962
n01558993
n07716358
n02325366
n02106382
n04590129
n10148035
n02236044
n04252077
n12144580
n02110627
n03000134
n02086079
n03032252
n02408429
n03394916
n02871525
n01806567
n02127052
n02879718
n03032252
n03935335
n04482393
n03710721
n04522168
n04371430
n04579145
n03967562
n03201208
n04355338
n04328186
n04111531
n01968897
n02115913
n01518878
n04344873
n02814533
n01697457
n04371430
n01855032
n01806143
n03598930
n02971356
n03372029
n02101388
n02963159
n02391049
n01560419
n02114367
n03933933
n03259280
n01756291
n04479046
n07583066
n03792972
n02100877
n07768694
n02007558
n03937543
n03666591
n02104029
n01910747
n02095889
n04417672
n03769881
n03929855
n02641379
n02229544
n07614500
n04311174
n02361337
n07753592
n02206856
n04090263
n03444034
n04525305
n02281406
n02526121
n01807496
n02096294
n01667778
n02480855
n07711569
n02009229
n01697457
n03271574
n01687978
n02100236
n03908714
n01531178
n02364673
n03773504
n03000684
n02981792
n04485082
n01797886
n03498962
n03538406
n03530642
n01872401
n02342885
n02457408
n02480495
n02480855
n01770393
n01560419
n01665541
n04540053
n04346328
n04485082
n02091635
n03733805
n02120505
n02988304
n04049303
n02607072
n02488702
n03026506
n07718472
n03627232
n03388043
n02403003
n03627232
n03877845
n03388043
n02487347
n04005630
n01682714
n01818515
n04311174
n01664065
n04509417
n02086910
n02219486
n04392985
n04344873
n01685808
n07717410
n03384352
n01728920
n02027492
n02012849
n04336792
n02481823
n07565083
n03868863
n03179701
n02109525
n04330267
n03982430
n03272010
n04005630
n02112137
n03770439
n02088094
n02114548
n02091032
n01728572
n03240683
n02808440
n02486410
n02930766
n01737021
n03733805
n03110669
n03016953
n01748264
n02325366
n01748264
n02364673
n02017213
n04252077
n02860847
n03124043
n03461385
n02090721
n03998194
n02095570
n07753113
n04423845
n04044716
n01695060
n01632458
n02643566
n02167151
n01860187
n02403003
n02840245
n03658185
n04116512
n02096294
n01735189
n01514859
n04131690
n02978881
n03461385
n03944341
n02441942
n07753113
n01693334
n09399592
n02105412
n03400231
n04550184
n02823428
n02112137
n03920288
n04509417
n03785016
n03534580
n02066245
n02807133
n01924916
n02017213
n03796401
n02090721
n01981276
n02497673
n09399592
n01749939
n03344393
n03344393
n02490219
n04335435
n04065272
n07873807
n03314780
n03530642
n02783161
n02114548
n02319095
n03018349
n01498041
n02859443
n02096051
n04251144
n03042490
n02167151
n02096294
n09246464
n12985857
n02100583
n03240683
n02236044
n02356798
n02317335
n02859443
n02510455
n01945685
n03792972
n02011460
n03220513
n04141076
n03662601
n07745940
n02747177
n12998815
n04209133
n02097130
n01685808
n04273569
n04515003
n02094258
n02109047
n03028079
n02408429
n03777754
n02113186
n02500267
n03891251
n02112018
n04487081
n02927161
n01664065
n03534580
n03729826
n03187595
n02105505
n07718747
n02802426
n02226429
n04116512
n01756291
n01817953
n07714990
n02457408
n03109150
n04026417
n02437312
n02124075
n02113978
n03109150
n02389026
n06785654
n03089624
n03444034
n04149813
n02091032
n04376876
n02606052
n03492542
n04579145
n01496331
n01592084
n04141975
n01580077
n02112706
n03388043
n02256656
n02087394
n04179913
n07930864
n04355338
n03874293
n04033995
n02088364
n03535780
n03476991
n04336792
n03888257
n07836838
n03028079
n03877845
n03982430
n02116738
n04596742
n03843555
n15075141
n04325704
n04398044
n02134084
n02132136
n03602883
n01955084
n02268853
n02490219
n04044716
n02492660
n01770393
n03447447
n07871810
n01739381
n03933933
n02110958
n04517823
n10565667
n02087046
n02909870
n07747607
n13037406
n03743016
n02113023
n07716358
n01828970
n04579145
n04482393
n02169497
n04371430
n01751748
n01632777
n02106382
n01697457
n04074963
n03062245
n02607072
n03868863
n04409515
n01829413
n04254680
n01728920
n02802426
n03666591
n01984695
n02708093
n02090721
n02089973
n02099849
n02134084
n13133613
n03733281
n02268853
n04347754
n02115641
n04346328
n02769748
n01665541
n03961711
n02391049
n01675722
n02017213
n03045698
n02356798
n02977058
n01873310
n02276258
n03692522
n02107908
n03954731
n04389033
n02226429
n03676483
n02107908
n01484850
n01774750
n02979186
n03761084
n03623198
n03445777
n03770679
n01728572
n03495258
n04613696
n02441942
n03594734
n02114855
n02883205
n04311174
n04532670
n02134418
n03717622
n02859443
n03930313
n03126707
n03977966
n03983396
n04456115
n07760859
n01532829
n04208210
n03991062
n04131690
n03649909
n03425413
n02017213
n02974003
n03958227
n02408429
n01614925
n03884397
n04429376
n01749939
n01756291
n01498041
n03992509
n03532672
n04286575
n03376595
n02108000
n02108551
n07565083
n03792782
n02089867
n07684084
n03404251
n03871628
n04311004
n13040303
n02111129
n02422699
n03733281
n04153751
n04179913
n02268443
n02443114
n03485794
n07579787
n02110063
n01616318
n03871628
n07697537
n02114367
n02091134
n02883205
n02814533
n03871628
n02105056
n02865351
n03991062
n02104365
n04275548
n03929660
n03814639
n02834397
n03792782
n07730033
n02445715
n02804610
n02119789
n04040759
n02415577
n02206856
n02114367
n04493381
n02276258
n03991062
n02236044
n04332243
n07760859
n02504013
n02090379
n02445715
n10565667
n04487081
n09472597
n04398044
n01873310
n02087046
n03788365
n02097658
n03467068
n07717410
n03642806
n03063689
n01914609
n03792782
n12267677
n03220513
n02119789
n02950826
n02113712
n03697007
n04009552
n03876231
n10148035
n03590841
n03461385
n02814860
n03729826
n03255030
n09288635
n02094114
n04550184
n02115913
n01990800
n02112350
n12998815
n02672831
n01860187
n04493381
n02979186
n02441942
n02128757
n01883070
n03803284
n03417042
n02992211
n04462240
n03759954
n01984695
n07584110
n04118538
n02105412
n03218198
n02835271
n03314780
n04070727
n03325584
n01742172
n04266014
n03447447
n02701002
n01877812
n03062245
n01592084
n01924916
n03781244
n01798484
n02730930
n02417914
n02791124
n02412080
n09256479
n04008634
n02493793
n07753275
n03980874
n02280649
n03400231
n03476991
n02787622
n02086240
n04041544
n04370456
n04591713
n03062245
n04254120
n02125311
n03920288
n02088364
n02002724
n02107683
n01498041
n04550184
n01984695
n04584207
n02971356
n03961711
n02447366
n01855672
n03126707
n03481172
n02640242
n03376595
n02814860
n01498041
n04442312
n03776460
n01882714
n04485082
n03201208
n01978455
n04456115
n03467068
n02086240
n02256656
n04517823
n03291819
n04263257
n02106662
n02823750
n03527444
n01807496
n02112018
n02860847
n01980166
n01514859
n02879718
n02128925
n03944341
n07831146
n04049303
n04004767
n04254120
n02108422
n07871810
n01775062
n02808304
n03929660
n02667093
n07716906
n03697007
n12057211
n03196217
n01855032
n02097047
n02444819
n07711569
n02071294
n06596364
n03584829
n02025239
n09256479
n02484975
n02840245
n02814533
n03188531
n03891332
n01560419
n02110185
n01685808
n03207941
n02096294
n02672831
n04311004
n04265275
n07730033
n04296562
n02167151
n02110341
n03832673
n03709823
n02115641
n02510455
n04325704
n02129604
n04296562
n13037406
n04554684
n03706229
n02500267
n02101388
n02206856
n02111889
n04442312
n02102973
n02098105
n02906734
n01770081
n13054560
n04325704
n02909870
n02927161
n03976467
n03014705
n02483362
n02012849
n02321529
n03841143
n04389033
n02094258
n15075141
n03733805
n03958227
n03792972
n04542943
n02979186
n07614500
n03666591
n03929855
n07802026
n02974003
n02319095
n02804414
n04325704
n02109525
n02999410
n02120079
n04404412
n01871265
n03871628
n03337140
n01667778
n01819313
n04532670
n02319095
n03457902
n02978881
n02119789
n04026417
n01693334
n01744401
n03825788
n04273569
n03942813
n01984695
n02727426
n01820546
n04487081
n03956157
n04465501
n04579145
n02117135
n04447861
n03085013
n02134084
n03769881
n03717622
n02105251
n03761084
n02088466
n01872401
n02807133
n03775546
n03590841
n03617480
n01677366
n02119789
n02226429
n04409515
n03995372
n02013706
n07697537
n02025239
n02114712
n03394916
n02494079
n01968897
n03977966
n11879895
n03492542
n03843555
n03742115
n04208210
n02423022
n04515003
n13054560
n02483708
n04507155
n07717410
n03255030
n03133878
n03877845
n04344873
n04540053
n09399592
n04517823
n04086273
n02978881
n02115641
n04461696
n02102973
n02277742
n04399382
n04330267
n03661043
n13037406
n04604644
n03958227
n02397096
n04125021
n03445924
n03492542
n02092339
n03787032
n03791053
n02804414
n01753488
n07754684
n01496331
n01990800
n04356056
n04065272
n01756291
n04136333
n03662601
n02006656
n02326432
n02018795
n03777568
n07932039
n04265275
n02268853
n03649909
n04548362
n03538406
n02104365
n03062245
n04131690
n01955084
n04606251
n04037443
n01990800
n02892767
n02113023
n03873416
n04254680
n02444819
n04606251
n02091032
n03623198
n01693334
n04162706
n04476259
n01773157
n02510455
n01616318
n02782093
n04209133
n03777568
n12998815
n04417672
n12620546
n04517823
n02259212
n02727426
n02797295
n03062245
n02794156
n04347754
n03417042
n02123159
n03530642
n07715103
n07716906
n03874599
n04179913
n01877812
n02101388
n02233338
n04141327
n02666196
n04131690
n03032252
n02114367
n03045698
n02090721
n02815834
n07873807
n02965783
n04429376
n04604644
n01855032
n02018795
n03729826
n04404412
n07615774
n02013706
n01955084
n01774750
n01644373
n02096177
n02114712
n03891332
n03482405
n03916031
n02099849
n02480855
n13044778
n02226429
n03670208
n13133613
n03670208
n04125021
n02276258
n03131574
n03929855
n02687172
n02443484
n02101006
n04367480
n02109525
n04049303
n02096051
n03929660
n02776631
n02027492
n01795545
n02109525
n03584829
n03595614
n02992211
n04243546
n03404251
n04023962
n03085013
n02128385
n02111129
n04613696
n04152593
n02978881
n02909870
n10565667
n03467068
n02280649
n03763968
n02056570
n02504458
n03958227
n03874599
n02133161
n03871628
n02099849
n03179701
n01985128
n02112137
n02098413
n01945685
n02105505
n03796401
n04152593
n02410509
n01665541
n04147183
n02655020
n02233338
n03297495
n01776313
n01945685
n03710193
n04462240
n03956157
n02229544
n02782093
n04355338
n03000684
n04542943
n02111277
n04505470
n03196217
n02112706
n03590841
n03197337
n02526121
n04522168
n01877812
n03617480
n02870880
n04591713
n06359193
n02110958
n07892512
n03796401
n03047690
n01518878
n04263257
n01910747
n07753275
n01882714
n04033901
n01784675
n02489166
n03534580
n04447861
n02403003
n07717556
n02027492
n03710721
n02281787
n02807133
n03124170
n02396427
n02981792
n04613696
n02481823
n04522168
n03930313
n10565667
n03776460
n03180011
n04235860
n02397096
n03016953
n03838899
n09193705
n04404412
n04336792
n02978881
n07720875
n04286575
n12985857
n07613480
n03063689
n02206856
n02011460
n02769748
n02317335
n02749479
n01770081
n02422699
n02088094
n02906734
n06785654
n04152593
n03916031
n02113186
n02115913
n02791124
n03764736
n02356798
n02979186
n02749479
n03630383
n03259280
n04023962
n04026417
n02909870
n03404251
n03868863
n03495258
n03899768
n03733805
n02823750
n02086079
n04356056
n03196217
n01806143
n07718472
n04335435
n03937543
n04070727
n01631663
n02643566
n11879895
n03690938
n02093428
n02105641
n02091134
n03131574
n03485407
n01677366
n02099601
n02123045
n02443114
n02134418
n04370456
n01883070
n04141076
n03467068
n02105162
n02226429
n02397096
n02692877
n02447366
n13037406
n09332890
n04482393
n03877845
n02102480
n10565667
n02791270
n02669723
n02808304
n04548362
n03658185
n02489166
n02098286
n07615774
n04532106
n01807496
n02992529
n01694178
n04428191
n03445924
n07742313
n04037443
n03887697
n01630670
n02099267
n02123597
n01981276
n02825657
n02106662
n03657121
n03249569
n03218198
n04152593
n12985857
n03160309
n02939185
n01817953
n01773157
n02999410
n03482405
n04200800
n02488702
n03272562
n03992509
n03544143
n04141327
n02099712
n03016953
n02107142
n01751748
n02009912
n02087394
n04355933
n02117135
n13054560
n02006656
n03733805
n03710193
n04141076
n01608432
n09835506
n04398044
n07579787
n02099712
n02123597
n07836838
n04131690
n04090263
n02981792
n02018795
n03602883
n02074367
n02443484
n02871525
n02457408
n02799071
n03764736
n03804744
n02190166
n03769881
n04399382
n04553703
n02058221
n02981792
n01692333
n01631663
n03868242
n06785654
n03977966
n04423845
n02791124
n02128385
n01664065
n01756291
n07802026
n02979186
n02814533
n12768682
n04201297
n07742313
n02489166
n02120079
n03743016
n03482405
n01795545
n02108551
n02096051
n02951358
n02169497
n04532106
n02268443
n03676483
n01798484
n02113712
n07697313
n02112018
n04525038
n03982430
n04239074
n02123597
n03063689
n02091134
n02138441
n03255030
n02012849
n02879718
n02111277
n02088466
n02105056
n01776313
n04584207
n02095314
n01806567
n01770393
n03271574
n03599486
n10148035
n03627232
n04275548
n03063689
n03016953
n01990800
n04141076
n03131574
n01968897
n02093256
n01774750
n01855672
n04435653
n03127747
n03657121
n03529860
n07730033
n02837789
n01828970
n02002556
n02132136
n03873416
n03424325
n04259630
n02097130
n03272562
n03496892
n04525305
n03916031
n01644373
n04591713
n02504013
n02091831
n01847000
n03000684
n01770393
n03763968
n02093754
n03063689
n02085782
n03290653
n03777568
n07718472
n02090721
n02089078
n03792782
n13037406
n02111889
n04550184
n03063599
n04229816
n04238763
n01693334
n03743016
n02108551
n04604644
n02281787
n02119789
n02808304
n09332890
n02106550
n07802026
n03249569
n07836838
n03775546
n04204347
n04592741
n01498041
n03929660
n02077923
n02108089
n02094433
n02107574
n13133613
n02749479
n03249569
n02641379
n03804744
n02321529
n01797886
n02690373
n13054560
n02950826
n01737021
n01689811
n01664065
n07693725
n02342885
n02169497
n09288635
n02087394
n03376595
n02120505
n03938244
n03345487
n02500267
n01797886
n04443257
n03492542
n02094258
n03721384
n13044778
n03868863
n07711569
n02236044
n04081281
n03838899
n04596742
n02111500
n04251144
n02100583
n07714571
n04238763
n02105412
n02443484
n04019541
n03394916
n03776460
n03000134
n02109525
n02109525
n02870880
n03393912
n03197337
n04081281
n03763968
n01688243
n02110806
n02834397
n02939185
n02279972
n03888605
n02268443
n02988304
n04310018
n04285008
n09246464
n02389026
n01558993
n01955084
n01930112
n01644373
n12620546
n02093256
n09256479
n02002724
n03160309
n04204238
n01753488
n03393912
n01641577
n02100735
n04584207
n02100236
n02879718
n02988304
n02105162
n02110806
n04258138
n03590841
n02927161
n01498041
n03720891
n04515003
n02134418
n03014705
n03344393
n02783161
n04443257
n02492660
n03218198
n01755581
n02090622
n03179701
n04252225
n04417672
n04037443
n04065272
n03721384
n02089973
n02091635
n03804744
n09288635
n04613696
n03796401
n07714990
n01770393
n01742172
n02128385
n03492542
n03916031
n01883070
n01739381
n02980441
n02966687
n04486054
n04443257
n01984695
n03026506
n02808440
n02977058
n02114367
n02094114
n02326432
n03016953
n02106166
n03710193
n01644373
n02091134
n03259280
n03018349
n03791053
n04008634
n02095570
n07718747
n03376595
n07717410
n02894605
n07583066
n02281787
n03483316
n02105505
n03837869
n04591713
n02749479
n01514668
n02090379
n03424325
n03642806
n02089973
n01532829
n02105641
n04591713
n01819313
n02127052
n03124043
n03649909
n02113186
n04067472
n02114548
n03791053
n03792782
n02093991
n03530642
n02397096
n02281787
n03661043
n03495258
n02174001
n07880968
n03459775
n02100236
n02727426
n01820546
n02988304
n02112350
n03476684
n04238763
n02028035
n02120505
n01704323
n03047690
n02268443
n02443114
n02112137
n02879718
n01697457
n04264628
n03314780
n03649909
n02133161
n07730033
n03670208
n02835271
n03584829
n02326432
n03916031
n03485794
n03314780
n02342885
n02105412
n02321529
n01669191
n07742313
n03045698
n02510455
n04201297
n03710721
n02966687
n02094258
n02109047
n03376595
n03017168
n01924916
n02017213
n02086079
n03666591
n04465501
n02981792
n03832673
n01806567
n02793495
n02110806
n01833805
n01622779
n02493509
n03495258
n03485407
n02051845
n04141975
n02909870
n01698640
n02096294
n02009912
n02097658
n02018207
n02804414
n03095699
n01665541
n03532672
n02102177
n01806143
n01847000
n07693725
n02268853
n03530642
n03908618
n03781244
n04286575
n02111129
n04273569
n04590129
n02100583
n03916031
n04404412
n02708093
n03160309
n07579787
n03476991
n04204238
n03344393
n09193705
n01665541
n01968897
n03180011
n02948072
n01871265
n01843383
n02494079
n02105505
n02356798
n02769748
n01955084
n01990800
n02113712
n03976657
n03633091
n03937543
n04252225
n02442845
n03461385
n03014705
n01644900
n03924679
n04152593
n02974003
n02804414
n03290653
n04344873
n02326432
n04371430
n03485794
n02107142
n03483316
n04330267
n01883070
n02105505
n03062245
n03924679
n02326432
n03761084
n02104029
n02074367
n04023962
n02123597
n04264628
n03902125
n02077923
n02927161
n03272562
n04399382
n07875152
n03478589
n03680355
n02093428
n03903868
n02396427
n01753488
n01914609
n04487081
n03372029
n01753488
n02096585
n07747607
n01601694
n03146219
n03733131
n03124043
n02090622
n03063599
n03599486
n03976657
n07880968
n02086910
n02494079
n02100735
n01693334
n02966193
n02089973
n03866082
n02640242
n02094433
n03947888
n01592084
n04039381
n04263257
n04326547
n02841315
n04009552
n02099712
n03271574
n02701002
n03791053
n04252077
n07717410
n02027492
n02097474
n02113799
n01773797
n11939491
n03494278
n02971356
n02509815
n02107683
n04328186
n03998194
n03938244
n03721384
n02089973
n07684084
n04613696
n03476991
n03444034
n03272010
n02219486
n07613480
n03899768
n01770393
n04532106
n04264628
n03314780
n02422106
n01689811
n04154565
n03991062
n02088094
n03384352
n02088632
n03146219
n02017213
n02123597
n01806567
n01740131
n01829413
n04004767
n04355338
n04044716
n01735189
n03218198
n02108422
n07831146
n02110185
n07932039
n03658185
n01773797
n09288635
n02133161
n01820546
n09332890
n09468604
n03935335
n04562935
n03908714
n02167151
n03216828
n02497673
n04493381
n03452741
n02117135
n04131690
n02120505
n03743016
n02364673
n03980874
n04462240
n02804414
n02051845
n02808440
n02172182
n09428293
n02093428
n03220513
n02699494
n03803284
n03804744
n02514041
n04099969
n04296562
n03388549
n12998815
n03933933
n04208210
n02410509
n04482393
n04487081
n02486261
n02113799
n04228054
n09835506
n04067472
n01664065
n04428191
n01740131
n02493509
n11939491
n03042490
n03584254
n09468604
n04120489
n02483708
n01498041
n03786901
n04523525
n02165105
n03888605
n02115913
n04201297
n04501370
n04037443
n02172182
n03793489
n03724870
n02391049
n04069434
n02807133
n02056570
n07584110
n04398044
n04398044
n03854065
n02655020
n02107312
n04366367
n04086273
n03485407
n02104029
n04251144
n03627232
n02132136
n02979186
n02317335
n03201208
n04479046
n03452741
n04258138
n07590611
n04149813
n04355933
n03207941
n04479046
n02441942
n03866082
n07583066
n03445777
n03017168
n02672831
n04204238
n04326547
n02113712
n01514668
n02415577
n03706229
n02981792
n02840245
n04389033
n03992509
n02403003
n04005630
n03637318
n04371430
n04347754
n02100583
n01518878
n02319095
n02492035
n04597913
n02206856
n02025239
n04591157
n01773549
n04081281
n07697537
n01682714
n04069434
n02085782
n02655020
n07714571
n01614925
n04008634
n07873807
n04131690
n03680355
n02422699
n07753592
n03840681
n06785654
n01530575
n02096051
n03764736
n02108089
n04044716
n03384352
n01818515
n02056570
n02097130
n01665541
n01688243
n04131690
n04606251
n01616318
n01688243
n02113186
n04613696
n01737021
n02776631
n03995372
n01806143
n01753488
n04037443
n02879718
n04009552
n02110806
n04332243
n04560804
n03884397
n02110958
n03888605
n01685808
n07565083
n02883205
n02492660
n01798484
n03100240
n02088094
n04229816
n02098286
n02841315
n03017168
n04120489
n07718747
n03933933
n04355933
n04483307
n02107142
n01744401
n02093991
n02112137
n02085936
n03929855
n02051845
n02091831
n01740131
n02948072
n02112706
n04584207
n04070727
n03584254
n04235860
n01749939
n02086079
n03424325
n04485082
n02165456
n03259280
n02132136
n03445924
n12768682
n03325584
n01644373
n02361337
n04523525
n07753592
n04067472
n04579145
n07880968
n02231487
n04486054
n03658185
n04429376
n03126707
n02085620
n02104365
n02692877
n04557648
n04606251
n03888605
n02105412
n06785654
n02101388
n03393912
n04370456
n12985857
n07871810
n03742115
n04238763
n02101006
n02090379
n09399592
n07930864
n02123597
n03494278
n02363005
n07892512
n02776631
n03785016
n07930864
n02123394
n01855032
n02883205
n02091831
n03868242
n02930766
n01945685
n03594734
n02493793
n02398521
n04501370
n03417042
n02815834
n03710637
n02100583
n02497673
n02894605
n03895866
n01756291
n02091032
n02120505
n03980874
n07745940
n02769748
n04208210
n01990800
n02397096
n01692333
n03814639
n01855672
n04154565
n02317335
n02815834
n07693725
n03720891
n02110627
n13037406
n02391049
n04131690
n01930112
n07760859
n03770679
n02111500
n04252225
n01877812
n03180011
n13044778
n02492660
n04273569
n04004767
n04238763
n03706229
n04357314
n01641577
n04311174
n03109150
n03866082
n03933933
n02412080
n03207743
n03218198
n07716906
n03218198
n02667093
n02799071
n02346627
n03874293
n01537544
n01728572
n03804744
n01855672
n01744401
n02747177
n02939185
n02676566
n02950826
n02097298
n01819313
n02276258
n09428293
n01682714
n03710637
n03920288
n02672831
n02447366
n02860847
n02412080
n04254680
n01692333
n02807133
n03394916
n13133613
n01806567
n07720875
n07836838
n02088094
n02102040
n01580077
n03775546
n04238763
n04118776
n04540053
n02096294
n02441942
n03781244
n02093256
n02988304
n02423022
n07871810
n01704323
n02132136
n01560419
n02206856
n01833805
n02980441
n11879895
n07875152
n03930313
n03042490
n03954731
n03933933
n03126707
n03461385
n02114855
n03929660
n04550184
n02783161
n03944341
n07693725
n02123045
n09288635
n03196217
n03297495
n02091831
n03670208
n04487394
n02105251
n02454379
n02099849
n04409515
n01592084
n02092002
n07590611
n03992509
n02412080
n03075370
n02447366
n02669723
n12985857
n03584254
n01753488
n02708093
n02497673
n04069434
n01484850
n07873807
n03492542
n03457902
n03670208
n04376876
n01697457
n02101556
n11879895
n02071294
n03710193
n03961711
n03930313
n02793495
n12768682
n03657121
n04596742
n04204238
n02093754
n03961711
n09472597
n03379051
n02417914
n02107312
n02489166
n01828970
n03884397
n04251144
n03792782
n02782093
n01820546
n02981792
n06359193
n03443371
n01735189
n04501370
n03673027
n03770679
n03085013
n02112706
n01978287
n02794156
n02087394
n01443537
n04286575
n02123394
n04264628
n03337140
n03710721
n03947888
n02514041
n02328150
n02110185
n03992509
n02965783
n02096177
n01824575
n03929855
n02815834
n02643566
n01744401
n02672831
n02447366
n06874185
n04325704
n02317335
n03126707
n02056570
n02457408
n03443371
n04125021
n03866082
n03127747
n04311004
n02134084
n01910747
n07716358
n02134418
n02071294
n04335435
n03594734
n06359193
n04336792
n02097474
n07717410
n02092339
n04376876
n03785016
n02087394
n02825657
n03208938
n03720891
n04366367
n02480855
n03124043
n04067472
n03180011
n04049303
n04243546
n04423845
n03127747
n02259212
n03697007
n04136333
n04590129
n03942813
n02268443
n04008634
n04254680
n04125021
n04040759
n03924679
n04485082
n02410509
n04259630
n03584829
n03196217
n03776460
n01774750
n09421951
n07802026
n04399382
n04536866
n04525038
n02091467
n03902125
n03544143
n02791270
n03888605
n03376595
n02397096
n03777754
n04592741
n03047690
n07693725
n02113978
n04398044
n02783161
n04596742
n03785016
n01582220
n02791270
n02791124
n02129165
n03404251
n03670208
n03903868
n02978881
n02094433
n04252225
n02096177
n03496892
n03000684
n03983396
n02111277
n03720891
n03782006
n01829413
n04153751
n03271574
n03538406
n03970156
n03924679
n02088094
n01806143
n02113978
n03207941
n03347037
n03633091
n03404251
n04579145
n02276258
n02086240
n02799071
n03871628
n02087394
n02264363
n03478589
n03788365
n02097658
n02093647
n07920052
n03788195
n03720891
n07717556
n02113023
n01855032
n07802026
n02037110
n03832673
n04350905
n07613480
n02814860
n03777754
n03218198
n02441942
n02115913
n02109961
n04347754
n03841143
n02786058
n02690373
n07697313
n07613480
n01873310
n03874599
n02113624
n02992211
n07871810
n03388183
n01644900
n04067472
n04039381
n02361337
n04039381
n04370456
n01843065
n01877812
n02488291
n03692522
n02669723
n03018349
n03207743
n02096177
n01514859
n02105056
n03495258
n03207743
n04523525
n03259280
n03127747
n02988304
n02096437
n02087394
n04370456
n01882714
n01644900
n11879895
n03814639
n03763968
n03788365
n04579145
n03837869
n04429376
n02219486
n03983396
n04591157
n07693725
n02281787
n01829413
n04606251
n02795169
n03467068
n02486410
n04505470
n02488702
n02108089
n02783161
n06596364
n01558993
n07871810
n02655020
n02256656
n03290653
n03131574
n01829413
n02930766
n03529860
n01871265
n01675722
n02840245
n04392985
n04286575
n03404251
n02823428
n02951585
n02077923
n03000247
n01843065
n02804414
n04525038
n01749939
n03095699
n04552348
n03532672
n03527444
n03947888
n02667093
n02346627
n01667114
n07749582
n02128385
n02093754
n02092002
n02782093
n04310018
n02104365
n02134418
n03769881
n02776631
n01984695
n02097658
n02095570
n02321529
n02108000
n02098413
n03623198
n03100240
n03109150
n02168699
n03017168
n01819313
n02117135
n03871628
n03924679
n04399382
n15075141
n03884397
n03425413
n03584829
n03976467
n02979186
n02124075
n02869837
n03998194
n02025239
n01558993
n04044716
n02107908
n04404412
n04266014
n03944341
n01751748
n02025239
n04040759
n02102973
n03930630
n09246464
n02174001
n02389026
n03764736
n01795545
n02790996
n02526121
n03133878
n03124043
n02979186
n02093754
n03598930
n03250847
n02134084
n03733281
n02226429
n04019541
n02105855
n02256656
n02787622
n04435653
n03599486
n03733131
n02325366
n03259280
n03028079
n03476684
n03133878
n03590841
n03197337
n04525038
n03494278
n04270147
n01860187
n02086910
n02457408
n03627232
n03133878
n03947888
n02823428
n02097298
n02108000
n04540053
n03141823
n03201208
n03476991
n02113023
n03777754
n03854065
n02415577
n02974003
n01820546
n02087046
n04149813
n04332243
n02090379
n04509417
n07760859
n03637318
n02672831
n03141823
n03538406
n03201208
n04286575
n02097658
n03873416
n04515003
n09193705
n02939185
n03933933
n01749939
n03483316
n02098105
n02107908
n02130308
n02105641
n04458633
n03692522
n02777292
n07565083
n02708093
n02783161
n04037443
n04259630
n02112706
n07802026
n01729977
n02168699
n04192698
n04209133
n07590611
n01729322
n02028035
n04579432
n01518878
n02443484
n07742313
n04376876
n04019541
n02791270
n02906734
n02264363
n02233338
n06874185
n04069434
n13044778
n02981792
n02117135
n03775071
n03249569
n04239074
n03868242
n02099267
n03467068
n02791270
n01632777
n01817953
n04325704
n01582220
n04081281
n03838899
n02865351
n02445715
n04009552
n02089867
n02256656
n01860187
n02815834
n04447861
n03786901
n04120489
n03584254
n03255030
n02006656
n03187595
n04152593
n03467068
n03942813
n03947888
n07831146
n02090721
n04532670
n03018349
n02093991
n01917289
n01729322
n02108422
n03197337
n02951585
n04263257
n07932039
n01537544
n03495258
n01755581
n02096051
n01737021
n04120489
n02111500
n03895866
n02106166
n04350905
n04081281
n02791124
n04501370
n02115913
n02088466
n07614500
n02410509
n01740131
n03483316
n02701002
n03792782
n03995372
n03016953
n02536864
n12144580
n02011460
n04355933
n02423022
n03658185
n03344393
n02096177
n03692522
n04423845
n02110185
n02177972
n03197337
n03924679
n01749939
n02229544
n03000247
n01744401
n02321529
n03874293
n03481172
n01872401
n02112018
n02492035
n03670208
n04372370
n01697457
n02788148
n01796340
n03272562
n02098286
n03781244
n03666591
n13037406
n04532670
n03394916
n01744401
n02114855
n04542943
n02860847
n02268443
n04254120
n02088466
n11939491
n03788195
n07860988
n03832673
n02134084
n02092339
n02797295
n04252077
n04591713
n02096177
n03134739
n03982430
n02107574
n02233338
n07697313
n03891332
n03325584
n03208938
n01518878
n02509815
n03710721
n04487394
n03014705
n02099429
n02834397
n04141975
n01978455
n03891332
n02870880
n04265275
n02497673
n01955084
n02963159
n02099712
n02793495
n03691459
n02085782
n03991062
n02088094
n07711569
n02346627
n07695742
n03218198
n01784675
n02799071
n03944341
n03179701
n02415577
n04370456
n04443257
n04254777
n01496331
n02699494
n01677366
n02514041
n02086240
n02107908
n11879895
n03770679
n02749479
n03803284
n04485082
n03201208
n03045698
n03944341
n01930112
n02113186
n04286575
n03706229
n02871525
n01774384
n01855032
n02109047
n02114548
n12998815
n03218198
n03216828
n04371774
n02114712
n04548280
n02276258
n04033995
n03393912
n03980874
n04389033
n07583066
n01704323
n03445924
n02018795
n03445777
n02098286
n03838899
n01689811
n03666591
n03000247
n02099712
n03483316
n04505470
n02490219
n04239074
n01531178
n02116738
n01950731
n02113624
n04204238
n02276258
n07715103
n03026506
n02108551
n02127052
n02088466
n02093256
n02102040
n03976657
n04532670
n03776460
n03220513
n03903868
n03792972
n03529860
n02009229
n02113624
n02447366
n03461385
n02102318
n04263257
n02114855
n02676566
n03425413
n03538406
n03666591
n03272010
n07768694
n04392985
n04330267
n03026506
n07730033
n02094258
n04515003
n04265275
n13044778
n02965783
n02120505
n02058221
n03314780
n02793495
n02708093
n03633091
n03014705
n01665541
n02526121
n04067472
n04428191
n07836838
n02177972
n01817953
n04296562
n04099969
n03956157
n02114367
n02091635
n02113978
n03838899
n02437616
n04370456
n02423022
n02112706
n02096585
n02497673
n04505470
n02098286
n02319095
n04560804
n03976657
n04330267
n02481823
n04532670
n12057211
n03584254
n04065272
n04596742
n02823428
n01494475
n03133878
n07579787
n04141975
n03794056
n03000684
n04067472
n02108422
n04254777
n01616318
n03814906
n03444034
n04277352
n04612504
n02917067
n03729826
n02095314
n03796401
n04486054
n03637318
n02786058
n03661043
n03400231
n02112350
n03980874
n04251144
n01978287
n03483316
n03633091
n04597913
n02093647
n02097474
n02097130
n03998194
n01689811
n04482393
n02231487
n04328186
n03188531
n02490219
n04579432
n09256479
n03770439
n07697537
n02389026
n04252225
n03594945
n04310018
n01978455
n03803284
n03063689
n01924916
n03240683
n03837869
n02114712
n02999410
n04371774
n03676483
n02091467
n03196217
n03347037
n04487081
n03888257
n03787032
n01631663
n03447721
n02086079
n01644373
n09468604
n07613480
n04356056
n04493381
n06785654
n03179701
n01675722
n04429376
n02966193
n03584254
n03673027
n03223299
n03443371
n02106382
n04125021
n03786901
n04467665
n03498962
n03662601
n02088632
n02510455
n12998815
n02747177
n04252077
n12267677
n04501370
n02113978
n03141823
n01817953
n03126707
n03110669
n02910353
n03417042
n09193705
n02102318
n01807496
n02268443
n01632777
n02814533
n07875152
n01484850
n02092339
n02791124
n04417672
n03160309
n02134418
n03483316
n01829413
n02095889
n07693725
n04579145
n03942813
n02091134
n04209239
n07584110
n04590129
n03873416
n02105056
n02488291
n04136333
n01855032
n04525305
n04039381
n02025239
n03476991
n01614925
n01735189
n02894605
n04505470
n02127052
n12267677
n02865351
n03481172
n02445715
n02892767
n02974003
n03249569
n01860187
n01687978
n03733805
n03445777
n02676566
n07734744
n03544143
n03676483
n03877845
n03372029
n03977966
n02090721
n03676483
n02655020
n02134418
n02364673
n02110627
n03527444
n04317175
n02280649
n02788148
n02119789
n02804610
n04435653
n02120505
n02802426
n02606052
n07717410
n03290653
n03017168
n02087046
n02093647
n04259630
n01819313
n03467068
n02113712
n03935335
n02927161
n02113186
n03673027
n04200800
n04192698
n01518878
n03417042
n02093754
n02088364
n02749479
n01688243
n04070727
n04604644
n02457408
n06874185
n04483307
n02422106
n01692333
n02834397
n03485794
n02219486
n01950731
n02028035
n01644900
n03125729
n12144580
n01682714
n03843555
n03602883
n02018795
n03447447
n02865351
n03223299
n03355925
n04592741
n02106662
n02033041
n01820546
n03761084
n02165105
n02397096
n02101556
n04328186
n03933933
n03355925
n04328186
n03950228
n03134739
n03535780
n01748264
n04330267
n02699494
n01985128
n02978881
n04141327
n02403003
n02120079
n07579787
n02317335
n02509815
n04146614
n01944390
n04467665
n02927161
n12620546
n02098286
n01914609
n02486410
n02963159
n03085013
n04525305
n04141076
n01742172
n01798484
n02102480
n01729322
n03938244
n02096585
n04099969
n02437616
n03729826
n01829413
n03527444
n04086273
n02013706
n03594734
n02105855
n04536866
n02489166
n02093991
n02109525
n01930112
n01580077
n02457408
n04328186
n01751748
n03026506
n04235860
n02113023
n03063689
n01882714
n03930630
n03710721
n04264628
n04081281
n04116512
n04044716
n01697457
n04330267
n02860847
n02107908
n04399382
n03873416
n04509417
n03792972
n02102318
n01883070
n07742313
n02033041
n12620546
n03995372
n02086646
n03485794
n07747607
n02098413
n03877472
n02106550
n04263257
n02134418
n04263257
n04606251
n01630670
n02280649
n02504013
n02871525
n04081281
n03782006
n01514668
n02396427
n02093428
n02979186
n04254777
n04009552
n03602883
n07747607
n04562935
n02033041
n04505470
n02906734
n03045698
n01629819
n04613696
n07717556
n02487347
n01917289
n01817953
n07753275
n02457408
n02992529
n01742172
n03950228
n03584254
n02526121
n01494475
n02085936
n02391049
n04355933
n03950228
n03584829
n02128385
n01872401
n02091467
n03481172
n04204347
n03899768
n02107312
n02692877
n04606251
n03770679
n07749582
n01558993
n02099712
n03792782
n03791053
n04317175
n02086079
n02480855
n01682714
n04509417
n03792972
n02108551
n02606052
n03995372
n04336792
n02490219
n07695742
n12998815
n03759954
n04265275
n02971356
n03661043
n02120505
n01530575
n03690938
n02422106
n02120079
n07873807
n04579432
n03930313
n09288635
n02509815
n03998194
n03791053
n01930112
n03991062
n02125311
n02909870
n07718747
n01729322
n02133161
n03763968
n03944341
n01943899
n02445715
n04443257
n02109047
n04141327
n03041632
n01592084
n02906734
n01828970
n03388549
n01917289
n02859443
n02110958
n03956157
n02797295
n02100583
n02776631
n03485407
n04285008
n03623198
n01753488
n03146219
n03535780
n12768682
n12768682
n02100583
n03976657
n04251144
n03444034
n03980874
n02066245
n01692333
n03223299
n04461696
n09835506
n02206856
n13040303
n02088094
n02487347
n03781244
n03832673
n02917067
n01806567
n03776460
n04208210
n04462240
n02093428
n02123045
n03047690
n04201297
n02895154
n04252225
n03837869
n01877812
n03961711
n01753488
n02105505
n02112018
n02110627
n02389026
n02782093
n02099712
n03742115
n04141076
n01735189
n02879718
n03594734
n04462240
n02788148
n02106166
n03991062
n01820546
n04259630
n04310018
n15075141
n03717622
n03595614
n03598930
n02132136
n03630383
n03692522
n04591157
n04154565
n02346627
n02687172
n07693725
n02514041
n02128757
n02095314
n01855032
n03942813
n03485407
n13133613
n03062245
n03447447
n02895154
n04380533
n02364673
n03146219
n02109961
n02113799
n02859443
n01558993
n02119789
n01930112
n04275548
n03602883
n02497673
n02037110
n03026506
n07930864
n04330267
n02480495
n02107683
n03786901
n01917289
n03133878
n04532670
n01775062
n03633091
n03777568
n01945685
n03109150
n03792972
n02895154
n04548362
n02114855
n03775071
n07717556
n02483362
n02909870
n02027492
n07584110
n03594734
n03642806
n03877845
n03379051
n02927161
n04417672
n04009552
n04004767
n02799071
n03874599
n01883070
n03933933
n03450230
n01698640
n03146219
n02113023
n03379051
n03160309
n01968897
n03976467
n04328186
n02018207
n02123597
n02791124
n01729977
n04228054
n02966687
n02094258
n03425413
n01819313
n02100236
n02389026
n02108551
n02085620
n03791053
n03916031
n01871265
n01698640
n02100877
n03146219
n03903868
n03803284
n04204238
n04037443
n02128925
n03131574
n02823428
n09421951
n03884397
n07742313
n03871628
n01770081
n04540053
n03000134
n02443114
n04476259
n04317175
n02091032
n07248320
n04146614
n04532106
n07920052
n02484975
n04612504
n01530575
n03929660
n04540053
n01796340
n01828970
n04162706
n03481172
n03983396
n02777292
n02018795
n02869837
n02835271
n03201208
n01518878
n12057211
n03787032
n02641379
n04554684
n02791124
n01819313
n02389026
n04090263
n03908618
n03792972
n02484975
n07590611
n01530575
n12985857
n09229709
n01755581
n03627232
n02123159
n03775546
n04596742
n04346328
n02669723
n07753592
n07613480
n03884397
n02892201
n01924916
n04467665
n02488291
n03868242
n02356798
n04265275
n02077923
n02102973
n03457902
n02190166
n03259280
n02105162
n02091831
n02256656
n01872401
n02493793
n02408429
n02106550
n03929660
n03325584
n04332243
n04270147
n01630670
n03250847
n02114367
n02106166
n03134739
n02814860
n02110063
n03903868
n02395406
n04311174
n03532672
n02840245
n01986214
n04429376
n02119022
n03218198
n02783161
n03770439
n02089867
n02966687
n03658185
n09193705
n03085013
n02971356
n04049303
n11939491
n02105641
n03494278
n02364673
n01534433
n01735189
n02105855
n03743016
n07718472
n02113799
n04443257
n02096294
n02128925
n02264363
n03796401
n02444819
n03770679
n02093647
n03483316
n02107574
n04127249
n02978881
n13054560
n02823750
n03794056
n03000684
n01496331
n01807496
n02791270
n01860187
n03218198
n02364673
n03498962
n04153751
n01688243
n03388183
n01968897
n02172182
n02112018
n02883205
n03854065
n12267677
n02094258
n04254120
n01855672
n02100877
n03344393
n07693725
n02669723
n02264363
n03763968
n03637318
n04447861
n01984695
n12267677
n04335435
n02120505
n02104365
n03450230
n04286575
n03207941
n02106166
n03325584
n03793489
n03788365
n03877845
n02190166
n02051845
n02100583
n02104029
n06359193
n01514859
n02106550
n02165456
n02276258
n01514859
n03485407
n01632777
n02408429
n03124043
n03717622
n04252225
n04517823
n03425413
n04310018
n03017168
n03832673
n01770081
n03127925
n02089867
n03461385
n03485407
n01592084
n02256656
n03146219
n01795545
n03947888
n07693725
n04483307
n02002556
n04532670
n04049303
n02892201
n03857828
n01494475
n01601694
n04131690
n02666196
n02098286
n02641379
n04228054
n03980874
n04590129
n01616318
n03690938
n04127249
n03345487
n02113023
n01749939
n04229816
n02927161
n03956157
n02111500
n01756291
n02492035
n02119022
n02443114
n02950826
n02319095
n04346328
n02128757
n03998194
n02667093
n01943899
n04467665
n01530575
n01614925
n04346328
n02093754
n03733805
n03742115
n03197337
n02107908
n01737021
n02281787
n03141823
n04254120
n01532829
n02526121
n02966687
n02484975
n03832673
n02113799
n03958227
n04350905
n03623198
n06874185
n03337140
n02097658
n04311174
n04201297
n03908714
n01740131
n03929855
n02509815
n03903868
n03658185
n01843065
n04557648
n04392985
n02454379
n02493793
n04275548
n03220513
n02606052
n04118776
n02514041
n07684084
n03388183
n02794156
n01632777
n04238763
n04372370
n03876231
n02948072
n02096437
n02497673
n03843555
n07565083
n02097130
n04509417
n03255030
n02129165
n01682714
n07753275
n09472597
n02134418
n02219486
n02097047
n03063689
n02091467
n03781244
n02807133
n03814906
n04355338
n04579145
n03272010
n02086646
n02106662
n03956157
n02783161
n02112137
n03188531
n03126707
n01608432
n03337140
n01847000
n04125021
n04147183
n07720875
n02319095
n02510455
n04311174
n03584254
n04542943
n02102480
n02114712
n02268443
n07718472
n03792972
n03724870
n04239074
n02091134
n02129604
n03127925
n02086646
n03207941
n01819313
n04522168
n03271574
n04487394
n03710193
n02105855
n03131574
n02105251
n02095889
n03384352
n07880968
n02259212
n04069434
n01669191
n03710193
n01855672
n13037406
n01484850
n04476259
n03871628
n01774750
n02108551
n02090622
n03733281
n03724870
n03976657
n02099267
n04127249
n02097474
n02056570
n01795545
n07714571
n02107142
n01608432
n02113023
n04486054
n03876231
n04270147
n03461385
n13040303
n02102318
n02910353
n02094114
n02786058
n02992211
n02396427
n04344873
n02097130
n01443537
n04325704
n02093428
n04258138
n07584110
n03443371
n03481172
n02110341
n04141975
n02226429
n02281406
n04141327
n04118538
n02037110
n02226429
n01692333
n03916031
n02787622
n03594945
n07860988
n03729826
n04515003
n04612504
n02007558
n01560419
n02951358
n02837789
n04456115
n04239074
n02094433
n04553703
n03045698
n03874599
n03595614
n02514041
n03876231
n04467665
n04146614
n02089973
n04005630
n04266014
n04074963
n03527444
n04355338
n09246464
n03980874
n01990800
n03697007
n13133613
n07613480
n02655020
n03240683
n04111531
n01871265
n01695060
n03478589
n04265275
n02094433
n02009229
n02708093
n03447447
n03216828
n04371430
n03991062
n02607072
n02481823
n02102318
n09256479
n02123597
n02927161
n01737021
n01675722
n11939491
n03937543
n03729826
n01820546
n01847000
n02112137
n01675722
n04613696
n02974003
n03384352
n03627232
n04429376
n01756291
n03496892
n02398521
n02168699
n03000247
n01739381
n04371430
n04335435
n03532672
n02441942
n03400231
n03793489
n01795545
n01740131
n02110806
n03063599
n02095314
n04579432
n04591157
n02321529
n03661043
n01440764
n04228054
n04462240
n03877472
n03720891
n02514041
n03272562
n01601694
n02091467
n04041544
n03796401
n03594734
n02089078
n02493793
n01440764
n09399592
n03775071
n04296562
n02099849
n02804610
n03384352
n02088632
n04026417
n02794156
n01968897
n02133161
n03777754
n02494079
n02107142
n03710193
n02640242
n04209133
n02443114
n03259280
n02172182
n02089078
n04049303
n02093647
n06785654
n03733131
n03476991
n04259630
n01768244
n13037406
n02168699
n02013706
n02089078
n01817953
n02280649
n02877765
n04273569
n02097209
n06785654
n02104365
n02107908
n02484975
n02906734
n09468604
n01632777
n01494475
n01983481
n04372370
n02364673
n02730930
n02100583
n04127249
n03355925
n02108089
n03197337
n03857828
n01496331
n02110341
n04074963
n02087046
n03000684
n03485794
n02500267
n02105162
n03425413
n01944390
n02112018
n04005630
n01582220
n04275548
n07754684
n02011460
n02132136
n01748264
n04228054
n02980441
n02113624
n04597913
n02123159
n02027492
n04590129
n02114548
n03208938
n02099267
n03538406
n03218198
n04254120
n03337140
n02089078
n02701002
n02086240
n02088632
n01943899
n13052670
n04606251
n09229709
n01687978
n03929660
n02093754
n01729322
n02107908
n07715103
n03773504
n04592741
n02107908
n02264363
n04154565
n02098105
n03485794
n02791270
n06874185
n02488702
n03014705
n03657121
n03854065
n02107574
n02669723
n03950228
n02317335
n04133789
n01685808
n03933933
n02097047
n02011460
n01819313
n03982430
n01784675
n03670208
n03220513
n04118538
n02782093
n02783161
n03496892
n02107574
n04040759
n02013706
n02777292
n01775062
n01748264
n03018349
n04111531
n02089867
n09246464
n04548280
n07734744
n03291819
n04552348
n03871628
n07753113
n01729322
n07715103
n04596742
n02128385
n03976467
n04548280
n02497673
n02134418
n02105251
n03970156
n01749939
n01795545
n01855032
n02395406
n02098413
n02111500
n02895154
n07565083
n03742115
n02108089
n02321529
n02971356
n02437616
n03208938
n01667114
n02226429
n03877845
n02910353
n04070727
n04152593
n01883070
n02870880
n02504458
n04243546
n02096051
n03899768
n02321529
n03877845
n03450230
n03290653
n01664065
n03908714
n01537544
n02088238
n01882714
n01773549
n04418357
n02727426
n01872401
n02106382
n03991062
n02017213
n02018207
n04370456
n02219486
n02669723
n01694178
n01784675
n03443371
n02114548
n01806567
n04090263
n07932039
n01608432
n02281406
n04238763
n01664065
n02028035
n01917289
n03793489
n04209239
n03042490
n03400231
n02356798
n03065424
n04335435
n01664065
n01692333
n07880968
n03297495
n02841315
n03095699
n07697313
n09399592
n01917289
n03724870
n13133613
n03787032
n02493793
n03843555
n01629819
n03843555
n04461696
n01669191
n03976657
n02097047
n03773504
n02951585
n04398044
n03599486
n03250847
n03796401
n01737021
n02776631
n03599486
n02110806
n04254680
n02138441
n02483362
n02747177
n03733805
n04118538
n01829413
n02112137
n02102318
n02097474
n02119789
n04136333
n04579432
n02493509
n01667778
n02442845
n02097209
n03404251
n02488291
n02091032
n01882714
n04081281
n02963159
n02088632
n01491361
n04380533
n04423845
n01629819
n03956157
n04548362
n02804610
n04310018
n04251144
n07860988
n02692877
n03938244
n01484850
n04325704
n01560419
n02916936
n02442845
n03998194
n04330267
n03425413
n07932039
n01984695
n03345487
n03259280
n07768694
n02444819
n01675722
n02328150
n04070727
n04423845
n03729826
n07684084
n03485794
n03498962
n01753488
n03958227
n02895154
n03100240
n02110806
n04118776
n02105056
n03874293
n04037443
n03496892
n07745940
n03871628
n03372029
n02100735
n02132136
n03623198
n03666591
n02823750
n01735189
n02106382
n07697537
n02454379
n04311004
n03110669
n04009552
n02074367
n02442845
n02099601
n09246464
n03814906
n04049303
n01749939
n03803284
n02667093
n03908714
n04409515
n03290653
n07730033
n02268443
n03028079
n02514041
n04592741
n07720875
n02988304
n02606052
n03877472
n01798484
n03742115
n04461696
n02917067
n01629819
n04486054
n04548362
n02860847
n02107683
n01944390
n03786901
n04044716
n01824575
n01440764
n02279972
n01914609
n03272562
n07590611
n01728572
n01687978
n03791053
n01518878
n02950826
n03982430
n02966193
n03841143
n02672831
n02787622
n02165105
n04525038
n03662601
n12057211
n04522168
n04613696
n02088632
n01985128
n09472597
n03271574
n01687978
n04147183
n07875152
n01580077
n03393912
n03903868
n04074963
n03788365
n01843065
n03690938
n02105056
n04525305
n01631663
n02097047
n02486410
n04152593
n02879718
n04443257
n02102040
n02093859
n02127052
n09332890
n01770393
n03527444
n03697007
n04515003
n07873807
n04429376
n03991062
n03085013
n01828970
n01608432
n03930313
n02105641
n01756291
n02500267
n04039381
n02168699
n03259280
n01855032
n10565667
n02115641
n04515003
n02669723
n02988304
n03825788
n02025239
n03706229
n01914609
n03344393
n04049303
n03259280
n02091244
n02514041
n03065424
n12057211
n02027492
n04118538
n04141076
n03899768
n04462240
n02096051
n02978881
n02114855
n04509417
n04505470
n03201208
n01986214
n02417914
n01677366
n07747607
n04409515
n01685808
n04599235
n03187595
n03657121
n15075141
n04372370
n02966687
n01820546
n03344393
n03476991
n03763968
n04070727
n03041632
n01877812
n07248320
n07875152
n02892767
n03355925
n01685808
n04228054
n03843555
n01755581
n04347754
n02277742
n03000247
n07742313
n07875152
n03075370
n02799071
n03133878
n06596364
n01806143
n03930313
n03930313
n02730930
n01773797
n03902125
n03721384
n02951358
n02119022
n01744401
n02112706
n02396427
n03633091
n01514668
n03791053
n02395406
n04370456
n03657121
n02096585
n02107312
n03970156
n03126707
n02105251
n02442845
n04461696
n07715103
n03873416
n01677366
n02012849
n03527444
n01798484
n04562935
n02279972
n02423022
n03992509
n01592084
n03788195
n02259212
n04462240
n03929660
n02090622
n04254120
n01592084
n02109961
n03769881
n02268443
n02909870
n01641577
n04550184
n04507155
n01630670
n04152593
n02090379
n01983481
n09421951
n04517823
n01744401
n07745940
n01843383
n03476684
n01735189
n03930313
n03916031
n02093991
n03207743
n02787622
n02106166
n04398044
n04428191
n04209133
n02085620
n09835506
n01871265
n03459775
n02089973
n02643566
n02481823
n02123159
n07875152
n04557648
n03196217
n04033995
n02037110
n01955084
n03089624
n01751748
n02099429
n03325584
n03445777
n03902125
n02116738
n02799071
n02843684
n03109150
n02869837
n06794110
n03908618
n02105251
n02790996
n02966687
n09256479
n02939185
n04417672
n02113624
n04266014
n02174001
n02483362
n03127925
n03717622
n01744401
n01739381
n02606052
n03290653
n04330267
n02486410
n02457408
n04355338
n01498041
n02134418
n01440764
n04552348
n02319095
n03781244
n07730033
n04525038
n02018795
n03494278
n04589890
n01829413
n04456115
n04118776
n02687172
n02992529
n07932039
n03075370
n04557648
n01728920
n01688243
n02443484
n03843555
n03786901
n03016953
n02536864
n04125021
n01514668
n04461696
n01983481
n02493509
n07614500
n01776313
n02091467
n02106030
n02814860
n02002556
n01818515
n03160309
n02092339
n02013706
n01753488
n01739381
n02981792
n01753488
n02704792
n09332890
n02317335
n03255030
n04201297
n02093256
n01688243
n03792782
n03028079
n01944390
n02107908
n03803284
n03775546
n02128757
n04542943
n04560804
n02514041
n04204347
n02916936
n03344393
n02364673
n03942813
n01614925
n02494079
n04542943
n07742313
n02490219
n03843555
n02281406
n02493793
n02123597
n04613696
n01796340
n07753592
n03384352
n03916031
n03908714
n03992509
n04201297
n03637318
n02977058
n02091032
n02494079
n03673027
n04548362
n01950731
n03721384
n02999410
n02483362
n02111277
n03709823
n02087046
n03929660
n07930864
n03954731
n03063599
n03692522
n02018207
n03788195
n04040759
n02011460
n07871810
n03690938
n04486054
n01986214
n04591713
n04127249
n01807496
n02095570
n01981276
n02128925
n02992529
n02815834
n01698640
n01632458
n02492660
n02319095
n03938244
n03876231
n01798484
n03666591
n02110806
n03782006
n01943899
n02643566
n04120489
n04399382
n02085782
n04389033
n07714571
n01614925
n03494278
n04141076
n03388043
n04118776
n03291819
n02389026
n04209133
n01685808
n03769881
n04074963
n04458633
n04532670
n02484975
n07579787
n02058221
n03000134
n01704323
n04044716
n03000684
n03179701
n07716906
n01518878
n02497673
n03445924
n02093647
n02410509
n03026506
n04153751
n04141076
n03532672
n04201297
n07836838
n03188531
n02486410
n04275548
n02133161
n03394916
n02098105
n04376876
n02106382
n03483316
n02490219
n03032252
n03770439
n02025239
n03840681
n03496892
n03633091
n02837789
n03126707
n02104365
n04584207
n04347754
n04243546
n02110185
n02865351
n02167151
n02871525
n02088466
n02138441
n02804610
n03935335
n02782093
n01744401
n09472597
n03445924
n01737021
n02102480
n02086646
n02137549
n02481823
n02107574
n02096437
n02701002
n03272562
n02978881
n01737021
n01824575
n03887697
n02097298
n03692522
n02437312
n03814639
n02236044
n02094433
n07742313
n04398044
n03255030
n04258138
n02422106
n06785654
n02319095
n03692522
n04350905
n04252077
n03804744
n03131574
n02107312
n07583066
n02006656
n01608432
n04428191
n04346328
n02493793
n04040759
n03733281
n02093754
n01677366
n02481823
n11939491
n13044778
n04070727
n02500267
n03347037
n03942813
n03218198
n02747177
n04286575
n01530575
n02437312
n02090379
n04447861
n01843383
n01629819
n01871265
n02077923
n02105162
n03873416
n02106662
n02096437
n02132136
n03000684
n01917289
n02777292
n02077923
n02110063
n02027492
n02124075
n04467665
n04192698
n04525305
n12057211
n02894605
n02108551
n04392985
n01742172
n02825657
n04336792
n04265275
n02172182
n02483362
n02168699
n02088094
n02128925
n03764736
n02113712
n03197337
n03393912
n03804744
n07697313
n03770679
n02795169
n02104365
n10148035
n01534433
n03089624
n10565667
n04536866
n02259212
n01828970
n01667114
n02110958
n03841143
n03325584
n03450230
n04423845
n04149813
n02802426
n03876231
n03868242
n07614500
n04356056
n02128925
n03379051
n02099712
n02870880
n02085936
n13044778
n03388043
n02113712
n02113624
n03141823
n02110627
n03394916
n04548362
n02927161
n01914609
n04275548
n03271574
n03527444
n01530575
n03775546
n02965783
n02105505
n03982430
n04258138
n03201208
n07684084
n02437616
n03388043
n04389033
n02841315
n03250847
n02480495
n01749939
n12998815
n02114712
n02056570
n03602883
n02281406
n02086079
n03769881
n03791053
n02165456
n02747177
n13040303
n04023962
n02948072
n04243546
n02690373
n04442312
n03837869
n04417672
n13054560
n02106166
n01776313
n02667093
n07565083
n13133613
n07730033
n02488291
n04423845
n03623198
n03977966
n03866082
n02100735
n02834397
n04461696
n02089078
n01694178
n01944390
n03706229
n03223299
n03980874
n03991062
n04004767
n04201297
n03761084
n03443371
n02033041
n02138441
n01924916
n04133789
n06359193
n02091032
n02981792
n03180011
n04522168
n04317175
n02106662
n01847000
n12768682
n03496892
n02892767
n07684084
n01877812
n03345487
n03495258
n03661043
n01990800
n03417042
n04330267
n01443537
n02397096
n01582220
n01910747
n02025239
n03724870
n02787622
n02892201
n02086079
n04417672
n04550184
n04525305
n03877845
n07718472
n04266014
n02396427
n01773797
n02009912
n01795545
n02120079
n02105505
n04252077
n07734744
n02793495
n04372370
n02667093
n01629819
n02493793
n02640242
n01748264
n02134418
n04335435
n02966687
n01608432
n03325584
n02013706
n02364673
n02791124
n02979186
n04493381
n03045698
n03032252
n02092339
n01806143
n03535780
n02319095
n04562935
n01873310
n02279972
n02124075
n03482405
n02056570
n02823750
n02823428
n01443537
n02860847
n02690373
n03825788
n04461696
n02106030
n01983481
n01632777
n04562935
n01847000
n03661043
n03272010
n02113978
n04550184
n02699494
n04505470
n01629819
n03944341
n03792782
n02071294
n02114367
n04536866
n02910353
n03355925
n03908618
n02786058
n02097047
n02088094
n02089867
n04356056
n02095570
n01756291
n02441942
n04208210
n07693725
n02088094
n06596364
n02992529
n04081281
n03467068
n01847000
n01693334
n03680355
n04501370
n03763968
n01917289
n02669723
n01924916
n02110958
n04041544
n02110806
n02134084
n02130308
n02443484
n02843684
n01968897
n01855672
n02113799
n03584829
n12768682
n01531178
n03197337
n01784675
n03075370
n04252077
n03935335
n02999410
n07716358
n04238763
n07753275
n02279972
n02666196
n02007558
n02105251
n02226429
n01751748
n02127052
n04579145
n02051845
n02445715
n02102177
n03759954
n03179701
n02007558
n03649909
n03992509
n03447721
n02916936
n03196217
n01883070
n01983481
n03000684
n01756291
n02111277
n03857828
n04479046
n02177972
n04067472
n03444034
n03854065
n03720891
n04208210
n01740131
n04423845
n01855672
n03388549
n02206856
n04606251
n03887697
n02865351
n04579145
n01496331
n02804414
n02787622
n04004767
n02097047
n02490219
n03529860
n03680355
n03942813
n01632458
n03733281
n03584829
n02797295
n02966687
n01824575
n07831146
n04366367
n03666591
n03788195
n02966193
n03042490
n06874185
n03345487
n02123597
n02895154
n01664065
n01819313
n12985857
n01855672
n02095314
n02102973
n02966193
n02115913
n03590841
n02093991
n02169497
n02814860
n02089078
n02138441
n02113712
n02883205
n01601694
n01774384
n04111531
n03000134
n02088364
n02489166
n01914609
n04009552
n03680355
n03843555
n03950228
n03680355
n04597913
n04347754
n04116512
n02747177
n01514668
n02840245
n03483316
n07715103
n04153751
n02500267
n03998194
n15075141
n03930313
n02112706
n03888257
n02110063
n02108000
n02102973
n02483708
n02097474
n02011460
n02492035
n02814860
n02009229
n03877845
n06596364
n07248320
n04344873
n04536866
n02823750
n03291819
n01770081
n02892767
n03481172
n02066245
n04370456
n02264363
n03670208
n02397096
n03075370
n02087394
n02536864
n04599235
n03982430
n04523525
n04522168
n13052670
n03633091
n04067472
n02988304
n04486054
n01677366
n02492660
n03127747
n02112350
n04336792
n03417042
n13133613
n01608432
n02865351
n02129165
n01773157
n04258138
n04041544
n04252077
n03197337
n03794056
n03877845
n04346328
n02086910
n01694178
n03445924
n04532670
n03781244
n04141975
n03124170
n03874293
n03498962
n01739381
n02791270
n07892512
n03444034
n02105162
n01734418
n04070727
n02916936
n03840681
n04399382
n07749582
n02480495
n04515003
n01688243
n02107142
n01914609
n01742172
n07753113
n01828970
n01797886
n04606251
n03062245
n03400231
n03483316
n02978881
n02109047
n02795169
n01728920
n03530642
n04209133
n02105641
n02111277
n01737021
n02092339
n04589890
n02454379
n12267677
n03627232
n01990800
n02109047
n03314780
n01798484
n03691459
n02669723
n03781244
n03467068
n01770081
n01796340
n03930313
n02226429
n02514041
n02356798
n07880968
n04131690
n02807133
n03841143
n02346627
n02397096
n02963159
n02641379
n02093428
n01537544
n02814860
n04074963
n02109525
n02085782
n02102973
n02319095
n02437616
n02395406
n02488291
n03777568
n03710193
n09421951
n03838899
n04004767
n02011460
n02526121
n02112018
n02687172
n02825657
n01882714
n01968897
n03196217
n02101556
n04389033
n04127249
n04254680
n03063689
n04125021
n01689811
n04325704
n02137549
n10565667
n02391049
n07836838
n04584207
n02423022
n02088364
n03961711
n02457408
n03535780
n02412080
n03017168
n02979186
n02676566
n01860187
n02423022
n03891332
n01494475
n01704323
n04423845
n03976467
n02091831
n02101006
n01491361
n03063689
n01910747
n01784675
n03967562
n02094114
n04065272
n01534433
n04372370
n02879718
n02871525
n02168699
n01784675
n03492542
n02101388
n07718472
n02110185
n12998815
n03127925
n03207743
n12057211
n07565083
n04525038
n04118776
n01616318
n02965783
n02206856
n03899768
n01687978
n03379051
n02104029
n04229816
n03124170
n02281406
n03032252
n02101556
n02980441
n03485794
n04366367
n02492035
n03599486
n04548362
n03764736
n07760859
n01978287
n04505470
n02488291
n02782093
n03417042
n02486261
n03843555
n02319095
n02493509
n01798484
n03857828
n03950228
n02791124
n03207941
n01751748
n03916031
n04074963
n03724870
n13133613
n03937543
n03255030
n04372370
n02168699
n03920288
n02514041
n02112350
n01443537
n01807496
n04070727
n01675722
n01518878
n03599486
n04162706
n04147183
n01795545
n01698640
n01873310
n07718472
n04033995
n04418357
n04429376
n02110806
n01944390
n09835506
n02092339
n02948072
n01978455
n02100236
n03710193
n04517823
n04154565
n03761084
n02346627
n02672831
n02422106
n01664065
n04125021
n03450230
n03980874
n03642806
n03866082
n01494475
n01910747
n02229544
n01770393
n02114367
n07920052
n01872401
n02109047
n03884397
n02704792
n07716906
n03843555
n03095699
n04532106
n02093754
n02879718
n04515003
n07718747
n02094258
n03838899
n03126707
n07730033
n03085013
n03680355
n02123045
n02279972
n02086240
n02134418
n03388549
n03637318
n03345487
n04517823
n03476991
n07734744
n03602883
n04371774
n04229816
n03249569
n02676566
n02011460
n02916936
n01806567
n02814533
n01560419
n03970156
n01978455
n02823750
n02883205
n02110627
n03787032
n10148035
n04596742
n04033995
n02444819
n03954731
n04311174
n02095889
n01914609
n03710193
n02782093
n01820546
n02091134
n04355933
n02389026
n04090263
n04254120
n01820546
n01641577
n02106550
n02326432
n03532672
n03065424
n07836838
n02786058
n04235860
n04264628
n02091244
n03773504
n02013706
n04458633
n04270147
n07711569
n04325704
n03017168
n02112350
n04192698
n02769748
n02096051
n04149813
n02483708
n04040759
n04265275
n02071294
n07873807
n02488702
n04200800
n02134084
n04418357
n04552348
n02999410
n02817516
n01981276
n02233338
n02504458
n02116738
n03633091
n03372029
n07714990
n04552348
n02504458
n02172182
n03691459
n02089078
n03594734
n02643566
n01665541
n01818515
n02802426
n03662601
n03495258
n01773797
n02206856
n03710721
n04442312
n02137549
n03657121
n04311004
n03775071
n03630383
n02412080
n01443537
n03874293
n03874599
n07590611
n04162706
n02108551
n07749582
n02804414
n03777754
n03584829
n02699494
n02097298
n03661043
n01774750
n03594945
n04005630
n07697313
n02009229
n03529860
n04355933
n03899768
n03337140
n02110958
n02092339
n02097130
n03337140
n01818515
n03345487
n01496331
n03124043
n02095570
n01558993
n03814906
n03216828
n03930630
n06874185
n02113799
n07720875
n03887697
n03697007
n02231487
n02669723
n02480855
n04366367
n03706229
n03529860
n03924679
n03527444
n01770393
n04493381
n04532670
n02883205
n04192698
n02129604
n02669723
n04259630
n02091831
n09332890
n01883070
n04026417
n03485407
n01877812
n01644900
n09256479
n04286575
n01601694
n04428191
n03065424
n03770439
n02174001
n02110341
n02916936
n04086273
n03393912
n02701002
n03991062
n01608432
n04273569
n04522168
n07760859
n02493793
n02804414
n02229544
n04009552
n03874599
n03649909
n07614500
n02094433
n02097298
n03662601
n03450230
n02093256
n04033995
n02113023
n09246464
n01704323
n02488702
n02096294
n04536866
n07873807
n03770439
n04409515
n04532106
n04542943
n07584110
n02808304
n03903868
n03888605
n02051845
n02115641
n02099267
n03452741
n03498962
n01945685
n01692333
n03930630
n02794156
n04311004
n03482405
n04540053
n09256479
n02607072
n02281406
n03991062
n02056570
n04243546
n03100240
n01532829
n03127747
n02119022
n02666196
n03379051
n04417672
n07920052
n03617480
n01818515
n03998194
n03388183
n02113799
n04344873
n03590841
n04228054
n04228054
n02231487
n03888257
n04086273
n02090622
n03933933
n02422106
n03720891
n02093991
n04347754
n01630670
n03843555
n03729826
n01644900
n02264363
n03126707
n12057211
n04461696
n02098286
n02276258
n04552348
n01514668
n04243546
n02871525
n02106382
n02100583
n02085936
n04487081
n03995372
n01601694
n02279972
n03444034
n07730033
n02011460
n02099601
n04536866
n03014705
n02486261
n04590129
n04265275
n03447447
n02102177
n03388043
n01665541
n03924679
n06874185
n03018349
n02403003
n03196217
n02132136
n01514859
n02397096
n02113186
n03924679
n02096437
n07831146
n04584207
n03777568
n02276258
n02108915
n04540053
n03874293
n02033041
n04270147
n02114367
n07730033
n02342885
n03929660
n03032252
n02992211
n03658185
n02777292
n02879718
n02319095
n07760859
n03888257
n02910353
n03868863
n04133789
n04136333
n04356056
n02028035
n03000134
n03355925
n04326547
n02494079
n04099969
n02966193
n04147183
n02966193
n07697313
n03877472
n02486261
n02510455
n07720875
n03764736
n04239074
n02443484
n07720875
n02840245
n03782006
n02119789
n04328186
n02417914
n03216828
n02108551
n02013706
n01734418
n03729826
n01689811
n04522168
n02422106
n04004767
n12620546
n04041544
n04116512
n03478589
n02174001
n04486054
n02107142
n02422699
n03400231
n07930864
n04200800
n01582220
n07753592
n02690373
n07880968
n03958227
n01665541
n01847000
n12768682
n03478589
n02091467
n02787622
n02776631
n03000247
n04074963
n03743016
n03325584
n09246464
n03871628
n01740131
n09288635
n02730930
n03884397
n03775546
n02114712
n07718472
n01728920
n02494079
n01774750
n03967562
n07718747
n02906734
n03444034
n02408429
n02319095
n04330267
n02113624
n02231487
n04141076
n04552348
n03759954
n04120489
n02869837
n03838899
n02268443
n02321529
n04023962
n03843555
n04525038
n02361337
n03924679
n02236044
n01530575
n02877765
n01980166
n03777568
n04008634
n04579145
n07873807
n03207743
n03970156
n04254680
n03345487
n02454379
n03110669
n01980166
n02536864
n04285008
n07684084
n01924916
n02108915
n04074963
n03837869
n01882714
n03873416
n02169497
n02687172
n02268853
n02906734
n03018349
n04310018
n02978881
n01693334
n04542943
n03770679
n02123045
n02974003
n02086646
n01530575
n03786901
n03710193
n03388183
n02112350
n02113186
n01883070
n04552348
n04344873
n01773157
n02109961
n02123159
n04404412
n01917289
n02169497
n03899768
n03697007
n03874599
n02669723
n07717556
n04147183
n03424325
n03498962
n07715103
n01632777
n02264363
n03018349
n01669191
n04204238
n01829413
n03785016
n01871265
n02992529
n04127249
n01774384
n13040303
n02090721
n07615774
n02231487
n03126707
n04399382
n02127052
n02480495
n04357314
n04597913
n04311174
n04376876
n03344393
n04146614
n01622779
n04325704
n03527444
n07753275
n02422699
n03759954
n01824575
n01704323
n04067472
n01872401
n02114712
n02979186
n07615774
n02094433
n02106550
n01930112
n02086079
n07754684
n02088238
n03764736
n02077923
n01770081
n03763968
n03544143
n03777568
n03706229
n07871810
n02100583
n02096585
n03538406
n02794156
n04325704
n04127249
n02277742
n03314780
n13037406
n02607072
n07720875
n02277742
n02412080
n13054560
n02865351
n03467068
n03891251
n02089973
n02002724
n02017213
n02917067
n01665541
n07714990
n03372029
n03584254
n03662601
n03337140
n02692877
n02110627
n04201297
n04154565
n03637318
n03255030
n07745940
n02056570
n03895866
n02169497
n01818515
n04493381
n03041632
n02110627
n04553703
n02099429
n09428293
n03495258
n02483708
n04336792
n02825657
n03891251
n01860187
n09472597
n01753488
n04540053
n02895154
n02321529
n03259280
n01630670
n03000134
n03866082
n01514859
n07873807
n02105056
n01978455
n02009912
n03794056
n03720891
n03995372
n02869837
n02169497
n03425413
n04355338
n02977058
n02916936
n03840681
n04560804
n03042490
n07734744
n03706229
n01774384
n03530642
n02346627
n02105251
n02229544
n04522168
n03535780
n02105505
n02168699
n02138441
n04131690
n02172182
n02111129
n02776631
n03785016
n03895866
n02457408
n03146219
n02134084
n02097130
n02361337
n07720875
n01871265
n02231487
n07717556
n04328186
n04317175
n03065424
n02442845
n03729826
n02892201
n02489166
n03721384
n02096437
n02093647
n03376595
n01692333
n02134084
n01978287
n01592084
n02504458
n03544143
n04039381
n02690373
n01756291
n03814639
n03443371
n03633091
n02066245
n03868242
n02133161
n01496331
n02108915
n03325584
n03372029
n02085782
n04026417
n02111500
n03482405
n04149813
n02108551
n03337140
n03970156
n02443484
n03657121
n03633091
n01675722
n02965783
n03908714
n03777754
n03394916
n06794110
n02492660
n02099429
n01828970
n04404412
n01532829
n02109047
n07768694
n02104365
n01632777
n02794156
n02807133
n07615774
n01532829
n13040303
n04149813
n01828970
n03345487
n02096585
n03291819
n07754684
n02123597
n04266014
n02114855
n02018207
n04532106
n04579432
n09246464
n02088364
n07615774
n04487394
n04612504
n07613480
n02058221
n03980874
n02134418
n01622779
n04209239
n02692877
n01560419
n02870880
n03445924
n02117135
n04356056
n02097047
n02281406
n04243546
n02129604
n02395406
n02089973
n09332890
n07747607
n09246464
n04417672
n02859443
n02105251
n02012849
n03724870
n04562935
n02790996
n02825657
n02510455
n03884397
n04069434
n01843383
n01440764
n02909870
n04344873
n13054560
n03976657
n04270147
n02804610
n03792972
n01704323
n01689811
n03908714
n03062245
n03376595
n02442845
n04589890
n02114855
n04465501
n01664065
n07711569
n02457408
n02165105
n02389026
n03207743
n04081281
n04458633
n01843065
n04335435
n03444034
n04311174
n02128385
n01819313
n02098413
n02110341
n06874185
n02098413
n02007558
n02077923
n04461696
n01514859
n03388549
n03447721
n03207743
n02443114
n01664065
n03825788
n02799071
n01753488
n03642806
n01847000
n09421951
n02086910
n02441942
n03141823
n01664065
n03642806
n02364673
n03884397
n02033041
n04019541
n04266014
n07749582
n01818515
n02415577
n02804414
n04599235
n01910747
n02965783
n04111531
n03794056
n02088364
n03733805
n02497673
n04296562
n01983481
n04041544
n07892512
n02085936
n03929855
n02396427
n03854065
n02802426
n01751748
n01632458
n03207941
n02110627
n04554684
n03729826
n02480495
n01914609
n04200800
n02480495
n01630670
n03825788
n04458633
n07754684
n01756291
n02807133
n02099712
n03223299
n03394916
n02100735
n04548362
n01774750
n03085013
n02974003
n04004767
n02111129
n02113799
n02963159
n04275548
n06874185
n02105855
n03710193
n02916936
n03125729
n04209239
n04033995
n07930864
n03443371
n04604644
n03788195
n04238763
n02174001
n03637318
n07615774
n04200800
n02107142
n03709823
n03786901
n02086079
n03201208
n03000684
n04099969
n02102480
n01950731
n07753113
n02013706
n04536866
n02423022
n02687172
n04208210
n04596742
n02051845
n01833805
n02058221
n03344393
n03857828
n01978287
n04118538
n03976657
n03717622
n02097130
n09399592
n01768244
n02317335
n04204238
n01580077
n02097298
n03673027
n02013706
n02105251
n07697313
n03980874
n02804610
n02125311
n03781244
n02095570
n03344393
n02408429
n02110627
n02807133
n02129604
n04332243
n04398044
n13044778
n02098413
n02129604
n03763968
n03028079
n02108000
n03825788
n02116738
n04344873
n03924679
n02486261
n02667093
n03584254
n04554684
n07932039
n01872401
n02128757
n02966687
n02101556
n03207941
n04476259
n07684084
n02109525
n02268443
n03793489
n02106662
n04335435
n03146219
n01774384
n03980874
n01930112
n03485794
n03710193
n04525305
n03916031
n07565083
n02264363
n03676483
n04235860
n02808304
n03796401
n12620546
n02098286
n02091831
n02319095
n02264363
n04317175
n04120489
n02788148
n02110341
n04252077
n07715103
n04540053
n03016953
n02091244
n02640242
n04612504
n03000134
n02112706
n01532829
n02115913
n02101556
n02119789
n04252225
n03492542
n03272010
n03770679
n01629819
n04517823
n04366367
n02410509
n03623198
n03777754
n03899768
n04367480
n04525305
n03208938
n02951358
n03110669
n04483307
n04517823
n02422699
n04509417
n03590841
n09332890
n01629819
n04557648
n09421951
n13052670
n01677366
n02058221
n02102318
n03126707
n04548280
n03187595
n02966687
n03938244
n02486261
n02096177
n02165105
n02979186
n04310018
n01669191
n04356056
n01644373
n03676483
n04311174
n03617480
n02107908
n04310018
n02100236
n03623198
n03841143
n02488702
n04507155
n02097130
n02769748
n03781244
n02441942
n03240683
n02115641
n02117135
n02137549
n02113023
n02129165
n04532106
n04118538
n01774750
n02917067
n03394916
n04458633
n01704323
n04399382
n02410509
n02111277
n02102177
n03000247
n02107683
n04037443
n03445777
n04296562
n02971356
n04418357
n02730930
n03841143
n01774384
n03271574
n02443114
n12144580
n02097298
n02948072
n04179913
n02105251
n03888605
n03208938
n04265275
n09421951
n02408429
n02101388
n02105056
n07836838
n04591713
n02011460
n04532106
n01698640
n04330267
n04039381
n04542943
n02317335
n02504013
n01704323
n01829413
n04357314
n04252077
n01601694
n02006656
n03124043
n02965783
n02814533
n03347037
n03920288
n03874599
n02364673
n03496892
n01978455
n03544143
n04252077
n03630383
n03717622
n03141823
n04259630
n03785016
n02174001
n02869837
n04335435
n02687172
n01729977
n02018795
n01494475
n03529860
n02106166
n04553703
n04523525
n02445715
n03891332
n02747177
n03676483
n02667093
n07920052
n02910353
n02097209
n03991062
n04204238
n02110341
n02089867
n01776313
n02328150
n03180011
n07717410
n03047690
n04505470
n03014705
n01518878
n01807496
n04591713
n02999410
n04254777
n02870880
n02002556
n02095889
n02487347
n03944341
n03770679
n03794056
n03759954
n02093991
n01968897
n03743016
n03388183
n03775546
n02437312
n04120489
n03642806
n02808440
n04099969
n03891332
n03958227
n02113799
n03998194
n02104029
n03250847
n02100877
n07714990
n03110669
n02676566
n03347037
n03530642
n10565667
n02108000
n03110669
n03690938
n02095314
n02012849
n02277742
n01532829
n04553703
n02051845
n04456115
n03998194
n02417914
n03594734
n01775062
n02105855
n03903868
n02096294
n04371774
n02927161
n03657121
n03937543
n04532106
n01883070
n01537544
n02667093
n02104029
n02487347
n02104365
n02051845
n04243546
n02006656
n02808304
n04251144
n02356798
n02391049
n07753275
n02974003
n03482405
n09193705
n01694178
n02168699
n12768682
n03272562
n03710193
n03843555
n03126707
n03196217
n06785654
n04350905
n07873807
n04310018
n02264363
n02492660
n10565667
n04275548
n04147183
n04366367
n02114855
n02100236
n04154565
n02276258
n03424325
n03777568
n03494278
n01806143
n03459775
n03598930
n03967562
n03775546
n04418357
n02412080
n04591157
n01770081
n03877472
n01531178
n03794056
n04485082
n03786901
n01773797
n04254680
n02128925
n02128757
n02442845
n02606052
n02099429
n04442312
n01807496
n02107312
n03710637
n02027492
n03016953
n02017213
n12768682
n04192698
n02747177
n04532106
n01537544
n04254777
n03259280
n02025239
n09835506
n02096437
n04372370
n02797295
n03871628
n02481823
n03837869
n02268443
n04522168
n03690938
n04550184
n03657121
n02105251
n01833805
n01755581
n07734744
n01873310
n03538406
n01688243
n03452741
n02120505
n02412080
n04254120
n04019541
n02112706
n02100735
n03201208
n03134739
n02514041
n04065272
n02165105
n04443257
n04149813
n03871628
n02100236
n02412080
n02992211
n02951358
n03776460
n02666196
n03000134
n12144580
n03141823
n02110341
n02094114
n02504458
n04389033
n02085936
n04553703
n03594734
n09468604
n03980874
n07831146
n03141823
n13054560
n01704323
n02356798
n03970156
n02071294
n06794110
n02860847
n03970156
n11879895
n04389033
n01770393
n02104365
n02033041
n07754684
n02666196
n03658185
n03447447
n03840681
n01990800
n03992509
n02319095
n04540053
n04141975
n03026506
n02009229
n07880968
n03459775
n02488291
n02108551
n03793489
n03041632
n03887697
n12057211
n07875152
n01828970
n01796340
n03494278
n02281787
n01698640
n01537544
n02110185
n04209133
n02536864
n07714990
n02100236
n04317175
n04265275
n01983481
n01833805
n02808440
n01443537
n07697313
n02109525
n03935335
n03903868
n04074963
n01807496
n03729826
n04111531
n07860988
n04133789
n03873416
n03991062
n03028079
n03207743
n02487347
n03207941
n03920288
n02100735
n02105855
n03544143
n02071294
n03496892
n03461385
n01443537
n04239074
n03956157
n04553703
n04371430
n12057211
n04118776
n02793495
n02808304
n03709823
n02099267
n03063599
n03018349
n02009912
n03467068
n03637318
n12998815
n04153751
n03063599
n02132136
n02879718
n02835271
n03089624
n01734418
n02027492
n04133789
n01491361
n03041632
n02361337
n03710637
n02169497
n02268443
n03291819
n02492660
n04069434
n03457902
n04200800
n04429376
n01945685
n02910353
n02096177
n04204347
n03347037
n01806567
n02002724
n01675722
n04404412
n03476684
n03868242
n01773157
n02102040
n02088094
n02797295
n07831146
n03764736
n03000684
n02536864
n01983481
n02106550
n04065272
n01685808
n02090622
n04579432
n04204238
n13054560
n03016953
n03937543
n04229816
n02492660
n03445924
n11939491
n03544143
n02894605
n07697537
n04153751
n02483362
n02134084
n04208210
n03197337
n01753488
n03680355
n03938244
n03857828
n03761084
n02105162
n03742115
n02536864
n02930766
n01514668
n03876231
n02493509
n02095314
n04517823
n01729977
n04442312
n11939491
n01614925
n03496892
n02281787
n02095570
n02105505
n04127249
n04579432
n03804744
n04613696
n01440764
n04133789
n02115641
n02099849
n04493381
n02102480
n11939491
n07565083
n03425413
n01756291
n02132136
n02109525
n03995372
n12057211
n07697537
n04023962
n03690938
n03676483
n03868863
n04147183
n02895154
n01773549
n01667114
n12267677
n04507155
n03658185
n01644373
n06785654
n02114548
n04065272
n04118538
n01491361
n03792782
n03773504
n07831146
n02092002
n02808304
n04330267
n02437312
n03481172
n03706229
n02100583
n04347754
n02666196
n04074963
n03976467
n02090721
n02002556
n01728572
n02129165
n02483362
n01910747
n03887697
n02422106
n04039381
n02356798
n04350905
n02871525
n02086079
n04485082
n04116512
n02346627
n02840245
n03345487
n04336792
n03777568
n02797295
n02093428
n04037443
n03188531
n03538406
n02108089
n02268853
n02219486
n02415577
n02113978
n04367480
n02111277
n07754684
n03207941
n02708093
n02791124
n04239074
n01872401
n03124043
n02788148
n03933933
n01798484
n03065424
n03658185
n09421951
n03000247
n02669723
n04592741
n02097130
n02105641
n01629819
n02793495
n03954731
n04141327
n02966687
n02769748
n02281787
n01687978
n04229816
n04009552
n04418357
n04461696
n02006656
n03770439
n02017213
n07716358
n02445715
n02389026
n02948072
n06785654
n02268443
n03457902
n04118776
n12768682
n02095314
n01518878
n04275548
n02894605
n01843383
n02840245
n07697313
n07930864
n02690373
n02788148
n04081281
n03127925
n03706229
n03721384
n01632458
n04265275
n01924916
n02979186
n01872401
n04235860
n04476259
n07697537
n02488702
n03920288
n03670208
n04493381
n02113712
n01682714
n03271574
n03018349
n01641577
n02422699
n02807133
n02749479
n02749479
n02480495
n02120505
n02277742
n03935335
n03759954
n02113186
n02100236
n03126707
n04458633
n02281406
n01775062
n04204347
n02116738
n03388043
n04418357
n02100583
n03584829
n01592084
n04456115
n01728920
n02091635
n03637318
n02105056
n02110627
n02776631
n03788365
n03179701
n02009912
n02219486
n04179913
n07590611
n03903868
n04560804
n01917289
n04133789
n02085620
n03259280
n02484975
n01744401
n07836838
n07753592
n03673027
n01494475
n01728572
n02174001
n07873807
n02058221
n04252225
n03782006
n04133789
n15075141
n02106662
n02346627
n03769881
n03630383
n03871628
n01984695
n01514668
n01749939
n03457902
n04347754
n04370456
n02892201
n01693334
n03109150
n02102973
n02098413
n01930112
n02834397
n02091032
n02489166
n12985857
n02092339
n03995372
n02089078
n03709823
n02111500
n02268443
n02410509
n01798484
n03720891
n03868863
n02092002
n03018349
n04487394
n03240683
n03803284
n07579787
n02804414
n03887697
n04542943
n02113023
n02607072
n01882714
n02102040
n07697537
n02443114
n01986214
n02777292
n02939185
n02009229
n03769881
n04554684
n02037110
n02817516
n02089078
n03691459
n03680355
n04591713
n03804744
n03617480
n01795545
n02865351
n02840245
n02909870
n02101006
n04208210
n04487081
n02111889
n04264628
n01629819
n02111129
n12768682
n03134739
n03075370
n13037406
n02100735
n04330267
n04540053
n01498041
n03874599
n03874599
n04485082
n03095699
n04252225
n02172182
n01667114
n04557648
n02119022
n02091467
n04350905
n01817953
n01985128
n04067472
n02504013
n04476259
n09229709
n02865351
n02105251
n03255030
n02325366
n04200800
n03065424
n04330267
n02403003
n02123159
n02326432
n02097130
n02966687
n04591157
n03538406
n02107908
n02009912
n01644900
n02356798
n04201297
n04235860
n02110185
n03544143
n02787622
n04296562
n02804414
n02114367
n02894605
n02119022
n02965783
n03837869
n01955084
n02701002
n02137549
n03794056
n03759954
n03956157
n03461385
n02939185
n07892512
n07715103
n01742172
n04350905
n01817953
n02865351
n02002556
n01644900
n02795169
n03617480
n03207743
n02403003
n03109150
n03590841
n02480855
n02091032
n07584110
n02102318
n02111277
n02692877
n04604644
n03793489
n01877812
n02412080
n01698640
n02110806
n04019541
n04476259
n04584207
n02012849
n03720891
n04311174
n03459775
n03781244
n09428293
n02106550
n02132136
n03630383
n02128925
n03903868
n03814639
n01630670
n02106550
n01855672
n01807496
n02088364
n03290653
n02109525
n03902125
n07583066
n04542943
n03937543
n07583066
n04008634
n04532670
n02095314
n04118538
n07584110
n02747177
n03929855
n01950731
n07742313
n03649909
n02319095
n01697457
n02092339
n09332890
n04347754
n02480495
n03478589
n07880968
n03935335
n03976657
n02835271
n04367480
n02177972
n04070727
n04277352
n04125021
n03134739
n02128757
n02504013
n04111531
n04152593
n04591713
n03400231
n01704323
n12768682
n02110806
n04418357
n02536864
n04409515
n04542943
n03763968
n03662601
n02490219
n02086240
n04404412
n07718747
n02096051
n04599235
n01944390
n01990800
n04152593
n02807133
n02086910
n03347037
n01847000
n02107683
n02279972
n04019541
n01695060
n02087046
n03891251
n04154565
n04398044
n02504013
n02138441
n04285008
n03942813
n04239074
n02704792
n03794056
n04476259
n04483307
n03982430
n02109047
n11939491
n04335435
n02727426
n03781244
n01978455
n03887697
n02268853
n02607072
n02009229
n04371774
n07892512
n04523525
n01748264
n03924679
n04200800
n04026417
n04208210
n04548362
n04389033
n04152593
n02910353
n07697313
n03196217
n04200800
n02279972
n01917289
n02488291
n02808304
n03992509
n02804414
n01774750
n04442312
n03535780
n02802426
n04044716
n02128385
n07697313
n04179913
n03400231
n03095699
n03871628
n02129165
n01773797
n03691459
n02018795
n04116512
n03089624
n02127052
n02111129
n02093256
n03742115
n04429376
n02009229
n02815834
n07747607
n03481172
n03220513
n03495258
n02974003
n01704323
n04277352
n07684084
n02107574
n02276258
n12998815
n03617480
n03721384
n02992529
n02321529
n03933933
n03764736
n03764736
n02317335
n04235860
n02808440
n02110341
n04542943
n02442845
n02869837
n01742172
n02088632
n02120079
n04259630
n03447447
n03876231
n02037110
n01914609
n02102040
n13054560
n03930630
n03759954
n07584110
n04259630
n03291819
n07697537
n01614925
n03814906
n04540053
n02116738
n01776313
n03954731
n04479046
n03658185
n04357314
n03763968
n01755581
n01749939
n02981792
n03485407
n02442845
n04548280
n07880968
n02825657
n09332890
n04596742
n04596742
n02930766
n01843383
n03532672
n13133613
n02963159
n03759954
n02098413
n04367480
n02643566
n04254777
n02415577
n04560804
n04485082
n03781244
n04597913
n04482393
n01530575
n03250847
n02108089
n04404412
n02687172
n03786901
n02108000
n02687172
n02317335
n02606052
n02165105
n03045698
n03218198
n02415577
n04069434
n04482393
n01806143
n01443537
n02100735
n04153751
n04254777
n02091467
n03482405
n02794156
n07754684
n03495258
n04542943
n01797886
n03085013
n03792972
n01980166
n02782093
n03920288
n03666591
n01695060
n02486410
n02088364
n02389026
n07753592
n07248320
n03355925
n01737021
n04266014
n02167151
n03930630
n02133161
n02107142
n03180011
n04023962
n01443537
n02443114
n02892201
n03109150
n01872401
n07565083
n02815834
n02206856
n03729826
n10565667
n02111129
n02704792
n02117135
n03000247
n02129604
n04550184
n03089624
n03785016
n01689811
n02441942
n01641577
n02229544
n01622779
n02089973
n02791270
n02102177
n02114855
n13040303
n03944341
n01667114
n04149813
n03792972
n02869837
n02112706
n13044778
n01688243
n02097658
n02109961
n03791053
n04286575
n01985128
n03014705
n04265275
n04467665
n01985128
n04344873
n04335435
n02676566
n01806143
n04599235
n02093859
n04486054
n01601694
n02966193
n02965783
n02099712
n02808440
n03785016
n04285008
n04141076
n07760859
n03717622
n01917289
n03942813
n04409515
n01819313
n03255030
n02328150
n07590611
n01985128
n03998194
n12985857
n03014705
n02823428
n03127747
n02825657
n03935335
n02793495
n04509417
n02655020
n07873807
n02906734
n03720891
n04037443
n04254120
n07614500
n01667114
n02415577
n03710637
n02361337
n04081281
n04070727
n03649909
n07720875
n02011460
n01443537
n04525305
n02894605
n02113712
n09229709
n04367480
n04266014
n02105056
n09421951
n02814860
n02167151
n01744401
n02808304
n02106030
n02074367
n02536864
n04485082
n03538406
n02108915
n02114548
n01698640
n04286575
n02797295
n02124075
n02927161
n02747177
n02641379
n02325366
n02536864
n03697007
n02281406
n03017168
n02090721
n03776460
n02037110
n03100240
n04398044
n02871525
n03792782
n02787622
n03180011
n04522168
n04266014
n03218198
n02088094
n02097298
n04548362
n03196217
n02095889
n01873310
n02088466
n01968897
n04548280
n04604644
n02090379
n03787032
n04229816
n03891251
n02356798
n04350905
n03782006
n01664065
n03950228
n01601694
n01558993
n02777292
n02091134
n02088632
n02442845
n02137549
n01669191
n02007558
n03782006
n03692522
n02916936
n04357314
n02132136
n03930630
n04019541
n04005630
n02102480
n03443371
n04523525
n03814906
n07693725
n04371774
n04209239
n03720891
n02086079
n02071294
n01774384
n01560419
n04204238
n02101556
n03998194
n04486054
n04505470
n02089867
n04179913
n02112018
n04201297
n03673027
n03908714
n02105056
n02791270
n03775071
n03785016
n02088238
n04376876
n03272562
n02132136
n01748264
n02939185
n03485794
n02105412
n02814860
n03527444
n03803284
n02396427
n03877845
n07614500
n01514859
n02105056
n03047690
n04254120
n03218198
n02910353
n04328186
n03776460
n02109961
n03467068
n02704792
n04136333
n02169497
n02094114
n03837869
n03131574
n02090622
n04238763
n01682714
n03388043
n04493381
n04040759
n02099601
n03803284
n02101388
n13044778
n04483307
n03404251
n02090622
n12768682
n04367480
n03134739
n02356798
n02408429
n02974003
n02101388
n03124170
n04435653
n02105855
n07920052
n03272010
n03180011
n07717556
n04235860
n07716358
n02088094
n07873807
n03775071
n02110341
n02817516
n03146219
n02113186
n09246464
n02119022
n03240683
n03706229
n02701002
n04154565
n03467068
n03843555
n02107683
n02088094
n02108915
n02786058
n02326432
n01629819
n01614925
n12267677
n02108422
n02481823
n02892201
n02877765
n01955084
n12057211
n03063689
n02113978
n02777292
n03717622
n02787622
n02437312
n03992509
n01930112
n02500267
n03627232
n04505470
n03250847
n03400231
n02977058
n04554684
n04456115
n04147183
n03676483
n04465501
n02094114
n04532106
n07892512
n04557648
n03482405
n02088238
n03991062
n01751748
n02104029
n03733281
n02536864
n01860187
n03133878
n02110627
n03208938
n04192698
n02106166
n03028079
n04515003
n03787032
n04317175
n03447721
n02326432
n03535780
n03998194
n04560804
n04507155
n03134739
n01697457
n04270147
n02107683
n04525305
n02410509
n02099712
n02132136
n02268853
n01817953
n03929855
n07615774
n02100735
n01833805
n03207743
n04584207
n04266014
n07248320
n03467068
n03908618
n02133161
n02486410
n01755581
n02445715
n01914609
n02841315
n02877765
n01697457
n01981276
n06794110
n04485082
n02119022
n02481823
n02802426
n01689811
n01796340
n02667093
n01622779
n01980166
n02442845
n04328186
n01871265
n03729826
n02123394
n01630670
n02106166
n10148035
n02437616
|
TensorFlow/Detection/SSD/models/research/slim/nets | nets | inception_v4 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains the definition of the Inception V4 architecture.
As described in http://arxiv.org/abs/1602.07261.
Inception-v4, Inception-ResNet and the Impact of Residual Connections
on Learning
Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from nets import inception_utils
slim = tf.contrib.slim
def block_inception_a(inputs, scope=None, reuse=None):
"""Builds Inception-A block for Inception v4 network."""
# By default use stride=1 and SAME padding
with slim.arg_scope([slim.conv2d, slim.avg_pool2d, slim.max_pool2d],
stride=1, padding='SAME'):
with tf.variable_scope(scope, 'BlockInceptionA', [inputs], reuse=reuse):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(inputs, 96, [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(inputs, 64, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 96, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(inputs, 64, [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, 96, [3, 3], scope='Conv2d_0b_3x3')
branch_2 = slim.conv2d(branch_2, 96, [3, 3], scope='Conv2d_0c_3x3')
with tf.variable_scope('Branch_3'):
branch_3 = slim.avg_pool2d(inputs, [3, 3], scope='AvgPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, 96, [1, 1], scope='Conv2d_0b_1x1')
return tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
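# Note (added): with the stride-1/SAME defaults above, block_inception_a
# preserves spatial size and concatenates 96 + 96 + 96 + 96 = 384 output
# channels, matching the "35 x 35 x 384" shapes annotated in
# inception_v4_base below.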
def block_reduction_a(inputs, scope=None, reuse=None):
"""Builds Reduction-A block for Inception v4 network."""
# By default use stride=1 and SAME padding
with slim.arg_scope([slim.conv2d, slim.avg_pool2d, slim.max_pool2d],
stride=1, padding='SAME'):
with tf.variable_scope(scope, 'BlockReductionA', [inputs], reuse=reuse):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(inputs, 384, [3, 3], stride=2, padding='VALID',
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(inputs, 192, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 224, [3, 3], scope='Conv2d_0b_3x3')
branch_1 = slim.conv2d(branch_1, 256, [3, 3], stride=2,
padding='VALID', scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.max_pool2d(inputs, [3, 3], stride=2, padding='VALID',
scope='MaxPool_1a_3x3')
return tf.concat(axis=3, values=[branch_0, branch_1, branch_2])
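# Note (added): block_reduction_a halves the spatial size (stride-2 VALID
# on every branch) and, for the 35x35x384 input produced by the Inception-A
# stack, emits 384 + 256 + 384 (pooled passthrough) = 1024 channels,
# i.e. 17 x 17 x 1024.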
def block_inception_b(inputs, scope=None, reuse=None):
"""Builds Inception-B block for Inception v4 network."""
# By default use stride=1 and SAME padding
with slim.arg_scope([slim.conv2d, slim.avg_pool2d, slim.max_pool2d],
stride=1, padding='SAME'):
with tf.variable_scope(scope, 'BlockInceptionB', [inputs], reuse=reuse):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(inputs, 384, [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(inputs, 192, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 224, [1, 7], scope='Conv2d_0b_1x7')
branch_1 = slim.conv2d(branch_1, 256, [7, 1], scope='Conv2d_0c_7x1')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(inputs, 192, [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, 192, [7, 1], scope='Conv2d_0b_7x1')
branch_2 = slim.conv2d(branch_2, 224, [1, 7], scope='Conv2d_0c_1x7')
branch_2 = slim.conv2d(branch_2, 224, [7, 1], scope='Conv2d_0d_7x1')
branch_2 = slim.conv2d(branch_2, 256, [1, 7], scope='Conv2d_0e_1x7')
with tf.variable_scope('Branch_3'):
branch_3 = slim.avg_pool2d(inputs, [3, 3], scope='AvgPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, 128, [1, 1], scope='Conv2d_0b_1x1')
return tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
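# Note (added): block_inception_b preserves the 17x17 spatial size and
# concatenates 384 + 256 + 256 + 128 = 1024 output channels.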
def block_reduction_b(inputs, scope=None, reuse=None):
"""Builds Reduction-B block for Inception v4 network."""
# By default use stride=1 and SAME padding
with slim.arg_scope([slim.conv2d, slim.avg_pool2d, slim.max_pool2d],
stride=1, padding='SAME'):
with tf.variable_scope(scope, 'BlockReductionB', [inputs], reuse=reuse):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(inputs, 192, [1, 1], scope='Conv2d_0a_1x1')
branch_0 = slim.conv2d(branch_0, 192, [3, 3], stride=2,
padding='VALID', scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(inputs, 256, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 256, [1, 7], scope='Conv2d_0b_1x7')
branch_1 = slim.conv2d(branch_1, 320, [7, 1], scope='Conv2d_0c_7x1')
branch_1 = slim.conv2d(branch_1, 320, [3, 3], stride=2,
padding='VALID', scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.max_pool2d(inputs, [3, 3], stride=2, padding='VALID',
scope='MaxPool_1a_3x3')
return tf.concat(axis=3, values=[branch_0, branch_1, branch_2])
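# Note (added): block_reduction_b halves the spatial size; for the
# 17x17x1024 input it emits 192 + 320 + 1024 (pooled passthrough) = 1536
# channels, i.e. 8 x 8 x 1536.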
def block_inception_c(inputs, scope=None, reuse=None):
"""Builds Inception-C block for Inception v4 network."""
# By default use stride=1 and SAME padding
with slim.arg_scope([slim.conv2d, slim.avg_pool2d, slim.max_pool2d],
stride=1, padding='SAME'):
with tf.variable_scope(scope, 'BlockInceptionC', [inputs], reuse=reuse):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(inputs, 256, [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(inputs, 384, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = tf.concat(axis=3, values=[
slim.conv2d(branch_1, 256, [1, 3], scope='Conv2d_0b_1x3'),
slim.conv2d(branch_1, 256, [3, 1], scope='Conv2d_0c_3x1')])
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(inputs, 384, [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, 448, [3, 1], scope='Conv2d_0b_3x1')
branch_2 = slim.conv2d(branch_2, 512, [1, 3], scope='Conv2d_0c_1x3')
branch_2 = tf.concat(axis=3, values=[
slim.conv2d(branch_2, 256, [1, 3], scope='Conv2d_0d_1x3'),
slim.conv2d(branch_2, 256, [3, 1], scope='Conv2d_0e_3x1')])
with tf.variable_scope('Branch_3'):
branch_3 = slim.avg_pool2d(inputs, [3, 3], scope='AvgPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, 256, [1, 1], scope='Conv2d_0b_1x1')
return tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
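# Note (added): block_inception_c preserves the 8x8 spatial size and
# concatenates 256 + (256 + 256) + (256 + 256) + 256 = 1536 output channels.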
def inception_v4_base(inputs, final_endpoint='Mixed_7d', scope=None):
"""Creates the Inception V4 network up to the given final endpoint.
Args:
inputs: a 4-D tensor of size [batch_size, height, width, 3].
final_endpoint: specifies the endpoint to construct the network up to.
It can be one of [ 'Conv2d_1a_3x3', 'Conv2d_2a_3x3', 'Conv2d_2b_3x3',
'Mixed_3a', 'Mixed_4a', 'Mixed_5a', 'Mixed_5b', 'Mixed_5c', 'Mixed_5d',
'Mixed_5e', 'Mixed_6a', 'Mixed_6b', 'Mixed_6c', 'Mixed_6d', 'Mixed_6e',
'Mixed_6f', 'Mixed_6g', 'Mixed_6h', 'Mixed_7a', 'Mixed_7b', 'Mixed_7c',
'Mixed_7d']
scope: Optional variable_scope.
Returns:
    net: the output tensor corresponding to the given final_endpoint.
end_points: the set of end_points from the inception model.
Raises:
    ValueError: if final_endpoint is not set to one of the predefined values.
"""
end_points = {}
def add_and_check_final(name, net):
end_points[name] = net
return name == final_endpoint
with tf.variable_scope(scope, 'InceptionV4', [inputs]):
with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
stride=1, padding='SAME'):
# 299 x 299 x 3
net = slim.conv2d(inputs, 32, [3, 3], stride=2,
padding='VALID', scope='Conv2d_1a_3x3')
if add_and_check_final('Conv2d_1a_3x3', net): return net, end_points
# 149 x 149 x 32
net = slim.conv2d(net, 32, [3, 3], padding='VALID',
scope='Conv2d_2a_3x3')
if add_and_check_final('Conv2d_2a_3x3', net): return net, end_points
# 147 x 147 x 32
net = slim.conv2d(net, 64, [3, 3], scope='Conv2d_2b_3x3')
if add_and_check_final('Conv2d_2b_3x3', net): return net, end_points
# 147 x 147 x 64
with tf.variable_scope('Mixed_3a'):
with tf.variable_scope('Branch_0'):
branch_0 = slim.max_pool2d(net, [3, 3], stride=2, padding='VALID',
scope='MaxPool_0a_3x3')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, 96, [3, 3], stride=2, padding='VALID',
scope='Conv2d_0a_3x3')
net = tf.concat(axis=3, values=[branch_0, branch_1])
if add_and_check_final('Mixed_3a', net): return net, end_points
# 73 x 73 x 160
with tf.variable_scope('Mixed_4a'):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, 64, [1, 1], scope='Conv2d_0a_1x1')
branch_0 = slim.conv2d(branch_0, 96, [3, 3], padding='VALID',
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, 64, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 64, [1, 7], scope='Conv2d_0b_1x7')
branch_1 = slim.conv2d(branch_1, 64, [7, 1], scope='Conv2d_0c_7x1')
branch_1 = slim.conv2d(branch_1, 96, [3, 3], padding='VALID',
scope='Conv2d_1a_3x3')
net = tf.concat(axis=3, values=[branch_0, branch_1])
if add_and_check_final('Mixed_4a', net): return net, end_points
# 71 x 71 x 192
with tf.variable_scope('Mixed_5a'):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, 192, [3, 3], stride=2, padding='VALID',
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
branch_1 = slim.max_pool2d(net, [3, 3], stride=2, padding='VALID',
scope='MaxPool_1a_3x3')
net = tf.concat(axis=3, values=[branch_0, branch_1])
if add_and_check_final('Mixed_5a', net): return net, end_points
# 35 x 35 x 384
# 4 x Inception-A blocks
for idx in range(4):
block_scope = 'Mixed_5' + chr(ord('b') + idx)
net = block_inception_a(net, block_scope)
if add_and_check_final(block_scope, net): return net, end_points
# 35 x 35 x 384
# Reduction-A block
net = block_reduction_a(net, 'Mixed_6a')
if add_and_check_final('Mixed_6a', net): return net, end_points
# 17 x 17 x 1024
# 7 x Inception-B blocks
for idx in range(7):
block_scope = 'Mixed_6' + chr(ord('b') + idx)
net = block_inception_b(net, block_scope)
if add_and_check_final(block_scope, net): return net, end_points
# 17 x 17 x 1024
# Reduction-B block
net = block_reduction_b(net, 'Mixed_7a')
if add_and_check_final('Mixed_7a', net): return net, end_points
# 8 x 8 x 1536
# 3 x Inception-C blocks
for idx in range(3):
block_scope = 'Mixed_7' + chr(ord('b') + idx)
net = block_inception_c(net, block_scope)
if add_and_check_final(block_scope, net): return net, end_points
raise ValueError('Unknown final endpoint %s' % final_endpoint)
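# Example (added, illustrative): inception_v4_base can be stopped early to
# extract an intermediate feature map, e.g.
#   net, end_points = inception_v4_base(images, final_endpoint='Mixed_6h')
# where net is then the 17 x 17 x 1024 'Mixed_6h' tensor and end_points maps
# every endpoint name built so far to its tensor.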
def inception_v4(inputs, num_classes=1001, is_training=True,
dropout_keep_prob=0.8,
reuse=None,
scope='InceptionV4',
create_aux_logits=True):
"""Creates the Inception V4 model.
Args:
inputs: a 4-D tensor of size [batch_size, height, width, 3].
num_classes: number of predicted classes. If 0 or None, the logits layer
is omitted and the input features to the logits layer (before dropout)
are returned instead.
    is_training: whether the network is being trained.
    dropout_keep_prob: float, the fraction of activations to keep before the
      final layer.
    reuse: whether or not the network and its variables should be reused. To
      reuse them, 'scope' must be given.
scope: Optional variable_scope.
create_aux_logits: Whether to include the auxiliary logits.
Returns:
net: a Tensor with the logits (pre-softmax activations) if num_classes
is a non-zero integer, or the non-dropped input to the logits layer
if num_classes is 0 or None.
end_points: the set of end_points from the inception model.
"""
end_points = {}
with tf.variable_scope(scope, 'InceptionV4', [inputs], reuse=reuse) as scope:
with slim.arg_scope([slim.batch_norm, slim.dropout],
is_training=is_training):
net, end_points = inception_v4_base(inputs, scope=scope)
with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
stride=1, padding='SAME'):
# Auxiliary Head logits
if create_aux_logits and num_classes:
with tf.variable_scope('AuxLogits'):
# 17 x 17 x 1024
aux_logits = end_points['Mixed_6h']
aux_logits = slim.avg_pool2d(aux_logits, [5, 5], stride=3,
padding='VALID',
scope='AvgPool_1a_5x5')
aux_logits = slim.conv2d(aux_logits, 128, [1, 1],
scope='Conv2d_1b_1x1')
aux_logits = slim.conv2d(aux_logits, 768,
aux_logits.get_shape()[1:3],
padding='VALID', scope='Conv2d_2a')
aux_logits = slim.flatten(aux_logits)
aux_logits = slim.fully_connected(aux_logits, num_classes,
activation_fn=None,
scope='Aux_logits')
end_points['AuxLogits'] = aux_logits
# Final pooling and prediction
# TODO(sguada,arnoegw): Consider adding a parameter global_pool which
# can be set to False to disable pooling here (as in resnet_*()).
with tf.variable_scope('Logits'):
# 8 x 8 x 1536
kernel_size = net.get_shape()[1:3]
if kernel_size.is_fully_defined():
net = slim.avg_pool2d(net, kernel_size, padding='VALID',
scope='AvgPool_1a')
else:
net = tf.reduce_mean(net, [1, 2], keep_dims=True,
name='global_pool')
end_points['global_pool'] = net
if not num_classes:
return net, end_points
# 1 x 1 x 1536
net = slim.dropout(net, dropout_keep_prob, scope='Dropout_1b')
net = slim.flatten(net, scope='PreLogitsFlatten')
end_points['PreLogitsFlatten'] = net
# 1536
logits = slim.fully_connected(net, num_classes, activation_fn=None,
scope='Logits')
end_points['Logits'] = logits
end_points['Predictions'] = tf.nn.softmax(logits, name='Predictions')
return logits, end_points
inception_v4.default_image_size = 299
inception_v4_arg_scope = inception_utils.inception_arg_scope
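# A minimal usage sketch (hypothetical, assuming a TF1.x + slim environment;
# the placeholder below is illustrative):
#   images = tf.placeholder(tf.float32, [None, 299, 299, 3])
#   with slim.arg_scope(inception_v4_arg_scope()):
#     logits, end_points = inception_v4(images, num_classes=1001,
#                                       is_training=False)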
|
TensorFlow2/Detection/Efficientdet | Efficientdet | inspector | # Copyright 2020 Google Research. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tool to inspect a model."""
import os
from absl import app
from absl import flags
from absl import logging
import numpy as np
from PIL import Image
import tensorflow as tf
from dllogger import StdOutBackend, JSONStreamBackend, Verbosity
import dllogger as DLLogger
from model import inference
from utils import hparams_config
from utils import model_utils
from utils import setup
flags.DEFINE_string('model_name', 'efficientdet-d0', 'Model.')
flags.DEFINE_string('mode', 'benchmark',
'Run mode: {dry, export, benchmark, video}')
flags.DEFINE_string('trace_filename', None, 'Trace file name.')
flags.DEFINE_integer('bm_runs', 100, 'Number of benchmark runs.')
flags.DEFINE_string('tensorrt', None, 'TensorRT mode: {None, FP32, FP16, INT8}')
flags.DEFINE_integer('batch_size', 1, 'Batch size for inference.')
flags.DEFINE_string('ckpt_path', '_', 'checkpoint dir used for eval.')
flags.DEFINE_string('export_ckpt', None, 'Output model ckpt path.')
flags.DEFINE_string(
'hparams', '', 'Comma separated k=v pairs of hyperparameters or a module'
' containing attributes to use as hyperparameters.')
flags.DEFINE_bool('amp', True, 'Enable mixed precision training')
flags.DEFINE_bool('use_xla', True, 'Use XLA')
flags.DEFINE_string('input_image', None, 'Input image path for inference.')
flags.DEFINE_string('output_image_dir', None, 'Output dir for inference.')
flags.DEFINE_string('dllogger_path', '/tmp/time_log.txt', 'Filepath for dllogger logs')
# For video.
flags.DEFINE_string('input_video', None, 'Input video path for inference.')
flags.DEFINE_string('output_video', None,
'Output video path. If None, play it online instead.')
# For visualization.
flags.DEFINE_integer('max_boxes_to_draw', 100, 'Max number of boxes to draw.')
flags.DEFINE_float('min_score_thresh', 0.4, 'Score threshold to show box.')
flags.DEFINE_string('nms_method', 'hard', 'nms method, hard or gaussian.')
# For saved model.
flags.DEFINE_string('saved_model_dir', None,
'Folder path for saved model.')
flags.DEFINE_string('tflite_path', None, 'Path for exporting tflite file.')
flags.DEFINE_bool('debug', False, 'Debug mode.')
FLAGS = flags.FLAGS
def main(_):
model_config = hparams_config.get_detection_config(FLAGS.model_name)
model_config.override(FLAGS.hparams) # Add custom overrides
model_config.is_training_bn = False
model_config.image_size = model_utils.parse_image_size(model_config.image_size)
# A hack to make flag consistent with nms configs.
if FLAGS.min_score_thresh:
model_config.nms_configs.score_thresh = FLAGS.min_score_thresh
if FLAGS.nms_method:
model_config.nms_configs.method = FLAGS.nms_method
if FLAGS.max_boxes_to_draw:
model_config.nms_configs.max_output_size = FLAGS.max_boxes_to_draw
model_config.mixed_precision = FLAGS.amp
setup.set_flags(FLAGS, model_config, training=False)
model_params = model_config.as_dict()
ckpt_path_or_file = FLAGS.ckpt_path
if tf.io.gfile.isdir(ckpt_path_or_file):
ckpt_path_or_file = tf.train.latest_checkpoint(ckpt_path_or_file)
driver = inference.ServingDriver(FLAGS.model_name, ckpt_path_or_file,
FLAGS.batch_size or None,
FLAGS.min_score_thresh,
FLAGS.max_boxes_to_draw, model_params)
# dllogger setup
backends = [
JSONStreamBackend(verbosity=Verbosity.VERBOSE, filename=FLAGS.dllogger_path),
StdOutBackend(verbosity=Verbosity.DEFAULT)]
DLLogger.init(backends=backends)
DLLogger.metadata('inference_fps', {'unit': 'images/s'})
DLLogger.metadata('inference_latency_ms', {'unit': 'ms'})
DLLogger.metadata('latency_avg', {'unit': 's'})
DLLogger.metadata('latency_90', {'unit': 's'})
DLLogger.metadata('latency_95', {'unit': 's'})
DLLogger.metadata('latency_99', {'unit': 's'})
if FLAGS.mode == 'export':
if tf.io.gfile.exists(FLAGS.saved_model_dir):
tf.io.gfile.rmtree(FLAGS.saved_model_dir)
driver.export(FLAGS.saved_model_dir, FLAGS.tflite_path, FLAGS.tensorrt)
elif FLAGS.mode == 'benchmark':
if FLAGS.saved_model_dir:
driver.load(FLAGS.saved_model_dir)
batch_size = FLAGS.batch_size or 1
if FLAGS.input_image:
image_file = tf.io.read_file(FLAGS.input_image)
image_arrays = tf.image.decode_image(image_file)
image_arrays.set_shape((None, None, 3))
image_arrays = tf.expand_dims(image_arrays, 0)
if batch_size > 1:
image_arrays = tf.tile(image_arrays, [batch_size, 1, 1, 1])
else:
# use synthetic data if no image is provided.
image_arrays = tf.ones((batch_size, *model_config.image_size, 3),
dtype=tf.uint8)
driver.benchmark(image_arrays, FLAGS.bm_runs, FLAGS.trace_filename)
elif FLAGS.mode == 'dry':
# transfer to tf2 format ckpt
driver.build()
if FLAGS.export_ckpt:
driver.model.save_weights(FLAGS.export_ckpt)
elif FLAGS.mode == 'video':
import cv2 # pylint: disable=g-import-not-at-top
if tf.saved_model.contains_saved_model(FLAGS.saved_model_dir):
driver.load(FLAGS.saved_model_dir)
cap = cv2.VideoCapture(FLAGS.input_video)
if not cap.isOpened():
print('Error opening input video: {}'.format(FLAGS.input_video))
out_ptr = None
if FLAGS.output_video:
frame_width, frame_height = int(cap.get(3)), int(cap.get(4))
out_ptr = cv2.VideoWriter(FLAGS.output_video,
cv2.VideoWriter_fourcc('m', 'p', '4', 'v'), 25,
(frame_width, frame_height))
while cap.isOpened():
# Capture frame-by-frame
ret, frame = cap.read()
if not ret:
break
raw_frames = np.array([frame])
detections_bs = driver.serve(raw_frames)
boxes, scores, classes, _ = tf.nest.map_structure(np.array, detections_bs)
new_frame = driver.visualize(
raw_frames[0],
boxes[0],
scores[0],
classes[0],
min_score_thresh=model_config.nms_configs.score_thresh,
max_boxes_to_draw=model_config.nms_configs.max_output_size)
if out_ptr:
# write frame into output file.
out_ptr.write(new_frame)
else:
# show the frame online, mainly used for real-time speed test.
cv2.imshow('Frame', new_frame)
# Press Q on keyboard to exit
if cv2.waitKey(1) & 0xFF == ord('q'):
break
if __name__ == '__main__':
logging.set_verbosity(logging.ERROR)
app.run(main)
|
TensorFlow2/LanguageModeling/BERT/official/utils/flags | flags | _benchmark | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Flags for benchmarking models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
from official.utils.flags._conventions import help_wrap
def define_benchmark(benchmark_log_dir=True, bigquery_uploader=True):
"""Register benchmarking flags.
Args:
benchmark_log_dir: Create a flag to specify location for benchmark logging.
bigquery_uploader: Create flags for uploading results to BigQuery.
Returns:
A list of flags for core.py to marks as key flags.
"""
key_flags = []
flags.DEFINE_enum(
name="benchmark_logger_type", default="BaseBenchmarkLogger",
enum_values=["BaseBenchmarkLogger", "BenchmarkFileLogger",
"BenchmarkBigQueryLogger"],
help=help_wrap("The type of benchmark logger to use. Defaults to using "
"BaseBenchmarkLogger which logs to STDOUT. Different "
"loggers will require other flags to be able to work."))
flags.DEFINE_string(
name="benchmark_test_id", short_name="bti", default=None,
help=help_wrap("The unique test ID of the benchmark run. It could be the "
"combination of key parameters. It is hardware "
"independent and could be used to compare the performance "
"between different test runs. This flag is designed for "
"human consumption, and does not have any impact within "
"the system."))
flags.DEFINE_integer(
name='log_steps', default=100,
help='For every log_steps, we log the timing information such as '
'examples per second. Besides, for every log_steps, we store the '
'timestamp of a batch end.')
if benchmark_log_dir:
flags.DEFINE_string(
name="benchmark_log_dir", short_name="bld", default=None,
help=help_wrap("The location of the benchmark logging.")
)
if bigquery_uploader:
flags.DEFINE_string(
name="gcp_project", short_name="gp", default=None,
help=help_wrap(
"The GCP project name where the benchmark will be uploaded."))
flags.DEFINE_string(
name="bigquery_data_set", short_name="bds", default="test_benchmark",
help=help_wrap(
"The Bigquery dataset name where the benchmark will be uploaded."))
flags.DEFINE_string(
name="bigquery_run_table", short_name="brt", default="benchmark_run",
help=help_wrap("The Bigquery table name where the benchmark run "
"information will be uploaded."))
flags.DEFINE_string(
name="bigquery_run_status_table", short_name="brst",
default="benchmark_run_status",
help=help_wrap("The Bigquery table name where the benchmark run "
"status information will be uploaded."))
flags.DEFINE_string(
name="bigquery_metric_table", short_name="bmt",
default="benchmark_metric",
help=help_wrap("The Bigquery table name where the benchmark metric "
"information will be uploaded."))
@flags.multi_flags_validator(
["benchmark_logger_type", "benchmark_log_dir"],
message="--benchmark_logger_type=BenchmarkFileLogger will require "
"--benchmark_log_dir being set")
def _check_benchmark_log_dir(flags_dict):
benchmark_logger_type = flags_dict["benchmark_logger_type"]
if benchmark_logger_type == "BenchmarkFileLogger":
return flags_dict["benchmark_log_dir"]
return True
return key_flags
|
PyTorch/Recommendation/DLRM/preproc | preproc | preproc_NVTabular | # Copyright (c) 2021 NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Preprocess Criteo 1TB Click Logs dataset with frequency thresholding and filling missing values.
This script accepts input in either tsv or parquet format.
"""
import argparse
from collections import OrderedDict
import json
import os
import subprocess
from time import time
from typing import List, Optional
import numpy as np
import nvtabular as nvt
import rmm
import cudf
from dask.base import tokenize
from dask.dataframe.io.parquet.utils import _analyze_paths
from dask.delayed import Delayed
from dask.distributed import Client
from dask.highlevelgraph import HighLevelGraph
from dask.utils import natural_sort_key
from dask_cuda import LocalCUDACluster
from fsspec.core import get_fs_token_paths
from nvtabular import Workflow
from nvtabular.io import Dataset, Shuffle
from nvtabular.utils import device_mem_size
from nvtabular.ops import Normalize, Categorify, LogOp, FillMissing, Clip, get_embedding_sizes, \
LambdaOp
from cudf.io.parquet import ParquetWriter
CRITEO_CONTINUOUS_COLUMNS = [f'_c{x}' for x in range(1, 14)]
CRITEO_CATEGORICAL_COLUMNS = [f'_c{x}' for x in range(14, 40)]
CRITEO_CLICK_COLUMNS = ['_c0']
COLUMNS = CRITEO_CONTINUOUS_COLUMNS + CRITEO_CATEGORICAL_COLUMNS + CRITEO_CLICK_COLUMNS
CRITEO_TRAIN_DAYS = list(range(0, 23))
ALL_DS_MEM_FRAC = 0.04
TRAIN_DS_MEM_FRAC = 0.045
TEST_DS_MEM_FRAC = 0.3
VALID_DS_MEM_FRAC = 0.3
def _pool(frac=0.8):
initial_pool_size = frac * device_mem_size()
if initial_pool_size % 256 != 0:
new_initial_pool_size = initial_pool_size // 256 * 256
print(
f"Initial pool size for rmm has to be a multiply of 256. Got {initial_pool_size}, reducing to {new_initial_pool_size}")
initial_pool_size = new_initial_pool_size
rmm.reinitialize(
pool_allocator=True,
initial_pool_size=initial_pool_size,
)
def _convert_file(path, name, out_dir, gpu_mem_frac, fs, cols, dtypes):
fn = f"{name}.parquet"
out_path = fs.sep.join([out_dir, f"{name}.parquet"])
writer = ParquetWriter(out_path, compression=None)
for gdf in nvt.Dataset(
path,
engine="csv",
names=cols,
part_memory_fraction=gpu_mem_frac,
sep='\t',
dtypes=dtypes,
).to_iter():
writer.write_table(gdf)
del gdf
md = writer.close(metadata_file_path=fn)
return md
def _write_metadata(md_list, fs, path):
if md_list:
metadata_path = fs.sep.join([path, "_metadata"])
_meta = (
cudf.io.merge_parquet_filemetadata(md_list)
if len(md_list) > 1
else md_list[0]
)
with fs.open(metadata_path, "wb") as f:
_meta.tofile(f)
return True
def convert_criteo_to_parquet(
input_path: str,
output_path: str,
client,
gpu_mem_frac: float = 0.05,
):
print("Converting tsv to parquet files")
if not output_path:
raise RuntimeError("Intermediate directory must be defined, if the dataset is tsv.")
os.makedirs(output_path, exist_ok=True)
# split last day into two parts
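# day_23.part1 (the first half) later becomes the test set and day_23.part2
# (the second half) the validation set; see preprocess_criteo_parquet below.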
number_of_lines = int(
subprocess.check_output((f'wc -l {os.path.join(input_path, "day_23")}').split()).split()[0])
valid_set_size = number_of_lines // 2
test_set_size = number_of_lines - valid_set_size
with open(os.path.join(input_path, "day_23.part1"), "w") as f:
subprocess.run(['head', '-n', str(test_set_size), str(os.path.join(input_path, "day_23"))], stdout=f)
with open(os.path.join(input_path, "day_23.part2"), "w") as f:
subprocess.run(['tail', '-n', str(valid_set_size), str(os.path.join(input_path, "day_23"))], stdout=f)
fs = get_fs_token_paths(input_path, mode="rb")[0]
file_list = [
x for x in fs.glob(fs.sep.join([input_path, "day_*"]))
if not x.endswith("parquet")
]
file_list = sorted(file_list, key=natural_sort_key)
name_list = _analyze_paths(file_list, fs)[1]
cols = CRITEO_CLICK_COLUMNS + CRITEO_CONTINUOUS_COLUMNS + CRITEO_CATEGORICAL_COLUMNS
dtypes = {}
dtypes[CRITEO_CLICK_COLUMNS[0]] = np.int64
for x in CRITEO_CONTINUOUS_COLUMNS:
dtypes[x] = np.int64
for x in CRITEO_CATEGORICAL_COLUMNS:
dtypes[x] = "hex"
dsk = {}
token = tokenize(file_list, name_list, output_path, gpu_mem_frac, fs, cols, dtypes)
convert_file_name = "convert_file-" + token
for i, (path, name) in enumerate(zip(file_list, name_list)):
key = (convert_file_name, i)
dsk[key] = (_convert_file, path, name, output_path, gpu_mem_frac, fs, cols, dtypes)
write_meta_name = "write-metadata-" + token
dsk[write_meta_name] = (
_write_metadata,
[(convert_file_name, i) for i in range(len(file_list))],
fs,
output_path,
)
graph = HighLevelGraph.from_collections(write_meta_name, dsk, dependencies=[])
conversion_delayed = Delayed(write_meta_name, graph)
if client:
conversion_delayed.compute()
else:
conversion_delayed.compute(scheduler="synchronous")
print("Converted")
def save_model_size_config(workflow: Workflow, output_path: str):
embeddings = {}
for k, v in get_embedding_sizes(workflow).items():
embeddings[k] = v[0] - 1 # we have to subtract one, as the model expects to get a maximal id for each category
ordered_dict = OrderedDict()
for k, v in sorted(list(embeddings.items()), key=lambda x: x[0]):
ordered_dict[k] = v
with open(os.path.join(output_path, "model_size.json"), 'w') as file:
file.write(json.dumps(ordered_dict))
def preprocess_criteo_parquet(
input_path: str,
output_path: str,
client,
frequency_threshold: int,
):
train_days = [str(x) for x in CRITEO_TRAIN_DAYS]
train_files = [
os.path.join(input_path, x)
for x in os.listdir(input_path)
if x.startswith("day") and x.split(".")[0].split("_")[-1] in train_days
]
valid_file = os.path.join(input_path, "day_23.part2.parquet")
test_file = os.path.join(input_path, "day_23.part1.parquet")
all_set = train_files + [valid_file] + [test_file]
print(all_set, train_files, valid_file, test_file)
print("Creating Workflow Object")
workflow = Workflow(
cat_names=CRITEO_CATEGORICAL_COLUMNS,
cont_names=CRITEO_CONTINUOUS_COLUMNS,
label_name=CRITEO_CLICK_COLUMNS
)
# We want to assign 0 to all missing values, and calculate log(x+3) for present values
# so if we set missing values to -2, then the result of log(1+2+(-2)) would be 0
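# Worked example: missing -> -2 -> (+2) -> 0 -> log(1+0) = 0; present x -> (+2) -> log(1+x+2) = log(x+3)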
workflow.add_cont_feature([
FillMissing(fill_val=-2.0),
LambdaOp(op_name='Add3ButMinusOneCauseLogAddsOne', f=lambda col, _: col.add(2.0)),
LogOp(), # Log(1+x)
])
workflow.add_cat_preprocess(
Categorify(freq_threshold=frequency_threshold, out_path=output_path)
)
workflow.finalize()
print("Creating Dataset Iterator")
all_ds = Dataset(all_set, engine="parquet", part_mem_fraction=ALL_DS_MEM_FRAC)
trains_ds = Dataset(train_files, engine="parquet", part_mem_fraction=TRAIN_DS_MEM_FRAC)
valid_ds = Dataset(valid_file, engine="parquet", part_mem_fraction=TEST_DS_MEM_FRAC)
test_ds = Dataset(test_file, engine="parquet", part_mem_fraction=VALID_DS_MEM_FRAC)
print("Running apply")
out_train = os.path.join(output_path, "train")
out_valid = os.path.join(output_path, "validation")
out_test = os.path.join(output_path, "test")
start = time()
workflow.update_stats(all_ds)
print(f"Gathering statistics time: {time() - start}")
start = time()
workflow.apply(
trains_ds,
record_stats=False,
output_path=out_train
)
print(f"train preprocess time: {time() - start}")
start = time()
workflow.apply(
valid_ds,
record_stats=False,
output_path=out_valid
)
print(f"valid preprocess time: {time() - start}")
start = time()
workflow.apply(
test_ds,
record_stats=False,
output_path=out_test
)
print(f"test preprocess time: {time() - start}")
save_model_size_config(workflow, output_path)
def parse_args():
parser = argparse.ArgumentParser(description="Process some integers.")
parser.add_argument(
"input_dir",
help="directory with either csv or parquet dataset files inside"
)
parser.add_argument(
"output_dir",
help="directory to save preprocessed dataset files"
)
parser.add_argument(
"--intermediate_dir",
required=False,
default=None,
help="directory for converted to parquet dataset files inside"
)
parser.add_argument(
"--devices",
required=True,
help="available gpus, separated with commas; e.g 0,1,2,3"
)
parser.add_argument(
"--freq_threshold",
required=False,
default=15,
help="frequency threshold for categorical can be int or dict {column_name: threshold}"
)
parser.add_argument(
"--pool",
required=False,
default=False,
help="bool value to use a RMM pooled allocator"
)
args = parser.parse_args()
args.devices = args.devices.split(",")
return args
def is_input_parquet(input_dir: str):
for f in os.listdir(input_dir):
if 'parquet' in f:
return True
return False
def start_local_CUDA_cluster(devices, pool):
client = None  # with a single device there is no cluster; callers receive None
if len(devices) > 1:
cluster = LocalCUDACluster(
n_workers=len(devices),
CUDA_VISIBLE_DEVICES=",".join(str(x) for x in devices),
)
client = Client(cluster)
if pool:
client.run(_pool)
elif pool:
_pool()
return client
def main():
args = parse_args()
client = start_local_CUDA_cluster(args.devices, args.pool)
if not is_input_parquet(args.input_dir):
convert_criteo_to_parquet(
input_path=args.input_dir,
output_path=args.intermediate_dir,
client=client,
)
args.input_dir = args.intermediate_dir
print("Preprocessing data")
preprocess_criteo_parquet(
input_path=args.input_dir,
output_path=args.output_dir,
client=client,
frequency_threshold=int(args.freq_threshold),
)
print("Done")
if __name__ == '__main__':
main()
|
TensorFlow/Detection/SSD/models/research/object_detection/metrics | metrics | oid_od_challenge_evaluation | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Runs evaluation using OpenImages groundtruth and predictions.
Example usage:
python models/research/object_detection/metrics/oid_od_challenge_evaluation.py \
--input_annotations_boxes=/path/to/input/annotations-human-bbox.csv \
--input_annotations_labels=/path/to/input/annotations-label.csv \
--input_class_labelmap=/path/to/input/class_labelmap.pbtxt \
--input_predictions=/path/to/input/predictions.csv \
--output_metrics=/path/to/output/metric.csv \
CSVs with bounding box annotations and image label (including the image URLs)
can be downloaded from the Open Images Challenge website:
https://storage.googleapis.com/openimages/web/challenge.html
The format of the input csv and the metrics itself are described on the
challenge website.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import pandas as pd
from google.protobuf import text_format
from object_detection.metrics import io_utils
from object_detection.metrics import oid_od_challenge_evaluation_utils as utils
from object_detection.protos import string_int_label_map_pb2
from object_detection.utils import object_detection_evaluation
def _load_labelmap(labelmap_path):
"""Loads labelmap from the labelmap path.
Args:
labelmap_path: Path to the labelmap.
Returns:
A dictionary mapping class name to class numerical id
A list with dictionaries, one dictionary per category.
"""
label_map = string_int_label_map_pb2.StringIntLabelMap()
with open(labelmap_path, 'r') as fid:
label_map_string = fid.read()
text_format.Merge(label_map_string, label_map)
labelmap_dict = {}
categories = []
for item in label_map.item:
labelmap_dict[item.name] = item.id
categories.append({'id': item.id, 'name': item.name})
return labelmap_dict, categories
def main(parsed_args):
all_box_annotations = pd.read_csv(parsed_args.input_annotations_boxes)
all_label_annotations = pd.read_csv(parsed_args.input_annotations_labels)
all_label_annotations.rename(
columns={'Confidence': 'ConfidenceImageLabel'}, inplace=True)
all_annotations = pd.concat([all_box_annotations, all_label_annotations])
class_label_map, categories = _load_labelmap(parsed_args.input_class_labelmap)
challenge_evaluator = (
object_detection_evaluation.OpenImagesDetectionChallengeEvaluator(
categories))
for _, groundtruth in enumerate(all_annotations.groupby('ImageID')):
image_id, image_groundtruth = groundtruth
groundtruth_dictionary = utils.build_groundtruth_boxes_dictionary(
image_groundtruth, class_label_map)
challenge_evaluator.add_single_ground_truth_image_info(
image_id, groundtruth_dictionary)
all_predictions = pd.read_csv(parsed_args.input_predictions)
for _, prediction_data in enumerate(all_predictions.groupby('ImageID')):
image_id, image_predictions = prediction_data
prediction_dictionary = utils.build_predictions_dictionary(
image_predictions, class_label_map)
challenge_evaluator.add_single_detected_image_info(image_id,
prediction_dictionary)
metrics = challenge_evaluator.evaluate()
with open(parsed_args.output_metrics, 'w') as fid:
io_utils.write_csv(fid, metrics)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Evaluate Open Images Object Detection Challenge predictions.'
)
parser.add_argument(
'--input_annotations_boxes',
required=True,
help='File with groundtruth boxes annotations.')
parser.add_argument(
'--input_annotations_labels',
required=True,
help='File with groundtruth labels annotations')
parser.add_argument(
'--input_predictions',
required=True,
help="""File with detection predictions; NOTE: no postprocessing is
applied in the evaluation script.""")
parser.add_argument(
'--input_class_labelmap',
required=True,
help='Open Images Challenge labelmap.')
parser.add_argument(
'--output_metrics', required=True, help='Output file with csv metrics')
args = parser.parse_args()
main(args)
|
PyTorch/SpeechRecognition/Jasper/platform | platform | DGX1-32GB_Jasper_AMP_8GPU | #!/bin/bash
NUM_GPUS=8 AMP=true BATCH_SIZE=64 GRAD_ACCUMULATION_STEPS=1 bash scripts/train.sh "$@"
|
PyTorch/SpeechSynthesis/HiFiGAN/hifigan | hifigan | __init__ | from .entrypoints import nvidia_hifigan
|
PyTorch/SpeechSynthesis/FastPitch/common/text | text | text_processing | """ adapted from https://github.com/keithito/tacotron """
import re
import numpy as np
from . import cleaners
from .symbols import get_symbols
from . import cmudict
from .numerical import _currency_re, _expand_currency
#########
# REGEX #
#########
# Regular expression matching text enclosed in curly braces for encoding
_curly_re = re.compile(r'(.*?)\{(.+?)\}(.*)')
# Regular expression matching words and not words
_words_re = re.compile(r"([a-zA-ZÀ-ž]+['][a-zA-ZÀ-ž]{1,2}|[a-zA-ZÀ-ž]+)|([{][^}]+[}]|[^a-zA-ZÀ-ž{}]+)")
# Regular expression separating words enclosed in curly braces for cleaning
_arpa_re = re.compile(r'{[^}]+}|\S+')
class TextProcessing(object):
def __init__(self, symbol_set, cleaner_names, p_arpabet=0.0,
handle_arpabet='word', handle_arpabet_ambiguous='ignore',
expand_currency=True):
self.symbols = get_symbols(symbol_set)
self.cleaner_names = cleaner_names
# Mappings from symbol to numeric ID and vice versa:
self.symbol_to_id = {s: i for i, s in enumerate(self.symbols)}
self.id_to_symbol = {i: s for i, s in enumerate(self.symbols)}
self.expand_currency = expand_currency
# cmudict
self.p_arpabet = p_arpabet
self.handle_arpabet = handle_arpabet
self.handle_arpabet_ambiguous = handle_arpabet_ambiguous
def text_to_sequence(self, text):
sequence = []
# Check for curly braces and treat their contents as ARPAbet:
while len(text):
m = _curly_re.match(text)
if not m:
sequence += self.symbols_to_sequence(text)
break
sequence += self.symbols_to_sequence(m.group(1))
sequence += self.arpabet_to_sequence(m.group(2))
text = m.group(3)
return sequence
def sequence_to_text(self, sequence):
result = ''
for symbol_id in sequence:
if symbol_id in self.id_to_symbol:
s = self.id_to_symbol[symbol_id]
# Enclose ARPAbet back in curly braces:
if len(s) > 1 and s[0] == '@':
s = '{%s}' % s[1:]
result += s
return result.replace('}{', ' ')
def clean_text(self, text):
for name in self.cleaner_names:
cleaner = getattr(cleaners, name)
if not cleaner:
raise Exception('Unknown cleaner: %s' % name)
text = cleaner(text)
return text
def symbols_to_sequence(self, symbols):
return [self.symbol_to_id[s] for s in symbols if s in self.symbol_to_id]
def arpabet_to_sequence(self, text):
return self.symbols_to_sequence(['@' + s for s in text.split()])
def get_arpabet(self, word):
arpabet_suffix = ''
if word.lower() in cmudict.heteronyms:
return word
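# For possessives ("'s") and plurals missing from CMUdict, look up the stem and append the ARPAbet 'Z' phone.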
if len(word) > 2 and word.endswith("'s"):
arpabet = cmudict.lookup(word)
if arpabet is None:
arpabet = self.get_arpabet(word[:-2])
arpabet_suffix = ' Z'
elif len(word) > 1 and word.endswith("s"):
arpabet = cmudict.lookup(word)
if arpabet is None:
arpabet = self.get_arpabet(word[:-1])
arpabet_suffix = ' Z'
else:
arpabet = cmudict.lookup(word)
if arpabet is None:
return word
elif arpabet[0] == '{':
arpabet = [arpabet[1:-1]]
# the recursive stem lookup above may have returned a plain string rather than a list
if type(arpabet) is not list:
return word
if len(arpabet) > 1:
if self.handle_arpabet_ambiguous == 'first':
arpabet = arpabet[0]
elif self.handle_arpabet_ambiguous == 'random':
arpabet = np.random.choice(arpabet)
elif self.handle_arpabet_ambiguous == 'ignore':
return word
else:
arpabet = arpabet[0]
arpabet = "{" + arpabet + arpabet_suffix + "}"
return arpabet
def encode_text(self, text, return_all=False):
if self.expand_currency:
text = re.sub(_currency_re, _expand_currency, text)
text_clean = [self.clean_text(split) if split[0] != '{' else split
for split in _arpa_re.findall(text)]
text_clean = ' '.join(text_clean)
text_clean = cleaners.collapse_whitespace(text_clean)
text = text_clean
text_arpabet = ''
if self.p_arpabet > 0:
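# 'sentence' mode flips a single coin for the whole utterance; 'word' mode decides independently per word.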
if self.handle_arpabet == 'sentence':
if np.random.uniform() < self.p_arpabet:
words = _words_re.findall(text)
text_arpabet = [
self.get_arpabet(word[0])
if (word[0] != '') else word[1]
for word in words]
text_arpabet = ''.join(text_arpabet)
text = text_arpabet
elif self.handle_arpabet == 'word':
words = _words_re.findall(text)
text_arpabet = [
word[1] if word[0] == '' else (
self.get_arpabet(word[0])
if np.random.uniform() < self.p_arpabet
else word[0])
for word in words]
text_arpabet = ''.join(text_arpabet)
text = text_arpabet
elif self.handle_arpabet != '':
raise Exception("{} handle_arpabet is not supported".format(
self.handle_arpabet))
text_encoded = self.text_to_sequence(text)
if return_all:
return text_encoded, text_clean, text_arpabet
return text_encoded
def get_text_processing(symbol_set, text_cleaners, p_arpabet):
if symbol_set in ['english_basic', 'english_basic_lowercase', 'english_expanded']:
return TextProcessing(symbol_set, text_cleaners, p_arpabet=p_arpabet)
elif symbol_set == 'english_mandarin_basic':
from common.text.zh.mandarin_text_processing import MandarinTextProcessing
return MandarinTextProcessing(symbol_set, text_cleaners, p_arpabet=p_arpabet)
else:
raise ValueError(f"No TextProcessing for symbol set {symbol_set} unknown.")
|
TensorFlow/Segmentation/UNet_3D_Medical | UNet_3D_Medical | .gitignore | .idea/
*.tar
.ipynb_checkpoints
/_python_build
*.pyc
__pycache__
*.swp
/datasets
/results
results
/data
|
PyTorch/Classification/ConvNets/image_classification | image_classification | mixup | # Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import torch.nn as nn
import numpy as np
def mixup(alpha, data, target):
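# Forms a convex combination of each sample (and its soft target) with a randomly
# permuted copy of the batch; one coefficient c ~ Beta(alpha, alpha) is drawn per batch.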
with torch.no_grad():
bs = data.size(0)
c = np.random.beta(alpha, alpha)
perm = torch.randperm(bs).cuda()
md = c * data + (1 - c) * data[perm, :]
mt = c * target + (1 - c) * target[perm, :]
return md, mt
class MixUpWrapper(object):
def __init__(self, alpha, dataloader):
self.alpha = alpha
self.dataloader = dataloader
def mixup_loader(self, loader):
for input, target in loader:
i, t = mixup(self.alpha, input, target)
yield i, t
def __iter__(self):
return self.mixup_loader(self.dataloader)
def __len__(self):
return len(self.dataloader)
class NLLMultiLabelSmooth(nn.Module):
def __init__(self, smoothing=0.0):
super(NLLMultiLabelSmooth, self).__init__()
self.confidence = 1.0 - smoothing
self.smoothing = smoothing
def forward(self, x, target):
if self.training:
x = x.float()
target = target.float()
logprobs = torch.nn.functional.log_softmax(x, dim=-1)
nll_loss = -logprobs * target
nll_loss = nll_loss.sum(-1)
smooth_loss = -logprobs.mean(dim=-1)
loss = self.confidence * nll_loss + self.smoothing * smooth_loss
return loss.mean()
else:
return torch.nn.functional.cross_entropy(x, target)
|
TensorFlow2/Classification/ConvNets/efficientnet_v1/B0/evaluation | evaluation | evaluation_FP32_V100-32G | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
python3 main.py --cfg config/efficientnet_v1/b0_cfg.py \
--mode eval \
--use_xla \
--model_dir ./output \
--data_dir /data \
--eval_batch_size 256
|
TensorFlow2/Segmentation/nnUNet/models | models | unet | import tensorflow as tf
from models import layers
class UNet(tf.keras.Model):
def __init__(
self,
input_shape,
n_class,
kernels,
strides,
normalization_layer,
negative_slope,
dimension,
deep_supervision,
):
super().__init__()
self.dim = dimension
self.n_class = n_class
self.negative_slope = negative_slope
self.norm = normalization_layer
self.deep_supervision = deep_supervision
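# Feature widths double per encoder level starting at 32, capped at 320 for 3D and 512 for 2D.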
filters = [min(2 ** (5 + i), 320 if dimension == 3 else 512) for i in range(len(strides))]
self.filters = filters
self.kernels = kernels
self.strides = strides
down_block = layers.ConvBlock
self.input_block = self.get_conv_block(
conv_block=down_block,
filters=filters[0],
kernel_size=kernels[0],
stride=strides[0],
input_shape=input_shape,
)
self.downsamples = self.get_block_list(
conv_block=down_block, filters=filters[1:], kernels=kernels[1:-1], strides=strides[1:-1]
)
self.bottleneck = self.get_conv_block(
conv_block=down_block, filters=filters[-1], kernel_size=kernels[-1], stride=strides[-1]
)
self.upsamples = self.get_block_list(
conv_block=layers.UpsampleBlock,
filters=filters[:-1][::-1],
kernels=kernels[1:][::-1],
strides=strides[1:][::-1],
)
self.output_block = self.get_output_block()
if self.deep_supervision:
self.deep_supervision_heads = [self.get_output_block(), self.get_output_block()]
self.n_layers = len(self.upsamples) - 1
def call(self, x, training=True):
skip_connections = []
out = self.input_block(x)
skip_connections.append(out)
for down_block in self.downsamples:
out = down_block(out)
skip_connections.append(out)
out = self.bottleneck(out)
decoder_outputs = []
for up_block in self.upsamples:
out = up_block(out, skip_connections.pop())
decoder_outputs.append(out)
out = self.output_block(out)
if training and self.deep_supervision:
out = [
out,
self.deep_supervision_heads[0](decoder_outputs[-2]),
self.deep_supervision_heads[1](decoder_outputs[-3]),
]
return out
def get_output_block(self):
return layers.OutputBlock(filters=self.n_class, dim=self.dim, negative_slope=self.negative_slope)
def get_conv_block(self, conv_block, filters, kernel_size, stride, **kwargs):
return conv_block(
dim=self.dim,
stride=stride,
norm=self.norm,
kernel_size=kernel_size,
filters=filters,
negative_slope=self.negative_slope,
**kwargs,
)
def get_block_list(self, conv_block, filters, kernels, strides):
blocks = []  # named `blocks` to avoid shadowing the imported `layers` module
for n_filters, kernel, stride in zip(filters, kernels, strides):
conv_layer = self.get_conv_block(conv_block, n_filters, kernel, stride)
blocks.append(conv_layer)
return blocks
|
TensorFlow/Detection/SSD/models/research/object_detection | object_detection | model_main | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Binary to run train and evaluation on object detection model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
import tensorflow as tf
import horovod.tensorflow as hvd
import dllogger
import time
import os
from object_detection import model_hparams
from object_detection import model_lib
from object_detection.utils.exp_utils import AverageMeter, setup_dllogger
flags.DEFINE_string(
'model_dir', None, 'Path to output model directory '
'where event and checkpoint files will be written.')
flags.DEFINE_string('pipeline_config_path', None, 'Path to pipeline config '
'file.')
flags.DEFINE_string("raport_file", default="summary.json",
help="Path to dlloger json")
flags.DEFINE_integer('num_train_steps', None, 'Number of train steps.')
flags.DEFINE_boolean('eval_training_data', False,
'If training data should be evaluated for this job. Note '
'that one call only use this in eval-only mode, and '
'`checkpoint_dir` must be supplied.')
flags.DEFINE_integer('sample_1_of_n_eval_examples', 1, 'Will sample one of '
'every n eval input examples, where n is provided.')
flags.DEFINE_integer('sample_1_of_n_eval_on_train_examples', 5, 'Will sample '
'one of every n train input examples for evaluation, '
'where n is provided. This is only used if '
'`eval_training_data` is True.')
flags.DEFINE_integer('eval_count', 1, 'How many times the evaluation should be run')
flags.DEFINE_string(
'hparams_overrides', None, 'Hyperparameter overrides, '
'represented as a string containing comma-separated '
'hparam_name=value pairs.')
flags.DEFINE_string(
'checkpoint_dir', None, 'Path to directory holding a checkpoint. If '
'`checkpoint_dir` is provided, this binary operates in eval-only mode, '
'writing resulting metrics to `model_dir`.')
flags.DEFINE_boolean(
'allow_xla', False, 'Enable XLA compilation')
flags.DEFINE_boolean(
'amp', False, 'Whether to enable AMP ops. When false, uses TF32 on A100 and FP32 on V100 GPUS.')
flags.DEFINE_boolean(
'run_once', False, 'If running in eval-only mode, whether to run just '
'one round of eval vs running continuously (default).'
)
FLAGS = flags.FLAGS
class DLLoggerHook(tf.estimator.SessionRunHook):
def __init__(self, global_batch_size, rank=-1):
self.global_batch_size = global_batch_size
self.rank = rank
setup_dllogger(enabled=True, filename=FLAGS.raport_file, rank=rank)
def after_create_session(self, session, coord):
self.meters = {}
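# Exclude the first `warmup` iterations from the throughput average so that
# startup costs (graph building, autotuning) do not skew the reported number.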
warmup = 100
self.meters['train_throughput'] = AverageMeter(warmup=warmup)
def before_run(self, run_context):
self.t0 = time.time()
return tf.estimator.SessionRunArgs(fetches=['global_step:0', 'learning_rate:0'])
def after_run(self, run_context, run_values):
throughput = self.global_batch_size/(time.time() - self.t0)
global_step, lr = run_values.results
self.meters['train_throughput'].update(throughput)
def end(self, session):
summary = {
'train_throughput': self.meters['train_throughput'].avg,
}
dllogger.log(step=tuple(), data=summary)
def main(unused_argv):
tf.logging.set_verbosity(tf.logging.INFO)
if FLAGS.amp:
os.environ["TF_ENABLE_AUTO_MIXED_PRECISION"] = "1"
else:
os.environ["TF_ENABLE_AUTO_MIXED_PRECISION"] = "0"
hvd.init()
flags.mark_flag_as_required('model_dir')
flags.mark_flag_as_required('pipeline_config_path')
session_config = tf.ConfigProto()
session_config.gpu_options.per_process_gpu_memory_fraction=0.9
session_config.gpu_options.visible_device_list = str(hvd.local_rank())
if FLAGS.allow_xla:
session_config.graph_options.optimizer_options.global_jit_level = tf.OptimizerOptions.ON_1
model_dir = FLAGS.model_dir if hvd.rank() == 0 else None
config = tf.estimator.RunConfig(model_dir=model_dir, session_config=session_config)
train_and_eval_dict = model_lib.create_estimator_and_inputs(
run_config=config,
eval_count=FLAGS.eval_count,
hparams=model_hparams.create_hparams(FLAGS.hparams_overrides),
pipeline_config_path=FLAGS.pipeline_config_path,
train_steps=FLAGS.num_train_steps,
sample_1_of_n_eval_examples=FLAGS.sample_1_of_n_eval_examples,
sample_1_of_n_eval_on_train_examples=(
FLAGS.sample_1_of_n_eval_on_train_examples))
estimator = train_and_eval_dict['estimator']
train_input_fn = train_and_eval_dict['train_input_fn']
eval_input_fns = train_and_eval_dict['eval_input_fns']
eval_on_train_input_fn = train_and_eval_dict['eval_on_train_input_fn']
predict_input_fn = train_and_eval_dict['predict_input_fn']
train_steps = train_and_eval_dict['train_steps']
if FLAGS.checkpoint_dir:
if FLAGS.eval_training_data:
name = 'training_data'
input_fn = eval_on_train_input_fn
else:
name = 'validation_data'
# The first eval input will be evaluated.
input_fn = eval_input_fns[0]
if FLAGS.run_once:
estimator.evaluate(input_fn,
steps=None,
checkpoint_path=tf.train.latest_checkpoint(
FLAGS.checkpoint_dir))
else:
model_lib.continuous_eval(estimator, FLAGS.checkpoint_dir, input_fn,
train_steps, name)
else:
train_spec, eval_specs = model_lib.create_train_and_eval_specs(
train_input_fn,
eval_input_fns,
eval_on_train_input_fn,
predict_input_fn,
train_steps,
eval_on_train_data=False)
train_hooks = [hvd.BroadcastGlobalVariablesHook(0), DLLoggerHook(hvd.size()*train_and_eval_dict['train_batch_size'], hvd.rank())]
eval_hooks = []
for _ in range(FLAGS.eval_count):
estimator.train(train_input_fn,
hooks=train_hooks,
steps=train_steps // FLAGS.eval_count)
if hvd.rank() == 0:
eval_input_fn = eval_input_fns[0]
results = estimator.evaluate(eval_input_fn,
steps=None,
hooks=eval_hooks)
if __name__ == '__main__':
tf.app.run()
|
TensorFlow2/Classification/ConvNets/efficientnet_v1/B4/inference | inference | inference_FP32 | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
python3 main.py --cfg config/efficientnet_v1/b4_cfg.py \
--mode predict \
--use_xla \
--predict_ckpt /model \
--predict_img_dir /infer_data \
--predict_batch_size 50 \
--predict_img_size 380
|
PyTorch/SpeechSynthesis/FastPitch/fastpitch | fastpitch | data_function | # *****************************************************************************
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the NVIDIA CORPORATION nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# *****************************************************************************
import functools
import json
import re
from pathlib import Path
import librosa
import numpy as np
import torch
import torch.nn.functional as F
from scipy import ndimage
from scipy.stats import betabinom
import common.layers as layers
from common.text.text_processing import get_text_processing
from common.utils import load_wav_to_torch, load_filepaths_and_text, to_gpu
class BetaBinomialInterpolator:
"""Interpolates alignment prior matrices to save computation.
Calculating beta-binomial priors is costly. Instead cache popular sizes
and use image interpolation to get priors faster.
"""
def __init__(self, round_mel_len_to=100, round_text_len_to=20):
self.round_mel_len_to = round_mel_len_to
self.round_text_len_to = round_text_len_to
self.bank = functools.lru_cache(beta_binomial_prior_distribution)
def round(self, val, to):
return max(1, int(np.round((val + 1) / to))) * to
def __call__(self, w, h):
bw = self.round(w, to=self.round_mel_len_to)
bh = self.round(h, to=self.round_text_len_to)
ret = ndimage.zoom(self.bank(bw, bh).T, zoom=(w / bw, h / bh), order=1)
assert ret.shape[0] == w, ret.shape
assert ret.shape[1] == h, ret.shape
return ret
def beta_binomial_prior_distribution(phoneme_count, mel_count, scaling=1.0):
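# Row i of the returned (mel_count x phoneme_count) matrix is the BetaBinomial(P, i, M+1-i)
# pmf over phoneme positions: a soft, near-diagonal prior over the text-to-mel alignment.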
P = phoneme_count
M = mel_count
x = np.arange(0, P)
mel_text_probs = []
for i in range(1, M+1):
a, b = scaling * i, scaling * (M + 1 - i)
rv = betabinom(P, a, b)
mel_i_prob = rv.pmf(x)
mel_text_probs.append(mel_i_prob)
return torch.tensor(np.array(mel_text_probs))
def estimate_pitch(wav, mel_len, method='pyin', normalize_mean=None,
normalize_std=None, n_formants=1):
if type(normalize_mean) is float or type(normalize_mean) is list:
normalize_mean = torch.tensor(normalize_mean)
if type(normalize_std) is float or type(normalize_std) is list:
normalize_std = torch.tensor(normalize_std)
if method == 'pyin':
snd, sr = librosa.load(wav)
pitch_mel, voiced_flag, voiced_probs = librosa.pyin(
snd, fmin=librosa.note_to_hz('C2'),
fmax=librosa.note_to_hz('C7'), frame_length=1024)
assert np.abs(mel_len - pitch_mel.shape[0]) <= 1.0
pitch_mel = np.where(np.isnan(pitch_mel), 0.0, pitch_mel)
pitch_mel = torch.from_numpy(pitch_mel).unsqueeze(0)
pitch_mel = F.pad(pitch_mel, (0, mel_len - pitch_mel.size(1)))
if n_formants > 1:
raise NotImplementedError
else:
raise ValueError
pitch_mel = pitch_mel.float()
if normalize_mean is not None:
assert normalize_std is not None
pitch_mel = normalize_pitch(pitch_mel, normalize_mean, normalize_std)
return pitch_mel
def normalize_pitch(pitch, mean, std):
zeros = (pitch == 0.0)
pitch -= mean[:, None]
pitch /= std[:, None]
pitch[zeros] = 0.0
return pitch
class TTSDataset(torch.utils.data.Dataset):
"""
1) loads audio,text pairs
2) normalizes text and converts them to sequences of one-hot vectors
3) computes mel-spectrograms from audio files.
"""
def __init__(self,
dataset_path,
audiopaths_and_text,
text_cleaners,
n_mel_channels,
symbol_set='english_basic',
p_arpabet=1.0,
n_speakers=1,
load_mel_from_disk=True,
load_pitch_from_disk=True,
pitch_mean=214.72203, # LJSpeech defaults
pitch_std=65.72038,
max_wav_value=None,
sampling_rate=None,
filter_length=None,
hop_length=None,
win_length=None,
mel_fmin=None,
mel_fmax=None,
prepend_space_to_text=False,
append_space_to_text=False,
pitch_online_dir=None,
betabinomial_online_dir=None,
use_betabinomial_interpolator=True,
pitch_online_method='pyin',
**ignored):
# Expect a list of filenames
if type(audiopaths_and_text) is str:
audiopaths_and_text = [audiopaths_and_text]
self.dataset_path = dataset_path
self.audiopaths_and_text = load_filepaths_and_text(
dataset_path, audiopaths_and_text,
has_speakers=(n_speakers > 1))
self.load_mel_from_disk = load_mel_from_disk
if not load_mel_from_disk:
self.max_wav_value = max_wav_value
self.sampling_rate = sampling_rate
self.stft = layers.TacotronSTFT(
filter_length, hop_length, win_length,
n_mel_channels, sampling_rate, mel_fmin, mel_fmax)
self.load_pitch_from_disk = load_pitch_from_disk
self.prepend_space_to_text = prepend_space_to_text
self.append_space_to_text = append_space_to_text
assert p_arpabet == 0.0 or p_arpabet == 1.0, (
'Only 0.0 and 1.0 p_arpabet is currently supported. '
'Variable probability breaks caching of betabinomial matrices.')
self.tp = get_text_processing(symbol_set, text_cleaners, p_arpabet)
self.n_speakers = n_speakers
self.pitch_tmp_dir = pitch_online_dir
self.f0_method = pitch_online_method
self.betabinomial_tmp_dir = betabinomial_online_dir
self.use_betabinomial_interpolator = use_betabinomial_interpolator
if use_betabinomial_interpolator:
self.betabinomial_interpolator = BetaBinomialInterpolator()
expected_columns = (2 + int(load_pitch_from_disk) + (n_speakers > 1))
assert not (load_pitch_from_disk and self.pitch_tmp_dir is not None)
if len(self.audiopaths_and_text[0]) < expected_columns:
raise ValueError(f'Expected {expected_columns} columns in audiopaths file. '
'The format is <mel_or_wav>|[<pitch>|]<text>[|<speaker_id>]')
if len(self.audiopaths_and_text[0]) > expected_columns:
print('WARNING: Audiopaths file has more columns than expected')
to_tensor = lambda x: torch.Tensor([x]) if type(x) is float else x
self.pitch_mean = to_tensor(pitch_mean)
self.pitch_std = to_tensor(pitch_std)
def __getitem__(self, index):
# Separate filename and text
if self.n_speakers > 1:
audiopath, *extra, text, speaker = self.audiopaths_and_text[index]
speaker = int(speaker)
else:
audiopath, *extra, text = self.audiopaths_and_text[index]
speaker = None
mel = self.get_mel(audiopath)
text = self.get_text(text)
pitch = self.get_pitch(index, mel.size(-1))
energy = torch.norm(mel.float(), dim=0, p=2)
attn_prior = self.get_prior(index, mel.shape[1], text.shape[0])
assert pitch.size(-1) == mel.size(-1)
# No higher formants?
if len(pitch.size()) == 1:
pitch = pitch[None, :]
return (text, mel, len(text), pitch, energy, speaker, attn_prior,
audiopath)
def __len__(self):
return len(self.audiopaths_and_text)
def get_mel(self, filename):
if not self.load_mel_from_disk:
audio, sampling_rate = load_wav_to_torch(filename)
if sampling_rate != self.stft.sampling_rate:
raise ValueError("{} SR doesn't match target {} SR".format(
sampling_rate, self.stft.sampling_rate))
audio_norm = audio / self.max_wav_value
audio_norm = audio_norm.unsqueeze(0)
audio_norm = torch.autograd.Variable(audio_norm,
requires_grad=False)
melspec = self.stft.mel_spectrogram(audio_norm)
melspec = torch.squeeze(melspec, 0)
else:
melspec = torch.load(filename)
# assert melspec.size(0) == self.stft.n_mel_channels, (
# 'Mel dimension mismatch: given {}, expected {}'.format(
# melspec.size(0), self.stft.n_mel_channels))
return melspec
def get_text(self, text):
text = self.tp.encode_text(text)
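# Encode "A A" and take the middle token to recover the id of the space symbol.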
space = [self.tp.encode_text("A A")[1]]
if self.prepend_space_to_text:
text = space + text
if self.append_space_to_text:
text = text + space
return torch.LongTensor(text)
def get_prior(self, index, mel_len, text_len):
if self.use_betabinomial_interpolator:
return torch.from_numpy(self.betabinomial_interpolator(mel_len,
text_len))
if self.betabinomial_tmp_dir is not None:
audiopath, *_ = self.audiopaths_and_text[index]
fname = Path(audiopath).relative_to(self.dataset_path)
fname = fname.with_suffix('.pt')
cached_fpath = Path(self.betabinomial_tmp_dir, fname)
if cached_fpath.is_file():
return torch.load(cached_fpath)
attn_prior = beta_binomial_prior_distribution(text_len, mel_len)
if self.betabinomial_tmp_dir is not None:
cached_fpath.parent.mkdir(parents=True, exist_ok=True)
torch.save(attn_prior, cached_fpath)
return attn_prior
def get_pitch(self, index, mel_len=None):
audiopath, *fields = self.audiopaths_and_text[index]
if self.n_speakers > 1:
spk = int(fields[-1])
else:
spk = 0
if self.load_pitch_from_disk:
pitchpath = fields[0]
pitch = torch.load(pitchpath)
if self.pitch_mean is not None:
assert self.pitch_std is not None
pitch = normalize_pitch(pitch, self.pitch_mean, self.pitch_std)
return pitch
if self.pitch_tmp_dir is not None:
fname = Path(audiopath).relative_to(self.dataset_path)
fname_method = fname.with_suffix('.pt')
cached_fpath = Path(self.pitch_tmp_dir, fname_method)
if cached_fpath.is_file():
return torch.load(cached_fpath)
# No luck so far - calculate
wav = audiopath
if not wav.endswith('.wav'):
wav = re.sub('/mels/', '/wavs/', wav)
wav = re.sub('.pt$', '.wav', wav)
pitch_mel = estimate_pitch(wav, mel_len, self.f0_method,
self.pitch_mean, self.pitch_std)
if self.pitch_tmp_dir is not None and not cached_fpath.is_file():
cached_fpath.parent.mkdir(parents=True, exist_ok=True)
torch.save(pitch_mel, cached_fpath)
return pitch_mel
def ensure_disjoint(*tts_datasets):
paths = [set(list(zip(*d.audiopaths_and_text))[0]) for d in tts_datasets]
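# Sizes of the per-dataset path sets sum to the size of their union iff the sets are pairwise disjoint.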
assert sum(len(p) for p in paths) == len(set().union(*paths)), (
"Your datasets (train, val) are not disjoint. "
"Review filelists and restart training."
)
class TTSCollate:
"""Zero-pads model inputs and targets based on number of frames per step"""
def __call__(self, batch):
"""Collate training batch from normalized text and mel-spec"""
# Right zero-pad all one-hot text sequences to max input length
input_lengths, ids_sorted_decreasing = torch.sort(
torch.LongTensor([len(x[0]) for x in batch]),
dim=0, descending=True)
max_input_len = input_lengths[0]
text_padded = torch.LongTensor(len(batch), max_input_len)
text_padded.zero_()
for i in range(len(ids_sorted_decreasing)):
text = batch[ids_sorted_decreasing[i]][0]
text_padded[i, :text.size(0)] = text
# Right zero-pad mel-spec
num_mels = batch[0][1].size(0)
max_target_len = max([x[1].size(1) for x in batch])
# Include mel padded and gate padded
mel_padded = torch.FloatTensor(len(batch), num_mels, max_target_len)
mel_padded.zero_()
output_lengths = torch.LongTensor(len(batch))
for i in range(len(ids_sorted_decreasing)):
mel = batch[ids_sorted_decreasing[i]][1]
mel_padded[i, :, :mel.size(1)] = mel
output_lengths[i] = mel.size(1)
n_formants = batch[0][3].shape[0]
pitch_padded = torch.zeros(mel_padded.size(0), n_formants,
mel_padded.size(2), dtype=batch[0][3].dtype)
energy_padded = torch.zeros_like(pitch_padded[:, 0, :])
for i in range(len(ids_sorted_decreasing)):
pitch = batch[ids_sorted_decreasing[i]][3]
energy = batch[ids_sorted_decreasing[i]][4]
pitch_padded[i, :, :pitch.shape[1]] = pitch
energy_padded[i, :energy.shape[0]] = energy
if batch[0][5] is not None:
speaker = torch.zeros_like(input_lengths)
for i in range(len(ids_sorted_decreasing)):
speaker[i] = batch[ids_sorted_decreasing[i]][5]
else:
speaker = None
        attn_prior_padded = torch.zeros(len(batch), max_target_len,
                                        max_input_len)
for i in range(len(ids_sorted_decreasing)):
prior = batch[ids_sorted_decreasing[i]][6]
attn_prior_padded[i, :prior.size(0), :prior.size(1)] = prior
# Count number of items - characters in text
len_x = [x[2] for x in batch]
len_x = torch.Tensor(len_x)
audiopaths = [batch[i][7] for i in ids_sorted_decreasing]
return (text_padded, input_lengths, mel_padded, output_lengths, len_x,
pitch_padded, energy_padded, speaker, attn_prior_padded,
audiopaths)
def batch_to_gpu(batch):
(text_padded, input_lengths, mel_padded, output_lengths, len_x,
pitch_padded, energy_padded, speaker, attn_prior, audiopaths) = batch
text_padded = to_gpu(text_padded).long()
input_lengths = to_gpu(input_lengths).long()
mel_padded = to_gpu(mel_padded).float()
output_lengths = to_gpu(output_lengths).long()
pitch_padded = to_gpu(pitch_padded).float()
energy_padded = to_gpu(energy_padded).float()
attn_prior = to_gpu(attn_prior).float()
if speaker is not None:
speaker = to_gpu(speaker).long()
# Alignments act as both inputs and targets - pass shallow copies
x = [text_padded, input_lengths, mel_padded, output_lengths,
pitch_padded, energy_padded, speaker, attn_prior, audiopaths]
y = [mel_padded, input_lengths, output_lengths]
len_x = torch.sum(output_lengths)
return (x, y, len_x)
|
PyTorch/SpeechSynthesis/FastPitch/triton | triton | config_model_on_triton | #!/usr/bin/env python3
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""
To configure model on Triton, you can use `config_model_on_triton.py` script.
This will prepare layout of Model Repository, including Model Configuration.
```shell script
python ./triton/config_model_on_triton.py \
--model-repository /model_repository \
--model-path /models/exported/model.onnx \
--model-format onnx \
--model-name ResNet50 \
--model-version 1 \
--max-batch-size 32 \
--precision fp16 \
--backend-accelerator trt \
--load-model explicit \
--timeout 120 \
--verbose
```
If the Triton server for which the model repository is prepared is running in **explicit model control mode**,
use the `--load-model` argument to send a load_model request to the Triton Inference Server.
If the server is listening on a non-default address or port, use the `--server-url` argument to point at the server control endpoint.
If the HTTP protocol is required to communicate with the Triton server, use the `--http` argument.
To improve inference throughput you can use
[dynamic batching](https://github.com/triton-inference-server/server/blob/master/docs/model_configuration.md#dynamic-batcher)
for your model by providing `--preferred-batch-sizes` and `--max-queue-delay-us` parameters.
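For example, a configuration enabling dynamic batching might look as follows
(the flag values here are illustrative only):
```shell script
python ./triton/config_model_on_triton.py \
    --model-repository /model_repository \
    --model-path /models/exported/model.onnx \
    --model-format onnx \
    --model-name ResNet50 \
    --max-batch-size 32 \
    --preferred-batch-sizes 16 32 \
    --max-queue-delay-us 100
```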
For models which don't support batching, set `--max-batch-size` to 0.
By default Triton will [automatically obtain inputs and outputs definitions](https://github.com/triton-inference-server/server/blob/master/docs/model_configuration.md#auto-generated-model-configuration),
but for TorchScript and TF GraphDef models the script uses a file with I/O specs. This file is automatically generated
when the model is converted to a ScriptModule (either traced or scripted).
If a path other than the default to the I/O spec file is needed, use the `--io-spec` CLI argument.
The I/O spec file is a YAML file with the following structure:
```yaml
- inputs:
- name: input
dtype: float32 # np.dtype name
shape: [None, 224, 224, 3]
- outputs:
- name: probabilities
dtype: float32
shape: [None, 1001]
- name: classes
dtype: int32
shape: [None, 1]
```
"""
import argparse
import logging
import time
from model_navigator.triton.config import BackendAccelerator as Accelerator
from model_navigator.triton.config import TensorRTOptPrecision as Precision
from model_navigator.model import Format
from model_navigator.log import set_logger, log_dict
from model_navigator.triton import ModelConfig, TritonClient, TritonModelStore
LOGGER = logging.getLogger("config_model")
def _available_enum_values(my_enum):
return [item.value for item in my_enum]
def main():
parser = argparse.ArgumentParser(
description="Create Triton model repository and model configuration", allow_abbrev=False
)
parser.add_argument("--model-repository", required=True, help="Path to Triton model repository.")
parser.add_argument("--model-path", required=True, help="Path to model to configure")
# TODO: automation
parser.add_argument(
"--model-format",
required=True,
choices=_available_enum_values(Format),
help="Format of model to deploy",
)
parser.add_argument("--model-name", required=True, help="Model name")
parser.add_argument("--model-version", default="1", help="Version of model (default 1)")
parser.add_argument(
"--max-batch-size",
type=int,
default=32,
help="Maximum batch size allowed for inference. "
"A max_batch_size value of 0 indicates that batching is not allowed for the model",
)
# TODO: automation
parser.add_argument(
"--precision",
type=str,
default=Precision.FP16.value,
choices=_available_enum_values(Precision),
help="Model precision (parameter used only by Tensorflow backend with TensorRT optimization)",
)
# Triton Inference Server endpoint
parser.add_argument(
"--server-url",
type=str,
default="grpc://localhost:8001",
help="Inference server URL in format protocol://host[:port] (default grpc://localhost:8001)",
)
parser.add_argument(
"--load-model",
choices=["none", "poll", "explicit"],
help="Loading model while Triton Server is in given model control mode",
)
parser.add_argument(
"--timeout", default=120, help="Timeout in seconds to wait till model load (default=120)", type=int
)
# optimization related
parser.add_argument(
"--backend-accelerator",
type=str,
choices=_available_enum_values(Accelerator),
default=Accelerator.TRT.value,
help="Select Backend Accelerator used to serve model",
)
parser.add_argument("--number-of-model-instances", type=int, default=1, help="Number of model instances per GPU")
parser.add_argument(
"--preferred-batch-sizes",
type=int,
nargs="*",
help="Batch sizes that the dynamic batcher should attempt to create. "
"In case --max-queue-delay-us is set and this parameter is not, default value will be --max-batch-size",
)
parser.add_argument(
"--max-queue-delay-us",
type=int,
default=0,
help="Max delay time which dynamic batcher shall wait to form a batch (default 0)",
)
parser.add_argument(
"--capture-cuda-graph",
type=int,
default=0,
help="Use cuda capture graph (used only by TensorRT platform)",
)
parser.add_argument("-v", "--verbose", help="Provide verbose logs", action='store_true')
args = parser.parse_args()
set_logger(verbose=args.verbose)
log_dict("args", vars(args))
config = ModelConfig.create(
model_path=args.model_path,
# model definition
model_name=args.model_name,
model_version=args.model_version,
model_format=args.model_format,
precision=args.precision,
max_batch_size=args.max_batch_size,
# optimization
accelerator=args.backend_accelerator,
gpu_engine_count=args.number_of_model_instances,
preferred_batch_sizes=args.preferred_batch_sizes or [],
max_queue_delay_us=args.max_queue_delay_us,
capture_cuda_graph=args.capture_cuda_graph,
)
model_store = TritonModelStore(args.model_repository)
model_store.deploy_model(model_config=config, model_path=args.model_path)
    if args.load_model is not None and args.load_model != "none":
client = TritonClient(server_url=args.server_url, verbose=args.verbose)
client.wait_for_server_ready(timeout=args.timeout)
if args.load_model == "explicit":
client.load_model(model_name=args.model_name)
if args.load_model == "poll":
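            # Give the server's periodic repository poll time to pick up the
            # new model before checking its readiness below.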
time.sleep(15)
client.wait_for_model(model_name=args.model_name, model_version=args.model_version, timeout_s=args.timeout)
if __name__ == "__main__":
main()
|
PyTorch/LanguageModeling/BART/utils | utils | logging | # coding=utf-8
# Copyright (c) 2022 NVIDIA CORPORATION. All rights reserved.
# Copyright 2020 Optuna, Hugging Face
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Logging utilities. """
import logging
import os
import sys
import threading
from logging import CRITICAL # NOQA
from logging import DEBUG # NOQA
from logging import ERROR # NOQA
from logging import FATAL # NOQA
from logging import INFO # NOQA
from logging import NOTSET # NOQA
from logging import WARN # NOQA
from logging import WARNING # NOQA
from typing import Optional
_lock = threading.Lock()
_default_handler: Optional[logging.Handler] = None
log_levels = {
"debug": logging.DEBUG,
"info": logging.INFO,
"warning": logging.WARNING,
"error": logging.ERROR,
"critical": logging.CRITICAL,
}
_default_log_level = logging.WARNING
def _get_default_logging_level():
"""
    If the TRANSFORMERS_VERBOSITY env var is set to one of the valid choices, return that as the new default
    level. If it is not, fall back to ``_default_log_level``.
"""
env_level_str = os.getenv("TRANSFORMERS_VERBOSITY", None)
if env_level_str:
if env_level_str in log_levels:
return log_levels[env_level_str]
else:
logging.getLogger().warning(
f"Unknown option TRANSFORMERS_VERBOSITY={env_level_str}, "
f"has to be one of: { ', '.join(log_levels.keys()) }"
)
return _default_log_level
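# Example (assumed usage): the default level can be selected through the
# environment before the process starts, e.g.:
#
#   TRANSFORMERS_VERBOSITY=info python run_training.py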
def _get_library_name() -> str:
return __name__.split(".")[0]
def _get_library_root_logger() -> logging.Logger:
return logging.getLogger(_get_library_name())
def _configure_library_root_logger() -> None:
global _default_handler
with _lock:
if _default_handler:
# This library has already configured the library root logger.
return
_default_handler = logging.StreamHandler() # Set sys.stderr as stream.
_default_handler.flush = sys.stderr.flush
# Apply our default configuration to the library root logger.
library_root_logger = _get_library_root_logger()
library_root_logger.addHandler(_default_handler)
library_root_logger.setLevel(_get_default_logging_level())
library_root_logger.propagate = False
def _reset_library_root_logger() -> None:
global _default_handler
with _lock:
if not _default_handler:
return
library_root_logger = _get_library_root_logger()
library_root_logger.removeHandler(_default_handler)
library_root_logger.setLevel(logging.NOTSET)
_default_handler = None
def get_logger(name: Optional[str] = None) -> logging.Logger:
"""
Return a logger with the specified name.
This function is not supposed to be directly accessed unless you are writing a custom transformers module.
"""
if name is None:
name = _get_library_name()
_configure_library_root_logger()
return logging.getLogger(name)
def get_verbosity() -> int:
"""
Return the current level for the 🤗 Transformers's root logger as an int.
Returns:
:obj:`int`: The logging level.
.. note::
        🤗 Transformers has the following logging levels:
- 50: ``transformers.logging.CRITICAL`` or ``transformers.logging.FATAL``
- 40: ``transformers.logging.ERROR``
- 30: ``transformers.logging.WARNING`` or ``transformers.logging.WARN``
- 20: ``transformers.logging.INFO``
- 10: ``transformers.logging.DEBUG``
"""
_configure_library_root_logger()
return _get_library_root_logger().getEffectiveLevel()
def set_verbosity(verbosity: int) -> None:
"""
    Set the verbosity level for the 🤗 Transformers's root logger.
Args:
verbosity (:obj:`int`):
Logging level, e.g., one of:
- ``transformers.logging.CRITICAL`` or ``transformers.logging.FATAL``
- ``transformers.logging.ERROR``
- ``transformers.logging.WARNING`` or ``transformers.logging.WARN``
- ``transformers.logging.INFO``
- ``transformers.logging.DEBUG``
"""
_configure_library_root_logger()
_get_library_root_logger().setLevel(verbosity)
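# Example (assumed usage): silence everything except errors:
#
#   from utils import logging
#   logging.set_verbosity(logging.ERROR)  # equivalent to logging.set_verbosity_error()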
def set_verbosity_info():
"""Set the verbosity to the :obj:`INFO` level."""
return set_verbosity(INFO)
def set_verbosity_warning():
"""Set the verbosity to the :obj:`WARNING` level."""
return set_verbosity(WARNING)
def set_verbosity_debug():
"""Set the verbosity to the :obj:`DEBUG` level."""
return set_verbosity(DEBUG)
def set_verbosity_error():
"""Set the verbosity to the :obj:`ERROR` level."""
return set_verbosity(ERROR)
def disable_default_handler() -> None:
"""Disable the default handler of the HuggingFace Transformers's root logger."""
_configure_library_root_logger()
assert _default_handler is not None
_get_library_root_logger().removeHandler(_default_handler)
def enable_default_handler() -> None:
"""Enable the default handler of the HuggingFace Transformers's root logger."""
_configure_library_root_logger()
assert _default_handler is not None
_get_library_root_logger().addHandler(_default_handler)
def disable_propagation() -> None:
"""
Disable propagation of the library log outputs. Note that log propagation is disabled by default.
"""
_configure_library_root_logger()
_get_library_root_logger().propagate = False
def enable_propagation() -> None:
"""
Enable propagation of the library log outputs. Please disable the HuggingFace Transformers's default handler to
prevent double logging if the root logger has been configured.
"""
_configure_library_root_logger()
_get_library_root_logger().propagate = True
def enable_explicit_format() -> None:
"""
Enable explicit formatting for every HuggingFace Transformers's logger. The explicit formatter is as follows:
::
[LEVELNAME|FILENAME|LINE NUMBER] TIME >> MESSAGE
All handlers currently bound to the root logger are affected by this method.
"""
handlers = _get_library_root_logger().handlers
for handler in handlers:
formatter = logging.Formatter("[%(levelname)s|%(filename)s:%(lineno)s] %(asctime)s >> %(message)s")
handler.setFormatter(formatter)
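# With the explicit format enabled, an emitted record looks roughly like:
#
#   [INFO|modeling.py:123] 2022-01-01 12:00:00,000 >> Model loaded
#
# (the file name, line number and message above are illustrative).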
def reset_format() -> None:
"""
Resets the formatting for HuggingFace Transformers's loggers.
All handlers currently bound to the root logger are affected by this method.
"""
handlers = _get_library_root_logger().handlers
for handler in handlers:
handler.setFormatter(None) |
TensorFlow2/Recommendation/WideAndDeep/triton/deployment_toolkit/library | library | tensorrt | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import sys
from pathlib import Path
from typing import Dict, NamedTuple, Optional, Union
import numpy as np
# pytype: disable=import-error
try:
import pycuda.autoinit
import pycuda.driver as cuda
except Exception as e:
logging.getLogger(__name__).warning(f"Problems with importing pycuda package; {e}")
# pytype: enable=import-error
import tensorrt as trt # pytype: disable=import-error
from ..core import BaseLoader, BaseRunner, BaseRunnerSession, Format, Model, TensorSpec
from ..extensions import loaders, runners
LOGGER = logging.getLogger(__name__)
TRT_LOGGER = trt.Logger(trt.Logger.INFO)
# documentation:
# https://docs.nvidia.com/deeplearning/tensorrt/api/python_api/index.html
# https://docs.nvidia.com/deeplearning/tensorrt/developer-guide/index.html#python_samples_section
_NP_DTYPE2TRT_DTYPE = {
np.dtype("float32"): trt.DataType.FLOAT,
np.dtype("float16"): trt.DataType.HALF,
np.dtype("int8"): trt.DataType.INT8,
np.dtype("int32"): trt.DataType.INT32,
np.dtype("bool"): trt.DataType.BOOL,
}
class TensorRTLoader(BaseLoader):
def load(self, model_path: Union[str, Path], **_) -> Model:
model_path = Path(model_path)
LOGGER.debug(f"Loading TensorRT engine from {model_path}")
engine = self._load_engine(model_path)
if engine is None:
LOGGER.debug("Unable to load engine without plugins. Loading plugins.")
trt.init_libnvinfer_plugins(logger=TRT_LOGGER, namespace="")
LOGGER.debug(f"Loading TensorRT engine with plugins from {model_path}")
engine = self._load_engine(model_path)
if engine is None:
raise RuntimeError(f"Could not load ICudaEngine from {model_path}")
inputs = {}
outputs = {}
for binding_idx in range(engine.num_bindings):
name = engine.get_binding_name(binding_idx)
is_input = engine.binding_is_input(binding_idx)
dtype = np.dtype(trt.nptype(engine.get_binding_dtype(binding_idx))).name
shape = engine.get_binding_shape(binding_idx)
if is_input:
inputs[name] = TensorSpec(name, dtype, shape)
else:
outputs[name] = TensorSpec(name, dtype, shape)
return Model(engine, None, inputs, outputs)
def _load_engine(self, model_path: Path):
with model_path.open("rb") as fh, trt.Runtime(TRT_LOGGER) as runtime:
engine = runtime.deserialize_cuda_engine(fh.read())
return engine
class TRTBuffers(NamedTuple):
x_host: Optional[Dict[str, object]]
x_dev: Dict[str, object]
y_pred_host: Dict[str, object]
y_pred_dev: Dict[str, object]
class TensorRTRunner(BaseRunner):
def __init__(self):
pass
def init_inference(self, model: Model):
return TensorRTRunnerSession(model=model)
class TensorRTRunnerSession(BaseRunnerSession):
def __init__(self, model: Model):
super().__init__(model)
assert isinstance(model.handle, trt.ICudaEngine)
self._model = model
self._has_dynamic_shapes = None
self._context = None
self._engine: trt.ICudaEngine = self._model.handle
self._cuda_context = pycuda.autoinit.context
self._input_names = None
self._output_names = None
self._buffers = None
def __enter__(self):
self._context = self._engine.create_execution_context()
self._context.__enter__()
self._input_names = [
self._engine[idx] for idx in range(self._engine.num_bindings) if self._engine.binding_is_input(idx)
]
self._output_names = [
self._engine[idx] for idx in range(self._engine.num_bindings) if not self._engine.binding_is_input(idx)
]
        # all_binding_shapes_specified is True for models without dynamic shapes,
        # so _has_dynamic_shapes below starts out True only for models with dynamic shapes
self._has_dynamic_shapes = not self._context.all_binding_shapes_specified
return self
def __exit__(self, exc_type, exc_value, traceback):
self._context.__exit__(exc_type, exc_value, traceback)
self._input_names = None
self._output_names = None
# TODO: are cuda buffers dealloc automatically?
self._buffers = None
def __call__(self, x):
buffers = self._prepare_buffers_if_needed(x)
bindings = self._update_bindings(buffers)
for name in self._input_names:
cuda.memcpy_htod(buffers.x_dev[name], buffers.x_host[name])
self._cuda_context.push()
self._context.execute_v2(bindings=bindings)
self._cuda_context.pop()
for name in self._output_names:
cuda.memcpy_dtoh(buffers.y_pred_host[name], buffers.y_pred_dev[name])
return buffers.y_pred_host
def _update_bindings(self, buffers: TRTBuffers):
bindings = [None] * self._engine.num_bindings
for name in buffers.y_pred_dev:
binding_idx: int = self._engine[name]
bindings[binding_idx] = buffers.y_pred_dev[name]
for name in buffers.x_dev:
binding_idx: int = self._engine[name]
bindings[binding_idx] = buffers.x_dev[name]
return bindings
def _set_dynamic_input_shapes(self, x_host):
def _is_shape_dynamic(input_shape):
return any([dim is None or dim == -1 for dim in input_shape])
for name in self._input_names:
bindings_idx = self._engine[name]
data_shape = x_host[name].shape # pytype: disable=attribute-error
if self._engine.is_shape_binding(bindings_idx):
input_shape = self._context.get_shape(bindings_idx)
if _is_shape_dynamic(input_shape):
self._context.set_shape_input(bindings_idx, data_shape)
else:
input_shape = self._engine.get_binding_shape(bindings_idx)
if _is_shape_dynamic(input_shape):
self._context.set_binding_shape(bindings_idx, data_shape)
assert self._context.all_binding_shapes_specified and self._context.all_shape_inputs_specified
def _prepare_buffers_if_needed(self, x_host: Dict[str, object]):
# pytype: disable=attribute-error
new_batch_size = list(x_host.values())[0].shape[0]
current_batch_size = list(self._buffers.y_pred_host.values())[0].shape[0] if self._buffers else 0
# pytype: enable=attribute-error
if self._has_dynamic_shapes or new_batch_size != current_batch_size:
# TODO: are CUDA buffers dealloc automatically?
self._set_dynamic_input_shapes(x_host)
y_pred_host = {}
for name in self._output_names:
shape = self._context.get_binding_shape(self._engine[name])
binding_idx: int = self._engine[name]
dtype_from_trt_binding = np.dtype(trt.nptype(self._engine.get_binding_dtype(binding_idx)))
dtype_from_model_spec = np.dtype(self._model.outputs[name].dtype)
assert dtype_from_model_spec == dtype_from_trt_binding
y_pred_host[name] = np.zeros(shape, dtype=dtype_from_model_spec)
y_pred_dev = {name: cuda.mem_alloc(data.nbytes) for name, data in y_pred_host.items()}
# cast host input into binding dtype
def _cast_input(name, data):
binding_idx: int = self._engine[name]
np_dtype = trt.nptype(self._engine.get_binding_dtype(binding_idx))
return data.astype(np_dtype)
x_host = {name: _cast_input(name, host_input) for name, host_input in x_host.items()}
x_dev = {
name: cuda.mem_alloc(host_input.nbytes)
for name, host_input in x_host.items()
if name in self._input_names # pytype: disable=attribute-error
}
self._buffers = TRTBuffers(None, x_dev, y_pred_host, y_pred_dev)
return self._buffers._replace(x_host=x_host)
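# A minimal usage sketch (the engine path and tensor name below are
# assumptions, not part of this module):
#
#   loader = TensorRTLoader()
#   model = loader.load("model.plan")
#   runner = TensorRTRunner()
#   with runner.init_inference(model) as session:
#       y_pred = session({"input": np.zeros((1, 8), dtype=np.float32)})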
if "pycuda.driver" in sys.modules:
loaders.register_extension(Format.TRT.value, TensorRTLoader)
runners.register_extension(Format.TRT.value, TensorRTRunner)
else:
LOGGER.warning("Do not register TensorRT extension due problems with importing pycuda.driver package.")
|
PyTorch/Segmentation/MaskRCNN/pytorch/configs | configs | e2e_faster_rcnn_R_50_FPN_1x | MODEL:
META_ARCHITECTURE: "GeneralizedRCNN"
WEIGHT: "catalog://ImageNetPretrained/MSRA/R-50"
BACKBONE:
CONV_BODY: "R-50-FPN"
OUT_CHANNELS: 256
RPN:
USE_FPN: True
ANCHOR_STRIDE: (4, 8, 16, 32, 64)
PRE_NMS_TOP_N_TRAIN: 2000
PRE_NMS_TOP_N_TEST: 1000
POST_NMS_TOP_N_TEST: 1000
FPN_POST_NMS_TOP_N_TEST: 1000
ROI_HEADS:
USE_FPN: True
ROI_BOX_HEAD:
POOLER_RESOLUTION: 7
POOLER_SCALES: (0.25, 0.125, 0.0625, 0.03125)
POOLER_SAMPLING_RATIO: 2
FEATURE_EXTRACTOR: "FPN2MLPFeatureExtractor"
PREDICTOR: "FPNPredictor"
DATASETS:
TRAIN: ("coco_2014_train", "coco_2014_valminusminival")
TEST: ("coco_2014_minival",)
DATALOADER:
SIZE_DIVISIBILITY: 32
SOLVER:
BASE_LR: 0.02
WEIGHT_DECAY: 0.0001
STEPS: (60000, 80000)
MAX_ITER: 90000
|
PyTorch/Recommendation/DLRM/tests/feature_specs | feature_specs | 10_num | channel_spec:
categorical:
- cat_0.bin
- cat_1.bin
- cat_2.bin
- cat_3.bin
- cat_4.bin
- cat_5.bin
- cat_6.bin
- cat_7.bin
- cat_8.bin
- cat_9.bin
- cat_10.bin
- cat_11.bin
- cat_12.bin
- cat_13.bin
- cat_14.bin
- cat_15.bin
- cat_16.bin
- cat_17.bin
- cat_18.bin
- cat_19.bin
- cat_20.bin
- cat_21.bin
- cat_22.bin
- cat_23.bin
- cat_24.bin
- cat_25.bin
label:
- label
numerical: &id001
- num_0
- num_1
- num_2
- num_3
- num_4
- num_5
- num_6
- num_7
- num_8
- num_9
feature_spec:
cat_0.bin:
cardinality: 100000
dtype: int32
cat_1.bin:
cardinality: 100001
dtype: int32
cat_10.bin:
cardinality: 100010
dtype: int32
cat_11.bin:
cardinality: 100011
dtype: int32
cat_12.bin:
cardinality: 100012
dtype: int32
cat_13.bin:
cardinality: 100013
dtype: int32
cat_14.bin:
cardinality: 100014
dtype: int32
cat_15.bin:
cardinality: 100015
dtype: int32
cat_16.bin:
cardinality: 100016
dtype: int32
cat_17.bin:
cardinality: 100017
dtype: int32
cat_18.bin:
cardinality: 100018
dtype: int32
cat_19.bin:
cardinality: 100019
dtype: int32
cat_2.bin:
cardinality: 100002
dtype: int32
cat_20.bin:
cardinality: 100020
dtype: int32
cat_21.bin:
cardinality: 100021
dtype: int32
cat_22.bin:
cardinality: 100022
dtype: int32
cat_23.bin:
cardinality: 100023
dtype: int32
cat_24.bin:
cardinality: 100024
dtype: int32
cat_25.bin:
cardinality: 100025
dtype: int32
cat_3.bin:
cardinality: 100003
dtype: int32
cat_4.bin:
cardinality: 100004
dtype: int32
cat_5.bin:
cardinality: 100005
dtype: int32
cat_6.bin:
cardinality: 100006
dtype: int32
cat_7.bin:
cardinality: 100007
dtype: int32
cat_8.bin:
cardinality: 100008
dtype: int32
cat_9.bin:
cardinality: 100009
dtype: int32
label:
dtype: bool
num_0:
dtype: float16
num_1:
dtype: float16
num_2:
dtype: float16
num_3:
dtype: float16
num_4:
dtype: float16
num_5:
dtype: float16
num_6:
dtype: float16
num_7:
dtype: float16
num_8:
dtype: float16
num_9:
dtype: float16
metadata: {}
source_spec:
test:
- features: *id001
files:
- test/numerical.bin
type: split_binary
- features:
- label
files:
- test/label.bin
type: split_binary
- features:
- cat_0.bin
files:
- test/cat_0.bin
type: split_binary
- features:
- cat_1.bin
files:
- test/cat_1.bin
type: split_binary
- features:
- cat_2.bin
files:
- test/cat_2.bin
type: split_binary
- features:
- cat_3.bin
files:
- test/cat_3.bin
type: split_binary
- features:
- cat_4.bin
files:
- test/cat_4.bin
type: split_binary
- features:
- cat_5.bin
files:
- test/cat_5.bin
type: split_binary
- features:
- cat_6.bin
files:
- test/cat_6.bin
type: split_binary
- features:
- cat_7.bin
files:
- test/cat_7.bin
type: split_binary
- features:
- cat_8.bin
files:
- test/cat_8.bin
type: split_binary
- features:
- cat_9.bin
files:
- test/cat_9.bin
type: split_binary
- features:
- cat_10.bin
files:
- test/cat_10.bin
type: split_binary
- features:
- cat_11.bin
files:
- test/cat_11.bin
type: split_binary
- features:
- cat_12.bin
files:
- test/cat_12.bin
type: split_binary
- features:
- cat_13.bin
files:
- test/cat_13.bin
type: split_binary
- features:
- cat_14.bin
files:
- test/cat_14.bin
type: split_binary
- features:
- cat_15.bin
files:
- test/cat_15.bin
type: split_binary
- features:
- cat_16.bin
files:
- test/cat_16.bin
type: split_binary
- features:
- cat_17.bin
files:
- test/cat_17.bin
type: split_binary
- features:
- cat_18.bin
files:
- test/cat_18.bin
type: split_binary
- features:
- cat_19.bin
files:
- test/cat_19.bin
type: split_binary
- features:
- cat_20.bin
files:
- test/cat_20.bin
type: split_binary
- features:
- cat_21.bin
files:
- test/cat_21.bin
type: split_binary
- features:
- cat_22.bin
files:
- test/cat_22.bin
type: split_binary
- features:
- cat_23.bin
files:
- test/cat_23.bin
type: split_binary
- features:
- cat_24.bin
files:
- test/cat_24.bin
type: split_binary
- features:
- cat_25.bin
files:
- test/cat_25.bin
type: split_binary
train:
- features: *id001
files:
- train/numerical.bin
type: split_binary
- features:
- label
files:
- train/label.bin
type: split_binary
- features:
- cat_0.bin
files:
- train/cat_0.bin
type: split_binary
- features:
- cat_1.bin
files:
- train/cat_1.bin
type: split_binary
- features:
- cat_2.bin
files:
- train/cat_2.bin
type: split_binary
- features:
- cat_3.bin
files:
- train/cat_3.bin
type: split_binary
- features:
- cat_4.bin
files:
- train/cat_4.bin
type: split_binary
- features:
- cat_5.bin
files:
- train/cat_5.bin
type: split_binary
- features:
- cat_6.bin
files:
- train/cat_6.bin
type: split_binary
- features:
- cat_7.bin
files:
- train/cat_7.bin
type: split_binary
- features:
- cat_8.bin
files:
- train/cat_8.bin
type: split_binary
- features:
- cat_9.bin
files:
- train/cat_9.bin
type: split_binary
- features:
- cat_10.bin
files:
- train/cat_10.bin
type: split_binary
- features:
- cat_11.bin
files:
- train/cat_11.bin
type: split_binary
- features:
- cat_12.bin
files:
- train/cat_12.bin
type: split_binary
- features:
- cat_13.bin
files:
- train/cat_13.bin
type: split_binary
- features:
- cat_14.bin
files:
- train/cat_14.bin
type: split_binary
- features:
- cat_15.bin
files:
- train/cat_15.bin
type: split_binary
- features:
- cat_16.bin
files:
- train/cat_16.bin
type: split_binary
- features:
- cat_17.bin
files:
- train/cat_17.bin
type: split_binary
- features:
- cat_18.bin
files:
- train/cat_18.bin
type: split_binary
- features:
- cat_19.bin
files:
- train/cat_19.bin
type: split_binary
- features:
- cat_20.bin
files:
- train/cat_20.bin
type: split_binary
- features:
- cat_21.bin
files:
- train/cat_21.bin
type: split_binary
- features:
- cat_22.bin
files:
- train/cat_22.bin
type: split_binary
- features:
- cat_23.bin
files:
- train/cat_23.bin
type: split_binary
- features:
- cat_24.bin
files:
- train/cat_24.bin
type: split_binary
- features:
- cat_25.bin
files:
- train/cat_25.bin
type: split_binary
|
TensorFlow/Detection/SSD/models/research/object_detection/predictors/heads | heads | keypoint_head_test | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for object_detection.predictors.heads.keypoint_head."""
import tensorflow as tf
from google.protobuf import text_format
from object_detection.builders import hyperparams_builder
from object_detection.predictors.heads import keypoint_head
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
class MaskRCNNKeypointHeadTest(test_case.TestCase):
def _build_arg_scope_with_hyperparams(self,
op_type=hyperparams_pb2.Hyperparams.FC):
hyperparams = hyperparams_pb2.Hyperparams()
hyperparams_text_proto = """
activation: NONE
regularizer {
l2_regularizer {
}
}
initializer {
truncated_normal_initializer {
}
}
"""
text_format.Merge(hyperparams_text_proto, hyperparams)
hyperparams.op = op_type
return hyperparams_builder.build(hyperparams, is_training=True)
def test_prediction_size(self):
keypoint_prediction_head = keypoint_head.MaskRCNNKeypointHead(
conv_hyperparams_fn=self._build_arg_scope_with_hyperparams())
roi_pooled_features = tf.random_uniform(
[64, 14, 14, 1024], minval=-2.0, maxval=2.0, dtype=tf.float32)
prediction = keypoint_prediction_head.predict(
features=roi_pooled_features, num_predictions_per_location=1)
self.assertAllEqual([64, 1, 17, 56, 56], prediction.get_shape().as_list())
if __name__ == '__main__':
tf.test.main()
|
PyTorch/Classification/GPUNet/triton/runner | runner | configuration | # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
from typing import Any, Dict, Optional
# method from PEP-366 to support relative import in executed modules
if __name__ == "__main__" and __package__ is None:
__package__ = pathlib.Path(__file__).parent.name
from .task import DataObject
class Configuration(DataObject):
"""
    Configuration object - handles data for a single experiment
"""
def __init__(
self,
parameters: Dict,
checkpoint: Optional[str],
):
"""
Args:
parameters: Configuration parameters
checkpoint: Checkpoint used for experiment
"""
self.parameters = parameters
self.checkpoint = checkpoint
|
TensorFlow/LanguageModeling/BERT/biobert/scripts | scripts | run_pretraining_pubmed_base_phase_1 | #! /bin/bash
echo "Container nvidia build = " $NVIDIA_BUILD_ID
train_batch_size=${1:-128}
learning_rate=${2:-"9.625e-5"}
cased=${3:-false}
precision=${4:-"fp16"}
use_xla=${5:-"true"}
num_gpu=${6:-16}
warmup_steps=${7:-"1953"}
train_steps=${8:-19531}
num_accumulation_steps=${9:-32}
save_checkpoint_steps=${10:-5000}
eval_batch_size=${11:-80}
use_fp16=""
if [ "$precision" = "fp16" ] ; then
echo "fp16 activated!"
use_fp16="--amp"
else
echo "fp32/tf32 activated!"
use_fp16="--noamp"
fi
if [ "$use_xla" = "true" ] ; then
use_xla_tag="--use_xla"
echo "XLA activated"
else
use_xla_tag="--nouse_xla"
fi
if [ "$cased" = "true" ] ; then
DO_LOWER_CASE=0
CASING_DIR_PREFIX="cased"
else
DO_LOWER_CASE=1
CASING_DIR_PREFIX="uncased"
fi
BERT_CONFIG=/workspace/bert/data/download/google_pretrained_weights/${CASING_DIR_PREFIX}_L-12_H-768_A-12/bert_config.json
RESULTS_DIR=/results
CHECKPOINTS_DIR=${RESULTS_DIR}/biobert_phase_1
mkdir -p ${CHECKPOINTS_DIR}
INIT_CHECKPOINT=/workspace/bert/data/download/google_pretrained_weights/${CASING_DIR_PREFIX}_L-12_H-768_A-12/bert_model.ckpt
INPUT_FILES_DIR="/workspace/bert/data/tfrecord/lower_case_${DO_LOWER_CASE}_seq_len_128_max_pred_20_masked_lm_prob_0.15_random_seed_12345_dupe_factor_5_shard_1472_test_split_10/pubmed_baseline/training"
EVAL_FILES_DIR="/workspace/bert/data/tfrecord/lower_case_${DO_LOWER_CASE}_seq_len_128_max_pred_20_masked_lm_prob_0.15_random_seed_12345_dupe_factor_5_shard_1472_test_split_10/pubmed_baseline/test"
if [ $num_gpu -gt 1 ] ; then
mpi_command="mpirun -np $num_gpu -H localhost:$num_gpu \
--allow-run-as-root -bind-to none -map-by slot \
-x NCCL_DEBUG=INFO \
-x LD_LIBRARY_PATH \
-x PATH -mca pml ob1 -mca btl ^openib"
use_hvd="--horovod"
else
mpi_command=""
use_hvd=""
fi
# Global batch size = micro-batch size * number of GPUs * gradient accumulation steps
export GBS=$(expr $train_batch_size \* $num_gpu \* $num_accumulation_steps)
printf -v TAG "tf_bert_bio_1n_phase1_cased_%s_%s_gbs%d" "$cased" "$precision" $GBS
DATESTAMP=`date +'%y%m%d%H%M%S'`
LOGFILE=$RESULTS_DIR/$TAG.$DATESTAMP.log
printf "Logs written to %s\n" "$LOGFILE"
$mpi_command python3 /workspace/bert/run_pretraining.py \
--input_files_dir=$INPUT_FILES_DIR \
--eval_files_dir=$EVAL_FILES_DIR \
--output_dir=$CHECKPOINTS_DIR \
--bert_config_file=$BERT_CONFIG \
--do_train=True \
--do_eval=True \
--train_batch_size=$train_batch_size \
--eval_batch_size=$eval_batch_size \
--max_seq_length=128 \
--max_predictions_per_seq=20 \
--num_train_steps=$train_steps \
--num_warmup_steps=$warmup_steps \
--save_checkpoints_steps=$save_checkpoint_steps \
--num_accumulation_steps=$num_accumulation_steps \
--learning_rate=$learning_rate \
--report_loss \
$use_hvd $use_fp16 $use_xla_tag \
--init_checkpoint=$INIT_CHECKPOINT |& tee $LOGFILE |
TensorFlow/Segmentation/UNet_Medical | UNet_Medical | main | # Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Entry point of the application.
This file serves as the entry point to the training of UNet for segmentation of neuronal processes.
Example:
Training can be adjusted by modifying the arguments specified below::
$ python main.py --exec_mode train --model_dir /datasets ...
"""
import os
import horovod.tensorflow as hvd
import math
import numpy as np
import tensorflow as tf
from PIL import Image
from utils.setup import prepare_model_dir, get_logger, build_estimator, set_flags
from utils.cmd_util import PARSER, parse_args
from utils.data_loader import Dataset
from utils.hooks.profiling_hook import ProfilingHook
from utils.hooks.training_hook import TrainingHook
def main(_):
"""
Starting point of the application
"""
hvd.init()
set_flags()
params = parse_args(PARSER.parse_args())
model_dir = prepare_model_dir(params)
logger = get_logger(params)
estimator = build_estimator(params, model_dir)
dataset = Dataset(data_dir=params.data_dir,
batch_size=params.batch_size,
fold=params.crossvalidation_idx,
augment=params.augment,
gpu_id=hvd.rank(),
num_gpus=hvd.size(),
seed=params.seed)
if 'train' in params.exec_mode:
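        # Divide the requested step count evenly across Horovod workers;
        # in benchmark mode each worker runs the full number of steps.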
max_steps = params.max_steps // (1 if params.benchmark else hvd.size())
hooks = [hvd.BroadcastGlobalVariablesHook(0),
TrainingHook(logger,
max_steps=max_steps,
log_every=params.log_every)]
if params.benchmark and hvd.rank() == 0:
hooks.append(ProfilingHook(logger,
batch_size=params.batch_size,
log_every=params.log_every,
warmup_steps=params.warmup_steps,
mode='train'))
estimator.train(
input_fn=dataset.train_fn,
steps=max_steps,
hooks=hooks)
if 'evaluate' in params.exec_mode:
if hvd.rank() == 0:
results = estimator.evaluate(input_fn=dataset.eval_fn, steps=dataset.eval_size)
logger.log(step=(),
data={"eval_ce_loss": float(results["eval_ce_loss"]),
"eval_dice_loss": float(results["eval_dice_loss"]),
"eval_total_loss": float(results["eval_total_loss"]),
"eval_dice_score": float(results["eval_dice_score"])})
if 'predict' in params.exec_mode:
if hvd.rank() == 0:
predict_steps = dataset.test_size
hooks = None
if params.benchmark:
hooks = [ProfilingHook(logger,
batch_size=params.batch_size,
log_every=params.log_every,
warmup_steps=params.warmup_steps,
mode="test")]
predict_steps = params.warmup_steps * 2 * params.batch_size
predictions = estimator.predict(
input_fn=lambda: dataset.test_fn(count=math.ceil(predict_steps / dataset.test_size)),
hooks=hooks)
binary_masks = [np.argmax(p['logits'], axis=-1).astype(np.uint8) * 255 for p in predictions]
if not params.benchmark:
multipage_tif = [Image.fromarray(mask).resize(size=(512, 512), resample=Image.BILINEAR)
for mask in binary_masks]
output_dir = os.path.join(params.model_dir, 'pred')
if not os.path.exists(output_dir):
os.makedirs(output_dir)
multipage_tif[0].save(os.path.join(output_dir, 'test-masks.tif'),
compression="tiff_deflate",
save_all=True,
append_images=multipage_tif[1:])
if __name__ == '__main__':
tf.compat.v1.app.run()
|
PyTorch/LanguageModeling/BERT/triton | triton | run_performance_on_triton | #!/usr/bin/env python3
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import csv
import logging
import os
import pathlib
import shutil
from distutils.version import LooseVersion
from enum import Enum
from importlib.metadata import version
from typing import Any, Dict, List
import yaml
# method from PEP-366 to support relative import in executed modules
if __package__ is None:
__package__ = pathlib.Path(__file__).parent.name
from .deployment_toolkit.core import BatchingMode, EvaluationMode, MeasurementMode, OfflineMode, PerformanceTool
from .deployment_toolkit.model_analyzer import ModelAnalyzer, ModelAnalyzerConfig, ModelAnalyzerMode
from .deployment_toolkit.perf_analyzer import PerfAnalyzer, PerfAnalyzerConfig
from .deployment_toolkit.report import save_results, show_results, sort_results
from .deployment_toolkit.utils import parse_server_url
from .deployment_toolkit.warmup import performance_evaluation_warmup
LOGGER = logging.getLogger("run_performance_on_triton")
TRITON_CLIENT_VERSION = LooseVersion(version("tritonclient"))
def _log_dict(title: str, dict_: Dict[str, Any]):
LOGGER.info(title)
for key, value in dict_.items():
LOGGER.info(f"\t{key} = {value}")
def _calculate_average_latency(r):
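    # Sum the per-stage latencies reported by perf_analyzer for one CSV row
    # to obtain an end-to-end average latency for the request.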
avg_sum_fields = [
"Client Send",
"Network+Server Send/Recv",
"Server Queue",
"Server Compute",
"Server Compute Input",
"Server Compute Infer",
"Server Compute Output",
"Client Recv",
]
avg_latency = sum([int(r.get(f, 0)) for f in avg_sum_fields])
return avg_latency
def _update_performance_data(results: List, batch_size: int, performance_partial_file: str):
row: Dict = {"Batch": batch_size}
with open(performance_partial_file) as csvfile:
reader = csv.DictReader(csvfile)
for r in reader:
avg_latency = _calculate_average_latency(r)
row = {**row, **r, "avg latency": avg_latency}
results.append(row)
def _model_analyzer_evaluation(
server_url: str,
model_name: str,
input_data: str,
input_shapes: List[str],
batch_sizes: List[int],
number_of_triton_instances: int,
number_of_model_instances: int,
measurement_mode: MeasurementMode,
measurement_interval: int,
measurement_request_count: int,
concurrency_steps: int,
batching_mode: BatchingMode,
evaluation_mode: EvaluationMode,
offline_mode: OfflineMode,
model_repository: str,
result_path: str,
output_shared_memory_size: int = 102400,
verbose: bool = False,
):
_log_dict(
"Selected configuration",
{
"server_url": server_url,
"model_name": model_name,
"input_data": input_data,
"input_shapes": input_shapes,
"batch_sizes": batch_sizes,
"number_of_triton_instances": number_of_triton_instances,
"number_of_model_instances": number_of_model_instances,
"measurement_mode": measurement_mode,
"measurement_interval": measurement_interval,
"measurement_request_count": measurement_request_count,
"concurrency_steps": concurrency_steps,
"batching_mode": batching_mode,
"evaluation_mode": evaluation_mode,
"offline_mode": offline_mode,
"output_shared_memory_size": output_shared_memory_size,
"model_repository": model_repository,
"result_path": result_path,
"verbose": verbose,
},
)
perf_analyzer_config = {
"input-data": input_data,
"measurement-interval": measurement_interval,
}
if TRITON_CLIENT_VERSION >= LooseVersion("2.11.0"):
perf_analyzer_config["measurement-mode"] = measurement_mode.value
perf_analyzer_config["measurement-request-count"] = measurement_request_count
if evaluation_mode == EvaluationMode.OFFLINE:
perf_analyzer_config["shared-memory"] = offline_mode.value
perf_analyzer_config["output-shared-memory-size"] = output_shared_memory_size
if input_shapes:
perf_analyzer_config["shape"] = input_shapes[0]
LOGGER.warning("Model Analyzer support only single shape param for Perf Analyzer.")
if batching_mode == BatchingMode.STATIC:
batch_sizes = batch_sizes
concurrency = [number_of_triton_instances]
elif batching_mode == BatchingMode.DYNAMIC:
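        # Heuristic sweep for the dynamic batcher: allow up to
        # 2 * max_batch_size * total model instances outstanding requests
        # (capped at 256) and step the concurrency evenly across the
        # requested number of concurrency steps.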
max_batch_size = max(batch_sizes)
max_total_requests = 2 * max_batch_size * number_of_triton_instances * number_of_model_instances
max_concurrency = min(256, max_total_requests)
step = max(1, max_concurrency // concurrency_steps)
min_concurrency = step
concurrency = {"start": min_concurrency, "stop": max_concurrency, "step": step}
batch_sizes = [max(1, max_total_requests // 256)]
else:
raise ValueError(f"Unsupported batching mode: {batching_mode}")
protocol, host, port = parse_server_url(server_url)
checkpoints = pathlib.Path("./checkpoints")
if checkpoints.is_dir():
shutil.rmtree(checkpoints.as_posix())
checkpoints.mkdir(parents=True, exist_ok=True)
config = {
"model_repository": model_repository,
"triton_launch_mode": "remote",
"run_config_search_disable": True,
"perf_analyzer_flags": perf_analyzer_config,
"perf_analyzer_timeout": 3600, # Workaround for Perf Analyzer timeout - use 1h
"profile_models": [model_name],
"batch_sizes": batch_sizes,
"concurrency": concurrency,
"verbose": verbose,
"checkpoint_directory": checkpoints.as_posix(),
"override_output_model_repository": True,
"client_protocol": protocol,
f"triton_{protocol}_endpoint": f"{host}:{port}",
}
if verbose:
_log_dict("Model Analyzer profiling configuration", config)
with open("config.yaml", "w") as file:
yaml.safe_dump(config, file)
config = ModelAnalyzerConfig()
model_analyzer = ModelAnalyzer(config=config)
model_analyzer.run(mode=ModelAnalyzerMode.PROFILE, verbose=verbose)
result_path = pathlib.Path(result_path)
result_path.mkdir(parents=True, exist_ok=True)
for file in checkpoints.iterdir():
if not file.is_file() or file.suffix != ".ckpt":
continue
LOGGER.info(f"Moving checkpoint {file.name} to {result_path}")
shutil.move(file, result_path / file.name)
inference_output_fields = [
"batch_size",
"concurrency",
"perf_throughput",
"perf_latency",
"perf_client_send_recv",
"perf_client_response_wait",
"perf_server_queue",
"perf_server_compute_input",
"perf_server_compute_infer",
"perf_server_compute_output",
]
gpu_output_fields = [
"gpu_uuid",
"batch_size",
"concurrency",
"gpu_used_memory",
"gpu_free_memory",
"gpu_utilization",
"gpu_power_usage",
]
filename_model_inference = "metrics-model-inference.csv"
filename_model_gpu = "metrics-model-gpu.csv"
config = {
"analysis_models": model_name,
"checkpoint_directory": result_path.as_posix(),
"export_path": "/tmp",
"inference_output_fields": inference_output_fields,
"gpu_output_fields": gpu_output_fields,
"filename_model_inference": filename_model_inference,
"filename_model_gpu": filename_model_gpu,
"summarize": False,
}
if verbose:
_log_dict("Model Analyzer analysis configuration", config)
with open("config.yaml", "w") as file:
yaml.safe_dump(config, file)
config = ModelAnalyzerConfig()
model_analyzer = ModelAnalyzer(config=config)
model_analyzer.run(mode=ModelAnalyzerMode.ANALYZE, verbose=verbose)
inference_metrics_file = pathlib.Path("/tmp") / "results" / filename_model_inference
gpu_metrics_file = pathlib.Path("/tmp") / "results" / filename_model_gpu
for file in [inference_metrics_file, gpu_metrics_file]:
LOGGER.info(f"Moving metrics {file.name} to {result_path}")
shutil.move(file, result_path / file.name)
def _perf_analyzer_evaluation(
server_url: str,
model_name: str,
input_data: str,
input_shapes: List[str],
batch_sizes: List[int],
number_of_triton_instances: int,
number_of_model_instances: int,
measurement_mode: MeasurementMode,
measurement_interval: int,
measurement_request_count: int,
concurrency_steps: int,
batching_mode: BatchingMode,
evaluation_mode: EvaluationMode,
offline_mode: OfflineMode,
result_path: str,
output_shared_memory_size: int = 102400,
verbose: bool = False,
):
protocol, host, port = parse_server_url(server_url)
if batching_mode == BatchingMode.STATIC:
batch_sizes = batch_sizes
max_concurrency = 1
min_concurrency = 1
step = 1
elif batching_mode == BatchingMode.DYNAMIC:
max_batch_size = max(batch_sizes)
max_total_requests = 2 * max_batch_size * number_of_triton_instances * number_of_model_instances
max_concurrency = min(256, max_total_requests)
step = max(1, max_concurrency // concurrency_steps)
min_concurrency = step
batch_sizes = [max(1, max_total_requests // 256)]
else:
raise ValueError(f"Unsupported batching mode: {batching_mode}")
_log_dict(
"Selected configuration",
{
"server_url": server_url,
"model_name": model_name,
"input_data": input_data,
"input_shapes": input_shapes,
"batch_sizes": batch_sizes,
"number_of_triton_instances": number_of_triton_instances,
"number_of_model_instances": number_of_model_instances,
"measurement_mode": measurement_mode,
"measurement_interval": measurement_interval,
"measurement_request_count": measurement_request_count,
"concurrency_steps": concurrency_steps,
"batching_mode": batching_mode,
"evaluation_mode": evaluation_mode,
"offline_mode": offline_mode,
"output_shared_memory_size": output_shared_memory_size,
"result_path": result_path,
"verbose": verbose,
},
)
results: List[Dict] = list()
for batch_size in batch_sizes:
for concurrency in range(min_concurrency, max_concurrency + step, step):
performance_partial_file = f"triton_performance_{evaluation_mode.value.lower()}_{batching_mode.value.lower()}_partial_{batch_size}_{concurrency}.csv"
params = {
"model-name": model_name,
"model-version": 1,
"batch-size": batch_size,
"url": f"{host}:{port}",
"protocol": protocol,
"input-data": input_data,
"measurement-interval": measurement_interval,
"concurrency-range": f"{concurrency}:{concurrency}:1",
"latency-report-file": performance_partial_file,
}
if verbose:
params["extra-verbose"] = True
if TRITON_CLIENT_VERSION >= LooseVersion("2.11.0"):
params["measurement-mode"] = measurement_mode.value
params["measurement-request-count"] = measurement_request_count
if evaluation_mode == EvaluationMode.OFFLINE:
params["shared-memory"] = offline_mode.value
params["output-shared-memory-size"] = output_shared_memory_size
if verbose:
_log_dict(f"Perf Analyzer config for batch_size: {batch_size} and concurrency: {concurrency}", params)
config = PerfAnalyzerConfig()
for param, value in params.items():
config[param] = value
for shape in input_shapes:
config["shape"] = shape
perf_analyzer = PerfAnalyzer(config=config)
perf_analyzer.run()
_update_performance_data(results, batch_size, performance_partial_file)
os.remove(performance_partial_file)
results = sort_results(results=results)
save_results(filename=result_path, data=results)
show_results(results=results)
def _run_performance_analysis(
server_url: str,
model_name: str,
input_data: str,
input_shapes: List[str],
batch_sizes: List[int],
number_of_triton_instances: int,
number_of_model_instances: int,
measurement_mode: MeasurementMode,
measurement_interval: int,
measurement_request_count: int,
concurrency_steps: int,
batching_mode: BatchingMode,
evaluation_mode: EvaluationMode,
offline_mode: OfflineMode,
output_shared_memory_size: int,
performance_tool: PerformanceTool,
model_repository: str,
result_path: str,
warmup: bool,
verbose: bool,
):
log_level = logging.INFO if not verbose else logging.DEBUG
log_format = "%(asctime)s %(levelname)s %(name)s %(message)s"
logging.basicConfig(level=log_level, format=log_format)
if warmup:
LOGGER.info("Running warmup before the main test")
performance_evaluation_warmup(
server_url=server_url,
model_name=model_name,
input_data=input_data,
input_shapes=input_shapes,
batch_sizes=batch_sizes,
number_of_triton_instances=number_of_triton_instances,
number_of_model_instances=number_of_model_instances,
measurement_mode=measurement_mode,
measurement_interval=measurement_interval,
measurement_request_count=measurement_request_count,
batching_mode=batching_mode,
evaluation_mode=evaluation_mode,
offline_mode=offline_mode,
output_shared_memory_size=output_shared_memory_size,
)
if performance_tool == PerformanceTool.MODEL_ANALYZER:
LOGGER.info("Using Model Analyzer for performance evaluation")
_model_analyzer_evaluation(
server_url=server_url,
model_name=model_name,
input_data=input_data,
input_shapes=input_shapes,
batch_sizes=batch_sizes,
number_of_triton_instances=number_of_triton_instances,
number_of_model_instances=number_of_model_instances,
measurement_mode=measurement_mode,
measurement_interval=measurement_interval,
measurement_request_count=measurement_request_count,
concurrency_steps=concurrency_steps,
batching_mode=batching_mode,
evaluation_mode=evaluation_mode,
offline_mode=offline_mode,
output_shared_memory_size=output_shared_memory_size,
model_repository=model_repository,
result_path=result_path,
verbose=verbose,
)
elif performance_tool == PerformanceTool.PERF_ANALYZER:
LOGGER.info("Using Perf Analyzer for performance evaluation")
_perf_analyzer_evaluation(
server_url=server_url,
model_name=model_name,
input_data=input_data,
input_shapes=input_shapes,
batch_sizes=batch_sizes,
number_of_triton_instances=number_of_triton_instances,
number_of_model_instances=number_of_model_instances,
measurement_mode=measurement_mode,
measurement_interval=measurement_interval,
measurement_request_count=measurement_request_count,
concurrency_steps=concurrency_steps,
batching_mode=batching_mode,
evaluation_mode=evaluation_mode,
offline_mode=offline_mode,
output_shared_memory_size=output_shared_memory_size,
result_path=result_path,
verbose=verbose,
)
else:
raise ValueError(f"Unsupported performance tool {performance_tool}")
class MeasurementMode(Enum):
"""
Available measurement stabilization modes
"""
COUNT_WINDOWS = "count_windows"
TIME_WINDOWS = "time_windows"
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"--server-url",
type=str,
required=False,
default="grpc://127.0.0.1:8001",
help="Url to Triton server",
)
parser.add_argument(
"--model-name",
type=str,
required=True,
help="Name of the model to test",
)
parser.add_argument(
"--input-data",
type=str,
required=False,
default="random",
help="Input data to perform profiling.",
)
parser.add_argument(
"--input-shapes",
action="append",
required=False,
help="Input data shape in form INPUT_NAME:<full_shape_without_batch_axis>.",
)
parser.add_argument(
"--batch-sizes",
type=str,
required=True,
help="List of batch sizes to tests. Comma separated.",
)
parser.add_argument(
"--number-of-triton-instances",
type=int,
default=1,
help="Number of Triton Server instances",
)
parser.add_argument(
"--number-of-model-instances",
type=int,
default=1,
help="Number of models instances on Triton Server",
)
parser.add_argument(
"--measurement-mode",
choices=[item.value for item in MeasurementMode],
default=MeasurementMode.COUNT_WINDOWS.value,
type=str,
help="Select measurement mode "
"'time_windows' stabilize performance on measurement window. "
"'count_windows' stabilize performance on number of samples.",
)
parser.add_argument(
"--measurement-interval",
required=False,
help="Time window perf_analyzer will wait to stabilize the measurement",
default=5000,
type=int,
)
parser.add_argument(
"--measurement-request-count",
required=False,
help="Number of samples on which perf_analyzer will stabilize the measurement",
default=50,
type=int,
)
parser.add_argument(
"--concurrency-steps",
help="Define number of concurrency steps used for dynamic batching tests",
default=32,
type=int,
)
parser.add_argument(
"--batching-mode",
choices=[item.value for item in BatchingMode],
default=BatchingMode.STATIC.value,
type=str,
help="Select batching mode "
"'static' run static batching scenario. "
"'dynamic' run dynamic batching scenario.",
)
parser.add_argument(
"--evaluation-mode",
choices=[item.value for item in EvaluationMode],
default=EvaluationMode.OFFLINE.value,
type=str,
help="Select evaluation mode "
"'offline' run offline analysis and use GPU memory to pass tensors. "
"'online' run online analysis and use HTTP protocol.",
)
parser.add_argument(
"--offline-mode",
choices=[item.value for item in OfflineMode],
default=OfflineMode.SYSTEM.value,
type=str,
help="Select offline mode "
"'system' pass tensors through CPU RAM memory. "
"'cuda' pass tensors through GPU RAM memory.",
)
parser.add_argument(
"--output-shared-memory-size",
default=100240,
type=int,
help="Size of memory buffer allocated for output with dynamic shapes in bytes. "
"Has to be equal to maximal size of output tensor.",
)
parser.add_argument(
"--performance-tool",
choices=[item.value for item in PerformanceTool],
default=PerformanceTool.MODEL_ANALYZER.value,
type=str,
help="Select performance tool for measurement mode "
"'model_analyzer' use Model Analyzer "
"'perf_analyzer' use Perf Analyzer",
)
parser.add_argument(
"--model-repository",
default=None,
type=str,
help="Path to model repository. Valid when using Model Analyzer",
)
parser.add_argument("--result-path", type=str, required=True, help="Path where results files is stored.")
parser.add_argument(
"--warmup", help="Enable model warmup before performance test", action="store_true", default=False
)
parser.add_argument("-v", "--verbose", help="Verbose logs", action="store_true", default=False)
args = parser.parse_args()
batch_sizes = list(map(lambda x: int(x), args.batch_sizes.split(",")))
_run_performance_analysis(
server_url=args.server_url,
model_name=args.model_name,
input_data=args.input_data,
input_shapes=args.input_shapes or [],
batch_sizes=batch_sizes,
number_of_triton_instances=args.number_of_triton_instances,
number_of_model_instances=args.number_of_model_instances,
measurement_mode=MeasurementMode(args.measurement_mode),
measurement_interval=args.measurement_interval,
measurement_request_count=args.measurement_request_count,
concurrency_steps=args.concurrency_steps,
batching_mode=BatchingMode(args.batching_mode),
evaluation_mode=EvaluationMode(args.evaluation_mode),
offline_mode=OfflineMode(args.offline_mode),
output_shared_memory_size=args.output_shared_memory_size,
performance_tool=PerformanceTool(args.performance_tool),
model_repository=args.model_repository,
result_path=args.result_path,
warmup=args.warmup,
verbose=args.verbose,
)
if __name__ == "__main__":
main()
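# A hypothetical invocation sketch (the script name and model name are
# placeholder assumptions; the flags come from the parser above):
#   python run_performance_on_triton.py \
#       --model-name MyModel \
#       --batch-sizes 1,2,4 \
#       --result-path results/triton_performance.csv \
#       --batching-mode static \
#       --warmup --verbose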
|
Tools/PyTorch/TimeSeriesPredictionPlatform/models/tft_pyt/triton/runner | runner | pipeline | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
from typing import Dict, Tuple
# method from PEP-366 to support relative import in executed modules
if __name__ == "__main__" and __package__ is None:
__package__ = pathlib.Path(__file__).parent.name
from .stages import (
ConversionStage,
DeployStage,
ExportStage,
ResultsType,
TritonPerformanceOfflineStage,
TritonPerformanceOnlineStage,
TritonPreparePerformanceProfilingDataStage,
)
class Pipeline:
"""
Definition of stages that have to be executed before and during experiments
"""
# Stages to execute as part of single experiment
_experiment_stages = [
ExportStage.label,
ConversionStage.label,
DeployStage.label,
TritonPreparePerformanceProfilingDataStage.label,
TritonPerformanceOfflineStage.label,
TritonPerformanceOnlineStage.label,
]
def __init__(self):
"""
Initialize pipeline
"""
self._stages: Dict = dict()
def model_export(self, commands: Tuple[str, ...]) -> None:
"""
Model export stage
Args:
commands: Commands to be executed as part of stage
Returns:
None
"""
stage = ExportStage(commands=commands)
self._stages[stage.label] = stage
def model_conversion(self, commands: Tuple[str, ...]) -> None:
"""
Model conversion stage
Args:
commands: Commands to be executed as part of stage
Returns:
None
"""
stage = ConversionStage(commands=commands)
self._stages[stage.label] = stage
def model_deploy(self, commands: Tuple[str, ...]) -> None:
"""
Model deployment stage
Args:
commands: Commands to be executed as part of stage
Returns:
None
"""
stage = DeployStage(commands=commands)
self._stages[stage.label] = stage
def triton_prepare_performance_profiling_data(self, commands: Tuple[str, ...]) -> None:
"""
Model profiling data creation stage
Args:
commands: Commands to be executed as part of stage
Returns:
None
"""
stage = TritonPreparePerformanceProfilingDataStage(commands=commands)
self._stages[stage.label] = stage
def triton_performance_offline_tests(self, commands: Tuple[str, ...], result_path: str) -> None:
"""
Model performance offline test stage
Args:
commands: Commands to be executed as part of stage
result_path: Path where results file is stored
Returns:
None
"""
stage = TritonPerformanceOfflineStage(
commands=commands,
result_path=result_path,
result_type=ResultsType.TRITON_PERFORMANCE_OFFLINE,
)
self._stages[stage.label] = stage
def triton_performance_online_tests(self, commands: Tuple[str, ...], result_path: str) -> None:
"""
Model performance online test stage
Args:
commands: Commands to be executed as part of stage
result_path: Path where results file is stored
Returns:
None
"""
stage = TritonPerformanceOnlineStage(
commands=commands,
result_path=result_path,
result_type=ResultsType.TRITON_PERFORMANCE_ONLINE,
)
self._stages[stage.label] = stage
def stages(self):
"""
Generate stages which should be run per experiment
Returns:
Generator with stages object
"""
for stage_name in self._experiment_stages:
stage = self._stages.get(stage_name)
if not stage:
continue
yield stage
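# A minimal usage sketch (command strings are placeholder assumptions):
#   pipeline = Pipeline()
#   pipeline.model_export(commands=("python3 triton/export_model.py ...",))
#   pipeline.model_conversion(commands=("model-navigator convert ...",))
#   for stage in pipeline.stages():  # yields stages in _experiment_stages order
#       print(stage.label)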
|
TensorFlow2/Segmentation/Contrib/UNet3P/data_preparation | data_preparation | README | For data two options are available
- [Train on LiTS Data](#lits-liver-tumor-segmentation-challenge)
- [Train on custom data](#train-on-custom-data)
## LiTS Liver Tumor Segmentation challenge
This dataset consists of 131 liver CT scans.
Register [here](https://competitions.codalab.org/competitions/17094) to get dataset access.
Go to Participate → Training Data to get the dataset link.
Download the `Training Batch 1` and `Training Batch 2` zip files and place them under the `data` folder.
The `Training Batch 1` zip file size is 3.97 GB and the `Training Batch 2` zip file size is 11.5 GB.
Inside the main directory `/workspace/unet3p`, run the command below to extract the zip files
```shell
bash data_preparation/extract_data.sh
```
After extraction, the `Training Batch 1` folder size will be 11.4 GB and the `Training Batch 2` folder size will be 38.5 GB.
- `Training Batch 1` consists of 28 scans, which are used for testing
- `Training Batch 2` consists of 103 scans, which are used for training
The default directory structure looks like this
├── data/
│ ├── Training Batch 1/
├── segmentation-0.nii
├── volume-0.nii
├── ...
├── volume-27.nii
│ ├── Training Batch 2/
├── segmentation-28.nii
├── volume-28.nii
├── ...
├── volume-130.nii
For testing, you can have any number of files in `Training Batch 1` and `Training Batch 2`, but make sure the naming
convention stays the same.
To prepare the LiTS dataset for training, run
```
python data_preparation/preprocess_data.py
```
> Note: Because of the extensive preprocessing, it will take some time, so relax and wait.
#### Final directory
After completion, you will have a directory structure like this
├── data/
│ ├── train/
├── images
├── image_28_0.png
├── ...
├── mask
├── mask_28_0.png
├── ...
│ ├── val/
├── images
├── image_0_0.png
├── ...
├── mask
├── mask_0_0.png
├── ...
After processing, the `train` folder size will be 5 GB and the `val` folder size will be 1.7 GB.
#### Free space (Optional)
At this stage you can delete the intermediate scan files to free up space by running the command below
```shell
bash data_preparation/delete_extracted_scans_data.sh
```
You can also delete the data zip files using the command below, but remember that you cannot retrieve them afterwards
```shell
bash data_preparation/delete_zip_data.sh
```
> Note: It is recommended to delete the scan files but not the zip data, because you may need it again.
## Train on custom data
To train on a custom dataset, it is advised to follow the same train and val directory structure as
mentioned [above](#final-directory).
In our case, an image file name can be mapped to its corresponding mask file name by replacing the `image` text with `mask`. If
your data uses a different mapping, you need to update the [image_to_mask_name](./../utils/images_utils.py#L63) function, which
is responsible for converting an image name to its corresponding mask file name.
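For illustration, a minimal sketch of such a mapping function (the actual signature in `images_utils.py` may differ) could look like:
```python
def image_to_mask_name(image_name: str) -> str:
    # e.g. "image_28_0.png" -> "mask_28_0.png"
    return image_name.replace("image", "mask")
```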
Each image should be a color image with 3 channels in `RGB` format. Each mask is treated as a grayscale
image, where each pixel value is the class to which that pixel belongs.
Congratulations, now you can start training and testing on your new dataset! |
PyTorch/LanguageModeling/Transformer-XL | Transformer-XL | .gitignore | **/.DS_Store
__pycache__/
data/
results/
pytorch/LM-TFM/*
*.out
*.log
|
PyTorch/SpeechSynthesis/FastPitch/triton | triton | requirements | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
networkx==2.5
numpy
onnx==1.8.0
onnxruntime==1.5.2
pycuda>=2019.1.2
PyYAML>=5.2
tqdm>=4.44.1
tabulate>=0.8.7
natsort>=7.0.0
# use tags instead of branch names - a Docker cache hit could otherwise prevent fetching the most recent changes from a branch
model_navigator @ git+https://github.com/triton-inference-server/[email protected]#egg=model_navigator
|
TensorFlow/Detection/SSD/models/research/object_detection/core | core | region_similarity_calculator | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Region Similarity Calculators for BoxLists.
Region Similarity Calculators compare a pairwise measure of similarity
between the boxes in two BoxLists.
"""
from abc import ABCMeta
from abc import abstractmethod
import tensorflow as tf
from object_detection.core import box_list_ops
from object_detection.core import standard_fields as fields
class RegionSimilarityCalculator(object):
"""Abstract base class for region similarity calculator."""
__metaclass__ = ABCMeta
def compare(self, boxlist1, boxlist2, scope=None):
"""Computes matrix of pairwise similarity between BoxLists.
This op (to be overridden) computes a measure of pairwise similarity between
the boxes in the given BoxLists. Higher values indicate more similarity.
Note that this method simply measures similarity and does not explicitly
perform a matching.
Args:
boxlist1: BoxList holding N boxes.
boxlist2: BoxList holding M boxes.
scope: Op scope name. Defaults to 'Compare' if None.
Returns:
a (float32) tensor of shape [N, M] with pairwise similarity score.
"""
with tf.name_scope(scope, 'Compare', [boxlist1, boxlist2]) as scope:
return self._compare(boxlist1, boxlist2)
@abstractmethod
def _compare(self, boxlist1, boxlist2):
pass
class IouSimilarity(RegionSimilarityCalculator):
"""Class to compute similarity based on Intersection over Union (IOU) metric.
This class computes pairwise similarity between two BoxLists based on IOU.
"""
def _compare(self, boxlist1, boxlist2):
"""Compute pairwise IOU similarity between the two BoxLists.
Args:
boxlist1: BoxList holding N boxes.
boxlist2: BoxList holding M boxes.
Returns:
A tensor with shape [N, M] representing pairwise iou scores.
"""
return box_list_ops.iou(boxlist1, boxlist2)
class NegSqDistSimilarity(RegionSimilarityCalculator):
"""Class to compute similarity based on the squared distance metric.
This class computes pairwise similarity between two BoxLists based on the
negative squared distance metric.
"""
def _compare(self, boxlist1, boxlist2):
"""Compute matrix of (negated) sq distances.
Args:
boxlist1: BoxList holding N boxes.
boxlist2: BoxList holding M boxes.
Returns:
A tensor with shape [N, M] representing negated pairwise squared distance.
"""
return -1 * box_list_ops.sq_dist(boxlist1, boxlist2)
class IoaSimilarity(RegionSimilarityCalculator):
"""Class to compute similarity based on Intersection over Area (IOA) metric.
This class computes pairwise similarity between two BoxLists based on their
pairwise intersections divided by the areas of second BoxLists.
"""
def _compare(self, boxlist1, boxlist2):
"""Compute pairwise IOA similarity between the two BoxLists.
Args:
boxlist1: BoxList holding N boxes.
boxlist2: BoxList holding M boxes.
Returns:
A tensor with shape [N, M] representing pairwise IOA scores.
"""
return box_list_ops.ioa(boxlist1, boxlist2)
class ThresholdedIouSimilarity(RegionSimilarityCalculator):
"""Class to compute similarity based on thresholded IOU and score.
This class computes pairwise similarity between two BoxLists based on IOU and
a 'score' present in boxlist1. If IOU > threshold, then the entry in the
output pairwise tensor will contain `score`, otherwise 0.
"""
def __init__(self, iou_threshold=0):
"""Initialize the ThresholdedIouSimilarity.
Args:
iou_threshold: For a given pair of boxes, if the IOU is > iou_threshold,
then the comparison result will be the foreground probability of
the first box, otherwise it will be zero.
"""
self._iou_threshold = iou_threshold
def _compare(self, boxlist1, boxlist2):
"""Compute pairwise IOU similarity between the two BoxLists and score.
Args:
boxlist1: BoxList holding N boxes. Must have a score field.
boxlist2: BoxList holding M boxes.
Returns:
A tensor with shape [N, M] representing scores thresholded by pairwise
iou scores.
"""
ious = box_list_ops.iou(boxlist1, boxlist2)
scores = boxlist1.get_field(fields.BoxListFields.scores)
scores = tf.expand_dims(scores, axis=1)
row_replicated_scores = tf.tile(scores, [1, tf.shape(ious)[-1]])
thresholded_ious = tf.where(ious > self._iou_threshold,
row_replicated_scores, tf.zeros_like(ious))
return thresholded_ious
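# A minimal usage sketch (boxes are illustrative assumptions; BoxList
# construction follows object_detection.core.box_list):
#   from object_detection.core import box_list
#   boxes1 = box_list.BoxList(tf.constant([[0.0, 0.0, 1.0, 1.0]]))
#   boxes2 = box_list.BoxList(tf.constant([[0.0, 0.0, 0.5, 1.0]]))
#   iou_matrix = IouSimilarity().compare(boxes1, boxes2)  # shape [1, 1], value 0.5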
|
PyTorch/Detection/Efficientdet/scripts/D0 | D0 | inference_FP32_V100-32G | #!/bin/bash
rm -rf *.json
python -u -m bind_launch --nproc_per_node=${NUM_PROC:-1} validate.py '/workspace/object_detection/datasets/coco/' --model efficientdet_d0 -b ${BATCH_SIZE:-8} --torchscript --use-ema --checkpoint ${CKPT_PATH:-/checkpoints/Effdet_B0.pth} --inference |
PyTorch/SpeechSynthesis/Tacotron2/trtis_cpp/src/trt/waveglow | waveglow | blending | /*
* Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the NVIDIA CORPORATION nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "blending.h"
/******************************************************************************
* CONSTANTS ******************************************************************
*****************************************************************************/
namespace
{
constexpr const int BLOCK_SIZE = 1024;
}
/******************************************************************************
* KERNELS ********************************************************************
*****************************************************************************/
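/*
 * Linear cross-fade between the previously written base buffer and a new
 * chunk: for all but the first chunk (offset > 0), the first `overlapSize`
 * samples are blended as
 *   out[i] = (1 - w) * base[i] + w * chunk[i],  with w = i / overlapSize,
 * while the remaining samples simply overwrite the base buffer.
 */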
__global__ void linearBlendingKernel(const float* const newChunk, float* const base, const int chunkLength,
const int overlapSize, const int spacing, const int offset)
{
const int idx = threadIdx.x + blockIdx.x * blockDim.x;
if (idx < chunkLength)
{
const float weight
= offset > 0 && idx < overlapSize ? static_cast<float>(idx) / static_cast<float>(overlapSize) : 1.0f;
const int inputIdx = idx + (blockIdx.y * chunkLength);
const int outputIdx = idx + offset + (blockIdx.y * spacing);
float newValue;
if (weight < 1.0f)
{
newValue = (1.0f - weight) * base[outputIdx] + newChunk[inputIdx] * weight;
}
else
{
newValue = newChunk[inputIdx];
}
base[outputIdx] = newValue;
}
}
/******************************************************************************
* HELPER FUNCTIONS ***********************************************************
*****************************************************************************/
namespace
{
static int roundUpBlocks(const int num, const int blockSize)
{
return ((num - 1) / blockSize) + 1;
}
} // namespace
/******************************************************************************
* PUBLIC STATIC METHODS ******************************************************
*****************************************************************************/
void Blending::linear(const int batchSize, const float* const newChunk, float* const base, const int chunkSize,
const int overlapSize, const int outputSequenceSpacing, const int outputSequenceOffset, cudaStream_t stream)
{
const int blocksPerChunk = roundUpBlocks(chunkSize, BLOCK_SIZE);
const dim3 grid(blocksPerChunk, batchSize);
const dim3 block(BLOCK_SIZE);
linearBlendingKernel<<<grid, block, 0, stream>>>(
newChunk, base, chunkSize, overlapSize, outputSequenceSpacing, outputSequenceOffset);
}
|
PyTorch/Detection/Efficientdet/effdet/layers | layers | cond_conv2d | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright 2019-2022 Ross Wightman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
from functools import partial
import numpy as np
import torch
from torch import nn as nn
from torch.nn import functional as F
from .helpers import tup_pair
from .conv2d_same import conv2d_same
from .padding import get_padding_value
def get_condconv_initializer(initializer, num_experts, expert_shape):
def condconv_initializer(weight):
"""CondConv initializer function."""
num_params = np.prod(expert_shape)
if (len(weight.shape) != 2 or weight.shape[0] != num_experts or
weight.shape[1] != num_params):
raise ValueError(
'CondConv variables must have shape [num_experts, num_params]')
for i in range(num_experts):
initializer(weight[i].view(expert_shape))
return condconv_initializer
class CondConv2d(nn.Module):
""" Conditionally Parameterized Convolution
Inspired by: https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/condconv/condconv_layers.py
Grouped convolution hackery for parallel execution of the per-sample kernel filters inspired by this discussion:
https://github.com/pytorch/pytorch/issues/17983
"""
__constants__ = ['in_channels', 'out_channels', 'dynamic_padding']
def __init__(self, in_channels, out_channels, kernel_size=3,
stride=1, padding='', dilation=1, groups=1, bias=False, num_experts=4):
super(CondConv2d, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.kernel_size = tup_pair(kernel_size)
self.stride = tup_pair(stride)
padding_val, is_padding_dynamic = get_padding_value(
padding, kernel_size, stride=stride, dilation=dilation)
self.dynamic_padding = is_padding_dynamic # checked via an if in forward() to keep torchscript compatibility
self.padding = tup_pair(padding_val)
self.dilation = tup_pair(dilation)
self.groups = groups
self.num_experts = num_experts
self.weight_shape = (self.out_channels, self.in_channels // self.groups) + self.kernel_size
weight_num_param = 1
for wd in self.weight_shape:
weight_num_param *= wd
self.weight = torch.nn.Parameter(torch.Tensor(self.num_experts, weight_num_param))
if bias:
self.bias_shape = (self.out_channels,)
self.bias = torch.nn.Parameter(torch.Tensor(self.num_experts, self.out_channels))
else:
self.register_parameter('bias', None)
self.reset_parameters()
def reset_parameters(self):
init_weight = get_condconv_initializer(
partial(nn.init.kaiming_uniform_, a=math.sqrt(5)), self.num_experts, self.weight_shape)
init_weight(self.weight)
if self.bias is not None:
fan_in = np.prod(self.weight_shape[1:])
bound = 1 / math.sqrt(fan_in)
init_bias = get_condconv_initializer(
partial(nn.init.uniform_, a=-bound, b=bound), self.num_experts, self.bias_shape)
init_bias(self.bias)
def forward(self, x, routing_weights):
B, C, H, W = x.shape
weight = torch.matmul(routing_weights, self.weight)
new_weight_shape = (B * self.out_channels, self.in_channels // self.groups) + self.kernel_size
weight = weight.view(new_weight_shape)
bias = None
if self.bias is not None:
bias = torch.matmul(routing_weights, self.bias)
bias = bias.view(B * self.out_channels)
# move batch elements with channels so each batch element can be efficiently convolved with separate kernel
x = x.view(1, B * C, H, W)
if self.dynamic_padding:
out = conv2d_same(
x, weight, bias, stride=self.stride, padding=self.padding,
dilation=self.dilation, groups=self.groups * B)
else:
out = F.conv2d(
x, weight, bias, stride=self.stride, padding=self.padding,
dilation=self.dilation, groups=self.groups * B)
out = out.permute([1, 0, 2, 3]).view(B, self.out_channels, out.shape[-2], out.shape[-1])
# Literal port (from TF definition)
# x = torch.split(x, 1, 0)
# weight = torch.split(weight, 1, 0)
# if self.bias is not None:
# bias = torch.matmul(routing_weights, self.bias)
# bias = torch.split(bias, 1, 0)
# else:
# bias = [None] * B
# out = []
# for xi, wi, bi in zip(x, weight, bias):
# wi = wi.view(*self.weight_shape)
# if bi is not None:
# bi = bi.view(*self.bias_shape)
# out.append(self.conv_fn(
# xi, wi, bi, stride=self.stride, padding=self.padding,
# dilation=self.dilation, groups=self.groups))
# out = torch.cat(out, 0)
return out |
TensorFlow2/Recommendation/SIM/sim/layers | layers | ctr_classification_mlp | # Copyright (c) 2022 NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import partial
import tensorflow as tf
class CTRClassificationMLP(tf.keras.layers.Layer):
def __init__(
self,
layer_sizes=(200,),
num_outputs=1,
activation_function=partial(
tf.keras.layers.PReLU, alpha_initializer=tf.keras.initializers.Constant(0.1)
),
use_bn=False,
dropout_rate=-1
):
super().__init__()
self.layer_sizes = layer_sizes
self.activation_function = activation_function
self.use_bn = use_bn
self.dropout_rate = dropout_rate
if self.use_bn:
self.batch_norm = tf.keras.layers.BatchNormalization()
self.layers = []
for layer_size in self.layer_sizes:
# add dense layer and activation
self.layers.append(tf.keras.layers.Dense(layer_size))
self.layers.append(self.activation_function())
if self.dropout_rate > 0.0:
# add dropout between final representation and classification layer
self.layers.append(tf.keras.layers.Dropout(rate=self.dropout_rate))
# add the scoring layer
scoring_layer = tf.keras.layers.Dense(num_outputs, dtype='float32')
self.layers.append(scoring_layer)
def call(self, input, training=False):
if self.use_bn:
input = self.batch_norm(input, training=training)
for layer in self.layers:
input = layer(input, training=training)
return input
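# A minimal usage sketch (shapes are illustrative assumptions):
#   mlp = CTRClassificationMLP(layer_sizes=(200,), num_outputs=1, dropout_rate=0.1)
#   logits = mlp(tf.random.normal((32, 128)), training=True)  # -> shape [32, 1]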
|
TensorFlow/Segmentation/UNet_3D_Medical/scripts | scripts | unet3d_infer_benchmark_TF-AMP | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script launches the 3D-UNet TF-AMP inference benchmark.
# Usage:
# bash scripts/unet3d_infer_benchmark_TF-AMP.sh <path/to/dataset> <path/to/results/directory> <batch/size>
python main.py --data_dir $1 --model_dir $2 --exec_mode predict --warmup_steps 20 --fold 0 --batch_size $3 --benchmark --amp --xla |
Tools/PyTorch/TimeSeriesPredictionPlatform/evaluators | evaluators | evaluation_metrics | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from abc import ABC, abstractmethod
from sklearn.metrics import r2_score # needed by the R_Squared metric below
class AbstractMetric(ABC):
@staticmethod
@abstractmethod
def __call__(pred, label, weights):
pass
class SMAPE(AbstractMetric):
name = "SMAPE"
@staticmethod
def __call__(preds, labels, weights):
if not weights.size:
weights = None
return 100 * np.average(2 * np.abs(preds - labels) / (np.abs(labels) + np.abs(preds)), weights=weights)
def normalised_quantile_loss(y_pred, y, quantile, weights=None):
"""Implementation of the q-Risk function from https://arxiv.org/pdf/1912.09363.pdf"""
prediction_underflow = y - y_pred
weighted_errors = quantile * np.maximum(prediction_underflow, 0.0) + (1.0 - quantile) * np.maximum(
-prediction_underflow, 0.0
)
if weights is not None and weights.size:
weighted_errors = weighted_errors * weights
y = y * weights
loss = weighted_errors.sum()
normaliser = abs(y).sum()
return 2 * loss / normaliser
class P50_loss(AbstractMetric):
name = "P50"
selector = 1
@staticmethod
def __call__(preds, labels, weights):
return normalised_quantile_loss(preds, labels, 0.5, weights)
class P90_loss(AbstractMetric):
name = "P90"
selector = 2
@staticmethod
def __call__(preds, labels, weights):
return normalised_quantile_loss(preds, labels, 0.9, weights)
# Normalized Deviation
class ND(AbstractMetric):
name = "ND"
@staticmethod
def __call__(preds, labels, weights):
diff = np.abs(labels - preds)
if not weights.size:
return np.sum(diff) / np.sum(np.abs(labels))
else:
return np.sum(diff * weights) / np.sum(np.abs(labels) * weights)
class MAE(AbstractMetric):
name = "MAE"
@staticmethod
def __call__(preds, labels, weights, return_individual=False):
if not weights.size:
weights = None
if return_individual:
return np.average(np.abs(preds - labels), weights=weights, axis=0)
else:
return np.average(np.abs(preds - labels), weights=weights)
class MSE(AbstractMetric):
name = "MSE"
@staticmethod
def __call__(preds, labels, weights, return_individual=False):
if not weights.size:
weights = None
if return_individual:
return np.average((preds - labels)**2, weights=weights, axis=0)
else:
return np.average((preds - labels)**2, weights=weights)
class RMSE(AbstractMetric):
name = "RMSE"
@staticmethod
def __call__(preds, labels, weights):
if not weights.size:
weights = None
return np.sqrt(np.average((preds - labels)**2, weights=weights))
class R_Squared(AbstractMetric):
name = "R_Squared"
@staticmethod
def __call__(preds, labels, weights, return_individual=False):
if not weights.size:
if return_individual:
return r2_score(preds, labels, multioutput="raw_values")
return r2_score(preds, labels)
else:
values = r2_score(preds, labels, multioutput="raw_values")
if return_individual:
return values * weights
return np.sum(values * weights) / np.sum(weights)
class WMSMAPE(AbstractMetric):
name = "WMSMAPE"
@staticmethod
def __call__(preds, labels, weights, return_individual=False):
if weights.size:
if return_individual:
return 2 * weights * np.abs(preds - labels) / (np.maximum(labels, 1) + np.abs(preds))
else:
return (
100.0
/ np.sum(weights)
* np.sum(2 * weights * np.abs(preds - labels) / (np.maximum(labels, 1) + np.abs(preds)))
)
if return_individual:
return 2 * np.abs(preds - labels) / (np.maximum(labels, 1) + np.abs(preds))
else:
return 100.0 / len(labels) * np.sum(2 * np.abs(preds - labels) / (np.maximum(labels, 1) + np.abs(preds)))
METRICS = {
"SMAPE": SMAPE,
"WMSMAPE": WMSMAPE,
"MSE": MSE,
"MAE": MAE,
"P50": P50_loss,
"P90": P90_loss,
"RMSE": RMSE,
"R_Squared": R_Squared,
"ND": ND,
}
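# A minimal usage sketch (values are illustrative assumptions):
#   preds, labels = np.array([1.0, 2.0]), np.array([1.5, 1.0])
#   weights = np.array([])                    # empty array -> unweighted
#   METRICS["MAE"]()(preds, labels, weights)  # -> 0.75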
|
TensorFlow/Detection/SSD/models/research/object_detection/utils | utils | per_image_vrd_evaluation | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Evaluates Visual Relations Detection(VRD) result evaluation on an image.
Annotate each VRD result as true positives or false positive according to
a predefined IOU ratio. Multi-class detection is supported by default.
Based on the settings, per image evaluation is performed either on phrase
detection subtask or on relation detection subtask.
"""
import numpy as np
from object_detection.utils import np_box_list
from object_detection.utils import np_box_list_ops
class PerImageVRDEvaluation(object):
"""Evaluate vrd result of a single image."""
def __init__(self, matching_iou_threshold=0.5):
"""Initialized PerImageVRDEvaluation by evaluation parameters.
Args:
matching_iou_threshold: A ratio of area intersection to union, which is
the threshold to consider whether a detection is a true positive or not,
in the phrase detection subtask.
"""
self.matching_iou_threshold = matching_iou_threshold
def compute_detection_tp_fp(self, detected_box_tuples, detected_scores,
detected_class_tuples, groundtruth_box_tuples,
groundtruth_class_tuples):
"""Evaluates VRD as being tp, fp from a single image.
Args:
detected_box_tuples: A numpy array of structures with shape [N,],
representing N tuples, each tuple containing the same number of named
bounding boxes.
Each box is of the format [y_min, x_min, y_max, x_max].
detected_scores: A float numpy array of shape [N,], representing
the confidence scores of the detected N object instances.
detected_class_tuples: A numpy array of structures shape [N,],
representing the class labels of the corresponding bounding boxes and
possibly additional classes.
groundtruth_box_tuples: A float numpy array of structures with the shape
[M,], representing M tuples, each tuple containing the same number
of named bounding boxes.
Each box is of the format [y_min, x_min, y_max, x_max].
groundtruth_class_tuples: A numpy array of structures shape [M,],
representing the class labels of the corresponding bounding boxes and
possibly additional classes.
Returns:
scores: A single numpy array with shape [N,], representing N scores
detected with object class, sorted in descending order.
tp_fp_labels: A single boolean numpy array of shape [N,], representing N
True/False positive label, one label per tuple. The labels are sorted
so that the order of the labels matches the order of the scores.
result_mapping: A numpy array with shape [N,] with original index of each
entry.
"""
scores, tp_fp_labels, result_mapping = self._compute_tp_fp(
detected_box_tuples=detected_box_tuples,
detected_scores=detected_scores,
detected_class_tuples=detected_class_tuples,
groundtruth_box_tuples=groundtruth_box_tuples,
groundtruth_class_tuples=groundtruth_class_tuples)
return scores, tp_fp_labels, result_mapping
def _compute_tp_fp(self, detected_box_tuples, detected_scores,
detected_class_tuples, groundtruth_box_tuples,
groundtruth_class_tuples):
"""Labels as true/false positives detection tuples across all classes.
Args:
detected_box_tuples: A numpy array of structures with shape [N,],
representing N tuples, each tuple containing the same number of named
bounding boxes.
Each box is of the format [y_min, x_min, y_max, x_max]
detected_scores: A float numpy array of shape [N,], representing
the confidence scores of the detected N object instances.
detected_class_tuples: A numpy array of structures shape [N,],
representing the class labels of the corresponding bounding boxes and
possibly additional classes.
groundtruth_box_tuples: A float numpy array of structures with the shape
[M,], representing M tuples, each tuple containing the same number
of named bounding boxes.
Each box is of the format [y_min, x_min, y_max, x_max]
groundtruth_class_tuples: A numpy array of structures shape [M,],
representing the class labels of the corresponding bounding boxes and
possibly additional classes.
Returns:
scores: A single numpy array with shape [N,], representing N scores
detected with object class, sorted in descending order.
tp_fp_labels: A single boolean numpy array of shape [N,], representing N
True/False positive label, one label per tuple. The labels are sorted
so that the order of the labels matches the order of the scores.
result_mapping: A numpy array with shape [N,] with original index of each
entry.
"""
unique_gt_tuples = np.unique(
np.concatenate((groundtruth_class_tuples, detected_class_tuples)))
result_scores = []
result_tp_fp_labels = []
result_mapping = []
for unique_tuple in unique_gt_tuples:
detections_selector = (detected_class_tuples == unique_tuple)
gt_selector = (groundtruth_class_tuples == unique_tuple)
selector_mapping = np.where(detections_selector)[0]
detection_scores_per_tuple = detected_scores[detections_selector]
detection_box_per_tuple = detected_box_tuples[detections_selector]
sorted_indices = np.argsort(detection_scores_per_tuple)
sorted_indices = sorted_indices[::-1]
tp_fp_labels = self._compute_tp_fp_for_single_class(
detected_box_tuples=detection_box_per_tuple[sorted_indices],
groundtruth_box_tuples=groundtruth_box_tuples[gt_selector])
result_scores.append(detection_scores_per_tuple[sorted_indices])
result_tp_fp_labels.append(tp_fp_labels)
result_mapping.append(selector_mapping[sorted_indices])
if result_scores:
result_scores = np.concatenate(result_scores)
result_tp_fp_labels = np.concatenate(result_tp_fp_labels)
result_mapping = np.concatenate(result_mapping)
else:
result_scores = np.array([], dtype=float)
result_tp_fp_labels = np.array([], dtype=bool)
result_mapping = np.array([], dtype=int)
sorted_indices = np.argsort(result_scores)
sorted_indices = sorted_indices[::-1]
return result_scores[sorted_indices], result_tp_fp_labels[
sorted_indices], result_mapping[sorted_indices]
def _get_overlaps_and_scores_relation_tuples(self, detected_box_tuples,
groundtruth_box_tuples):
"""Computes overlaps and scores between detected and groundtruth tuples.
Both detections and groundtruth boxes have the same class tuples.
Args:
detected_box_tuples: A numpy array of structures with shape [N,],
representing N tuples, each tuple containing the same number of named
bounding boxes.
Each box is of the format [y_min, x_min, y_max, x_max]
groundtruth_box_tuples: A float numpy array of structures with the shape
[M,], representing M tuples, each tuple containing the same number
of named bounding boxes.
Each box is of the format [y_min, x_min, y_max, x_max]
Returns:
result_iou: A float numpy array of size
[num_detected_tuples, num_gt_box_tuples].
"""
result_iou = np.ones(
(detected_box_tuples.shape[0], groundtruth_box_tuples.shape[0]),
dtype=float)
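# The IOU of a detection/groundtruth tuple pair is the minimum IOU over all
# named box fields, i.e. every box in the tuple must overlap its counterpart.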
for field in detected_box_tuples.dtype.fields:
detected_boxlist_field = np_box_list.BoxList(detected_box_tuples[field])
gt_boxlist_field = np_box_list.BoxList(groundtruth_box_tuples[field])
iou_field = np_box_list_ops.iou(detected_boxlist_field, gt_boxlist_field)
result_iou = np.minimum(iou_field, result_iou)
return result_iou
def _compute_tp_fp_for_single_class(self, detected_box_tuples,
groundtruth_box_tuples):
"""Labels boxes detected with the same class from the same image as tp/fp.
Detection boxes are expected to be already sorted by score.
Args:
detected_box_tuples: A numpy array of structures with shape [N,],
representing N tuples, each tuple containing the same number of named
bounding boxes.
Each box is of the format [y_min, x_min, y_max, x_max]
groundtruth_box_tuples: A float numpy array of structures with the shape
[M,], representing M tuples, each tuple containing the same number
of named bounding boxes.
Each box is of the format [y_min, x_min, y_max, x_max]
Returns:
tp_fp_labels: a boolean numpy array indicating whether a detection is a
true positive.
"""
if detected_box_tuples.size == 0:
return np.array([], dtype=bool)
min_iou = self._get_overlaps_and_scores_relation_tuples(
detected_box_tuples, groundtruth_box_tuples)
num_detected_tuples = detected_box_tuples.shape[0]
tp_fp_labels = np.zeros(num_detected_tuples, dtype=bool)
if min_iou.shape[1] > 0:
max_overlap_gt_ids = np.argmax(min_iou, axis=1)
is_gt_tuple_detected = np.zeros(min_iou.shape[1], dtype=bool)
for i in range(num_detected_tuples):
gt_id = max_overlap_gt_ids[i]
if min_iou[i, gt_id] >= self.matching_iou_threshold:
if not is_gt_tuple_detected[gt_id]:
tp_fp_labels[i] = True
is_gt_tuple_detected[gt_id] = True
return tp_fp_labels
|
TensorFlow/Recommendation/VAE-CF/vae/metrics | metrics | ndcg | # Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Normalized Discounted Cumulative Gain @ R is
NDCG@R(u,ω) := DCG@R(u,ω) / IDCG@R(u,ω)
DCG@R(u,ω) := Σ_{r=1}^{R} I[ω(r) ∈ I_u] / log2(r + 1)
IDCG@R(u,ω) := Σ_{r=1}^{min(|I_u|, R)} 1 / log2(r + 1)
https://en.wikipedia.org/wiki/Discounted_cumulative_gain#Normalized_DCG
https://arxiv.org/pdf/1802.05814.pdf, chapter 4.2
"""
import numpy as np
from scipy.sparse import csr_matrix
def ndcg(X_true: csr_matrix, X_top_k: np.array, R=100) -> np.array:
""" Calculate ndcg@R for each users in X_true and X_pred matrices
Args:
X_true: Matrix containing True values for user-item interactions
X_top_k: Matrix containing indices picked by the model
R: Number of elements taken into consideration
Returns:
Numpy array containing calculated ndcg@R for each user
"""
penalties = 1. / np.log2(np.arange(2, R + 2))
selected = np.take_along_axis(X_true, X_top_k[:, :R], axis=-1)
DCG = selected * penalties
cpenalties = np.empty(R + 1)
np.cumsum(penalties, out=cpenalties[1:])
cpenalties[0] = 0
maxhit = np.minimum(X_true.getnnz(axis=1), R)
IDCG = cpenalties[maxhit]
return DCG / IDCG
|
Tools/DGLPyTorch/SyntheticGraphGeneration/syngen/preprocessing/datasets | datasets | cora | # Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import logging
import shutil
import subprocess
from typing import List, Union, Optional
import numpy as np
import pandas as pd
from syngen.configuration import SynGenDatasetFeatureSpec
from syngen.preprocessing.base_preprocessing import BasePreprocessing
from syngen.utils.types import MetaData
logger = logging.getLogger(__name__)
log = logger
class CORAPreprocessing(BasePreprocessing):
def __init__(
self,
source_path: str,
destination_path: Optional[str] = None,
download: bool = False,
**kwargs,
):
"""
preprocessing for https://linqs-data.soe.ucsc.edu/public/lbc/cora.tgz
"""
super().__init__(source_path, destination_path, download, **kwargs)
def transform(self, gpu=False, use_cache=False):
assert not gpu, "CORA preprocessing does not support cudf preprocessing"
if use_cache and os.path.exists(self.destination_path):
return SynGenDatasetFeatureSpec.instantiate_from_preprocessed(self.destination_path)
tabular_operator = pd
operator = np
examples = {}
with open(os.path.join(self.source_path, 'cora.content'), "r") as cora_content:
for line in cora_content:
entries = line.rstrip("\n").split("\t")
# entries contains [ID, Word1, Word2, ..., Label]; "Words" are 0/1 values.
words = list(map(int, entries[1:-1]))
example_id = int(entries[0])
label = entries[-1]
features = {
"id": example_id,
"label": label,
}
for i, w in enumerate(words):
features[f"w_{i}"] = w
examples[example_id] = features
tabular_data = tabular_operator.DataFrame.from_dict(
examples, orient="index"
).reset_index(drop=True)
node_features = [
{
MetaData.NAME: f"w_{i}",
MetaData.DTYPE: 'int64',
MetaData.FEATURE_TYPE: MetaData.CATEGORICAL,
}
for i in range(len(words))
]
node_features.extend([
{
MetaData.NAME: name,
MetaData.DTYPE: 'int64',
MetaData.FEATURE_TYPE: MetaData.CATEGORICAL,
}
for name in ["label"]
])
for c in tabular_data.columns:
tabular_data[c] = tabular_data[c].astype("category").cat.codes.astype(int)
tabular_data = tabular_data.set_index('id')
structural_data = tabular_operator.read_csv(os.path.join(self.source_path, "cora.cites"))
structural_data.columns = ["src", "dst"]
for c in ["src", "dst"]:
structural_data[c] = structural_data[c].astype(int)
paper_ids = operator.unique(operator.concatenate([
structural_data["src"].values,
structural_data["dst"].values,
]))
mapping = operator.empty(int(paper_ids.max()) + 1, dtype=int)
mapping[paper_ids] = operator.arange(len(paper_ids))
for c in ["src", "dst"]:
structural_data[c] = mapping[structural_data[c]]
graph_metadata = {
MetaData.NODES: [
{
MetaData.NAME: "paper",
MetaData.COUNT: len(tabular_data),
MetaData.FEATURES: node_features,
MetaData.FEATURES_PATH: "paper.parquet",
},
],
MetaData.EDGES: [{
MetaData.NAME: "cite",
MetaData.COUNT: len(structural_data),
MetaData.SRC_NODE_TYPE: "paper",
MetaData.DST_NODE_TYPE: "paper",
MetaData.DIRECTED: False,
MetaData.FEATURES: [],
MetaData.FEATURES_PATH: None,
MetaData.STRUCTURE_PATH: "cite_edge_list.parquet",
}]
}
shutil.rmtree(self.destination_path, ignore_errors=True)
os.makedirs(self.destination_path)
tabular_data.to_parquet(os.path.join(self.destination_path, "paper.parquet"))
structural_data.to_parquet(os.path.join(self.destination_path, "cite_edge_list.parquet"))
with open(os.path.join(self.destination_path, 'graph_metadata.json'), 'w') as f:
json.dump(graph_metadata, f, indent=4)
graph_metadata[MetaData.PATH] = self.destination_path
return SynGenDatasetFeatureSpec(graph_metadata)
def download(self):
log.info("downloading CORA dataset...")
cmds = [
fr"mkdir -p {self.source_path}",
fr"wget 'https://linqs-data.soe.ucsc.edu/public/lbc/cora.tgz' -P {self.source_path}",
fr"tar -xf {self.source_path}/cora.tgz -C {self.source_path}",
fr"sed -i 's/\t/,/g' {self.source_path}/cora/cora.cites",
fr"sed -i '1s/^/src,dst\n/' {self.source_path}/cora/cora.cites",
fr"mv {self.source_path}/cora/* {self.source_path}/.",
fr"rm -r {self.source_path}/cora",
]
for cmd in cmds:
try:
subprocess.check_output(cmd, shell=True)
except subprocess.CalledProcessError as e:
raise Exception(e.output)
def _check_files(self):
files = ['cora.cites', 'cora.content']
return all(os.path.exists(os.path.join(self.source_path, file)) for file in files)
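# A minimal usage sketch (paths are placeholder assumptions; download handling
# lives in BasePreprocessing):
#   preprocessing = CORAPreprocessing(source_path="/data/cora", download=True)
#   feature_spec = preprocessing.transform(use_cache=True)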
|
PyTorch/LanguageModeling/BERT/distillation | distillation | distillation_config_backbone | {"distillation": true,
"distillation_config": {"use_attention_scores": true,
"use_hidden_states": true,
"use_value_states": true,
"use_embedding_states": false,
"use_pred_states": false,
"attention_loss": "kld",
"hidden_state_loss": "cosine",
"embedding_state_loss": "cosine",
"value_state_loss": "kld",
"student_teacher_layer_mapping": "last_layer"}
}
|
TensorFlow2/Segmentation/UNet_Medical/examples | examples | unet_INFER | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script launches U-Net in FP32 on 1 GPU for inference with batch size 1. Usage:
# bash unet_INFER.sh <path to dataset> <path to model directory> <fold>
horovodrun -np 1 python main.py --data_dir $1 --model_dir $2 --batch_size 1 --exec_mode predict --xla --fold $3
|
PyTorch/Classification/GPUNet/triton/deployment_toolkit/triton_performance_runner/model_analyzer | model_analyzer | exceptions | # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class ModelAnalyzerException(Exception):
def __init__(self, message: str):
self._message = message
def __str__(self):
"""
Get the exception string representation.
Returns
-------
str
The message associated with this exception, or None if no message.
"""
return self._message
@property
def message(self):
"""
Get the exception message.
Returns
-------
str
The message associated with this exception, or None if no message.
"""
return self._message
|
TensorFlow/Segmentation/UNet_Medical/examples | examples | unet_TRAIN_8GPU | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script launches U-Net in FP32 on 8 GPUs and runs 5-fold cross-validation training for 6400 iterations.
# Usage:
# bash unet_TRAIN_8GPU.sh <path to dataset> <path to results directory> <batch size>
horovodrun -np 8 python main.py --data_dir $1 --model_dir $2 --log_every 100 --max_steps 6400 --batch_size $3 --exec_mode train_and_evaluate --crossvalidation_idx 0 --augment --xla > $2/log_FP32_8GPU_fold0.txt
horovodrun -np 8 python main.py --data_dir $1 --model_dir $2 --log_every 100 --max_steps 6400 --batch_size $3 --exec_mode train_and_evaluate --crossvalidation_idx 1 --augment --xla > $2/log_FP32_8GPU_fold1.txt
horovodrun -np 8 python main.py --data_dir $1 --model_dir $2 --log_every 100 --max_steps 6400 --batch_size $3 --exec_mode train_and_evaluate --crossvalidation_idx 2 --augment --xla > $2/log_FP32_8GPU_fold2.txt
horovodrun -np 8 python main.py --data_dir $1 --model_dir $2 --log_every 100 --max_steps 6400 --batch_size $3 --exec_mode train_and_evaluate --crossvalidation_idx 3 --augment --xla > $2/log_FP32_8GPU_fold3.txt
horovodrun -np 8 python main.py --data_dir $1 --model_dir $2 --log_every 100 --max_steps 6400 --batch_size $3 --exec_mode train_and_evaluate --crossvalidation_idx 4 --augment --xla > $2/log_FP32_8GPU_fold4.txt
python utils/parse_results.py --model_dir $2 --exec_mode convergence --env FP32_8GPU |
PyTorch/Recommendation/DLRM/tests/feature_specs | feature_specs | different_feature_names | channel_spec:
categorical:
- 65ytfg.bin
- dgtwrg.bin
- hmfgd.bin
- 6tyjgh.bin
- 67yu.bin
- l6rtd.bin
- ouikjhfg.bin
- 65ry.bin
- 5yhtrfg.bin
- 65rty.bin
- 34ywesh5rtg.bin
- w4su6js.bin
- 45wyhtr.bin
- u65rhty.bin
- tujy.bin
- tyjdh.bin
- ujtyesh.bin
- 5e7tdyj.bin
- 46rjydh.bin
- 8kiujynrht.bin
- fsgh.bin
- 34eyr.bin
- we5etydj.bin
- fsghfsdgh.bin
- hrthshs.bin
- tujyhfg.bin
label:
- qwer
numerical: &id001
- gadsfgsdfg
- 5yrthf
- 45ryhtf
- u5j6yrhtfd
- u5rtg3qq
- j65ee5he5
- yhe5h
- 4y5e6ru
- 5yfwerf
- g53g6y635
- 42c524
- bge5v6gve5
- jhw5rf
feature_spec:
65ytfg.bin:
cardinality: 100000
dtype: int32
dgtwrg.bin:
cardinality: 100000
dtype: int32
34ywesh5rtg.bin:
cardinality: 100000
dtype: int32
w4su6js.bin:
cardinality: 100000
dtype: int32
45wyhtr.bin:
cardinality: 100000
dtype: int32
u65rhty.bin:
cardinality: 100000
dtype: int32
tujy.bin:
cardinality: 100000
dtype: int32
tyjdh.bin:
cardinality: 100000
dtype: int32
ujtyesh.bin:
cardinality: 100000
dtype: int32
5e7tdyj.bin:
cardinality: 100000
dtype: int32
46rjydh.bin:
cardinality: 100000
dtype: int32
8kiujynrht.bin:
cardinality: 100000
dtype: int32
hmfgd.bin:
cardinality: 100000
dtype: int32
fsgh.bin:
cardinality: 100000
dtype: int32
34eyr.bin:
cardinality: 100000
dtype: int32
we5etydj.bin:
cardinality: 100000
dtype: int32
fsghfsdgh.bin:
cardinality: 100000
dtype: int32
hrthshs.bin:
cardinality: 100000
dtype: int32
tujyhfg.bin:
cardinality: 100000
dtype: int32
6tyjgh.bin:
cardinality: 100000
dtype: int32
67yu.bin:
cardinality: 100000
dtype: int32
l6rtd.bin:
cardinality: 100000
dtype: int32
ouikjhfg.bin:
cardinality: 100000
dtype: int32
65ry.bin:
cardinality: 100000
dtype: int32
5yhtrfg.bin:
cardinality: 100000
dtype: int32
65rty.bin:
cardinality: 100000
dtype: int32
qwer:
dtype: bool
gadsfgsdfg:
dtype: float16
5yrthf:
dtype: float16
42c524:
dtype: float16
bge5v6gve5:
dtype: float16
jhw5rf:
dtype: float16
45ryhtf:
dtype: float16
u5j6yrhtfd:
dtype: float16
u5rtg3qq:
dtype: float16
j65ee5he5:
dtype: float16
yhe5h:
dtype: float16
4y5e6ru:
dtype: float16
5yfwerf:
dtype: float16
g53g6y635:
dtype: float16
metadata: {}
source_spec:
test:
- features: *id001
files:
- test/numerical.bin
type: split_binary
- features:
- qwer
files:
- test/label.bin
type: split_binary
- features:
- 65ytfg.bin
files:
- test/65ytfg.bin
type: split_binary
- features:
- dgtwrg.bin
files:
- test/dgtwrg.bin
type: split_binary
- features:
- hmfgd.bin
files:
- test/hmfgd.bin
type: split_binary
- features:
- 6tyjgh.bin
files:
- test/6tyjgh.bin
type: split_binary
- features:
- 67yu.bin
files:
- test/67yu.bin
type: split_binary
- features:
- l6rtd.bin
files:
- test/l6rtd.bin
type: split_binary
- features:
- ouikjhfg.bin
files:
- test/ouikjhfg.bin
type: split_binary
- features:
- 65ry.bin
files:
- test/65ry.bin
type: split_binary
- features:
- 5yhtrfg.bin
files:
- test/5yhtrfg.bin
type: split_binary
- features:
- 65rty.bin
files:
- test/65rty.bin
type: split_binary
- features:
- 34ywesh5rtg.bin
files:
- test/34ywesh5rtg.bin
type: split_binary
- features:
- w4su6js.bin
files:
- test/w4su6js.bin
type: split_binary
- features:
- 45wyhtr.bin
files:
- test/45wyhtr.bin
type: split_binary
- features:
- u65rhty.bin
files:
- test/u65rhty.bin
type: split_binary
- features:
- tujy.bin
files:
- test/tujy.bin
type: split_binary
- features:
- tyjdh.bin
files:
- test/tyjdh.bin
type: split_binary
- features:
- ujtyesh.bin
files:
- test/ujtyesh.bin
type: split_binary
- features:
- 5e7tdyj.bin
files:
- test/5e7tdyj.bin
type: split_binary
- features:
- 46rjydh.bin
files:
- test/46rjydh.bin
type: split_binary
- features:
- 8kiujynrht.bin
files:
- test/8kiujynrht.bin
type: split_binary
- features:
- fsgh.bin
files:
- test/fsgh.bin
type: split_binary
- features:
- 34eyr.bin
files:
- test/34eyr.bin
type: split_binary
- features:
- we5etydj.bin
files:
- test/we5etydj.bin
type: split_binary
- features:
- fsghfsdgh.bin
files:
- test/fsghfsdgh.bin
type: split_binary
- features:
- hrthshs.bin
files:
- test/hrthshs.bin
type: split_binary
- features:
- tujyhfg.bin
files:
- test/tujyhfg.bin
type: split_binary
train:
- features: *id001
files:
- train/numerical.bin
type: split_binary
- features:
- qwer
files:
- train/label.bin
type: split_binary
- features:
- 65ytfg.bin
files:
- train/65ytfg.bin
type: split_binary
- features:
- dgtwrg.bin
files:
- train/dgtwrg.bin
type: split_binary
- features:
- hmfgd.bin
files:
- train/hmfgd.bin
type: split_binary
- features:
- 6tyjgh.bin
files:
- train/6tyjgh.bin
type: split_binary
- features:
- 67yu.bin
files:
- train/67yu.bin
type: split_binary
- features:
- l6rtd.bin
files:
- train/l6rtd.bin
type: split_binary
- features:
- ouikjhfg.bin
files:
- train/ouikjhfg.bin
type: split_binary
- features:
- 65ry.bin
files:
- train/65ry.bin
type: split_binary
- features:
- 5yhtrfg.bin
files:
- train/5yhtrfg.bin
type: split_binary
- features:
- 65rty.bin
files:
- train/65rty.bin
type: split_binary
- features:
- 34ywesh5rtg.bin
files:
- train/34ywesh5rtg.bin
type: split_binary
- features:
- w4su6js.bin
files:
- train/w4su6js.bin
type: split_binary
- features:
- 45wyhtr.bin
files:
- train/45wyhtr.bin
type: split_binary
- features:
- u65rhty.bin
files:
- train/u65rhty.bin
type: split_binary
- features:
- tujy.bin
files:
- train/tujy.bin
type: split_binary
- features:
- tyjdh.bin
files:
- train/tyjdh.bin
type: split_binary
- features:
- ujtyesh.bin
files:
- train/ujtyesh.bin
type: split_binary
- features:
- 5e7tdyj.bin
files:
- train/5e7tdyj.bin
type: split_binary
- features:
- 46rjydh.bin
files:
- train/46rjydh.bin
type: split_binary
- features:
- 8kiujynrht.bin
files:
- train/8kiujynrht.bin
type: split_binary
- features:
- fsgh.bin
files:
- train/fsgh.bin
type: split_binary
- features:
- 34eyr.bin
files:
- train/34eyr.bin
type: split_binary
- features:
- we5etydj.bin
files:
- train/we5etydj.bin
type: split_binary
- features:
- fsghfsdgh.bin
files:
- train/fsghfsdgh.bin
type: split_binary
- features:
- hrthshs.bin
files:
- train/hrthshs.bin
type: split_binary
- features:
- tujyhfg.bin
files:
- train/tujyhfg.bin
type: split_binary
|
PyTorch/Recommendation/DLRM/tests | tests | test_all_configs | #!/bin/bash
set -e
set -x
NAMES=${1:-'*.yaml'}
TARGET=feature_specs/${NAMES}
OPTIONS=${2-""}
for file in ${TARGET};
do
echo "${file}";
done
for fspec_file in ${TARGET};
do
SYNTH_DATA_DIR=/tmp/generated_data/${fspec_file}
# generate data based on fspec
python -m dlrm.scripts.prepare_synthetic_dataset --feature_spec ${fspec_file} --synthetic_dataset_dir ${SYNTH_DATA_DIR}
# train on the data
for mlp in True False;
do
for graphs in True;
do
for dot in cuda_dot dot;
do
for amp in True False;
do
python -m dlrm.scripts.main --mode train --dataset ${SYNTH_DATA_DIR} --optimized_mlp=${mlp} --cuda_graphs=${graphs} --interaction_op=${dot} --embedding_type=joint_sparse --amp=${amp}
#DGX-2
python -m torch.distributed.launch --no_python --use_env --nproc_per_node 8 bash -c "/workspace/dlrm/bind.sh --cpu=exclusive -- python -m dlrm.scripts.main --dataset ${SYNTH_DATA_DIR} --optimized_mlp=${mlp} --cuda_graphs=${graphs} --interaction_op=${dot} --embedding_type=joint_sparse --amp=${amp}"
#DGX A100
#python -m torch.distributed.launch --no_python --use_env --nproc_per_node 8 bash -c "/workspace/dlrm/bind.sh --cpu=/workspace/dlrm/dgxa100_ccx.sh --mem=/workspace/dlrm/dgxa100_ccx.sh python -m dlrm.scripts.main --dataset ${SYNTH_DATA_DIR} --optimized_mlp=${mlp} --cuda_graphs=${graphs} --interaction_op=${dot} --embedding_type=joint_sparse --amp=${amp}"
done;
done
done
done
# delete the data
rm -r ${SYNTH_DATA_DIR}
done
#
# usage:
# docker build . -t nvidia_dlrm_pyt
# docker run --security-opt seccomp=unconfined --runtime=nvidia -it --rm --ipc=host -v ${PWD}/data:/data nvidia_dlrm_pyt bash
# cd tests
# bash test_all_configs.sh |
PyTorch/Detection/SSD/ssd | ssd | coco_pipeline | # Copyright (c) 2018-2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ctypes
import time
import logging
import numpy as np
import torch
# DALI imports
import nvidia.dali as dali
from nvidia.dali.pipeline import Pipeline
from nvidia.dali.types import to_numpy_type
class COCOPipeline(Pipeline):
def __init__(self, batch_size, file_root, annotations_file, default_boxes,
device_id, num_shards,
output_fp16=False, output_nhwc=False, pad_output=False,
num_threads=1, seed=15):
super(COCOPipeline, self).__init__(batch_size=batch_size,
device_id=device_id,
num_threads=num_threads,
seed=seed)
if torch.distributed.is_initialized():
shard_id = torch.distributed.get_rank()
else:
shard_id = 0
# Data loader and image decoder
self.input = dali.ops.readers.COCO(file_root=file_root,
annotations_file=annotations_file,
shard_id=shard_id,
num_shards=num_shards,
ratio=True,
ltrb=True,
shuffle_after_epoch=True,
skip_empty=True)
self.decode_slice = dali.ops.decoders.ImageSlice(device="cpu",
output_type=dali.types.RGB)
# Augmentation techniques
## Random crop
self.crop = dali.ops.RandomBBoxCrop(device="cpu",
aspect_ratio=[0.5, 2.0],
thresholds=[0, 0.1, 0.3, 0.5, 0.7, 0.9],
scaling=[0.3, 1.0],
bbox_layout="xyXY",
allow_no_crop=True,
num_attempts=1)
## Color twist
self.hsv = dali.ops.Hsv(device="gpu",
dtype=dali.types.FLOAT) # use float to avoid clipping and quantizing the intermediate result
self.bc = dali.ops.BrightnessContrast(device="gpu",
contrast_center=128, # input is in the [0, 255] range
dtype=dali.types.UINT8)
## Cropping and normalization
dtype = dali.types.FLOAT16 if output_fp16 else dali.types.FLOAT
output_layout = dali.types.NHWC if output_nhwc else dali.types.NCHW
self.normalize = dali.ops.CropMirrorNormalize(
device="gpu",
crop=(300, 300),
mean=[0.0, 0.0, 0.0],
std=[255.0, 255.0, 255.0],
mirror=0,
dtype=dtype,
output_layout=output_layout,
pad_output=pad_output)
## Flipping
self.flip = dali.ops.Flip(device="cpu")
self.bbflip = dali.ops.BbFlip(device="cpu", ltrb=True)
# Resize
self.resize = dali.ops.Resize(device="cpu",
resize_x=300,
resize_y=300)
# Random variables
self.rng1 = dali.ops.random.Uniform(range=[0.5, 1.5])
self.rng2 = dali.ops.random.Uniform(range=[0.875, 1.125])
self.rng3 = dali.ops.random.Uniform(range=[-0.5, 0.5])
self.flip_coin = dali.ops.random.CoinFlip(probability=0.5)
# bbox encoder
self.anchors = default_boxes(order='ltrb').cpu().numpy().flatten().tolist()
self.box_encoder = dali.ops.BoxEncoder(device="cpu",
criteria=0.5,
anchors=self.anchors)
def define_graph(self):
saturation = self.rng1()
contrast = self.rng1()
brightness = self.rng2()
hue = self.rng3()
coin_rnd = self.flip_coin()
inputs, bboxes, labels = self.input(name="Reader")
crop_begin, crop_size, bboxes, labels = self.crop(bboxes, labels)
images = self.decode_slice(inputs, crop_begin, crop_size)
images = self.flip(images, horizontal=coin_rnd)
bboxes = self.bbflip(bboxes, horizontal=coin_rnd)
images = self.resize(images)
images = images.gpu()
images = self.hsv(images, hue=hue, saturation=saturation)
images = self.bc(images, brightness=brightness, contrast=contrast)
images = self.normalize(images)
bboxes, labels = self.box_encoder(bboxes, labels)
# bboxes and images and labels on GPU
return (images, bboxes.gpu(), labels.gpu())
to_torch_type = {
np.float32 : torch.float32,
np.float64 : torch.float64,
np.float16 : torch.float16,
np.uint8 : torch.uint8,
np.int8 : torch.int8,
np.int16 : torch.int16,
np.int32 : torch.int32,
np.int64 : torch.int64
}
def feed_ndarray(dali_tensor, arr):
"""
Copy the contents of a DALI tensor to a PyTorch tensor.
Parameters
----------
`dali_tensor` : nvidia.dali.backend.TensorCPU or nvidia.dali.backend.TensorGPU
Tensor from which to copy
`arr` : torch.Tensor
Destination of the copy
"""
assert dali_tensor.shape() == list(arr.size()), \
("Shapes do not match: DALI tensor has size {0}"
", but PyTorch Tensor has size {1}".format(dali_tensor.shape(), list(arr.size())))
# turn the raw int into a C void pointer
c_type_pointer = ctypes.c_void_p(arr.data_ptr())
dali_tensor.copy_to_external(c_type_pointer)
return arr
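# Usage sketch (illustrative): given a DALI GPU tensor `dali_t` from a built
# pipeline, copy it into a freshly allocated torch tensor of matching shape
# (the float32 dtype below is an assumption for the example):
#   dst = torch.zeros(dali_t.shape(), dtype=torch.float32, device='cuda')
#   feed_ndarray(dali_t, dst)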
class DALICOCOIterator(object):
"""
COCO DALI iterator for PyTorch.
Parameters
----------
pipelines : list of nvidia.dali.pipeline.Pipeline
List of pipelines to use
size : int
Epoch size.
"""
def __init__(self, pipelines, size):
if not isinstance(pipelines, list):
pipelines = [pipelines]
self._num_gpus = len(pipelines)
assert pipelines is not None, "Number of provided pipelines has to be at least 1"
self.batch_size = pipelines[0].max_batch_size
self._size = size
self._pipes = pipelines
# Build all pipelines
for p in self._pipes:
p.build()
# Use double-buffering of data batches
self._data_batches = [[None, None, None, None] for i in range(self._num_gpus)]
self._counter = 0
self._current_data_batch = 0
self.output_map = ["image", "bboxes", "labels"]
# We need data about the batches (like shape information),
# so we need to run a single batch as part of setup to get that info
self._first_batch = None
self._first_batch = self.next()
def __next__(self):
if self._first_batch is not None:
batch = self._first_batch
self._first_batch = None
return batch
if self._counter > self._size:
raise StopIteration
# Gather outputs
outputs = []
for p in self._pipes:
p._prefetch()
for p in self._pipes:
outputs.append(p.share_outputs())
for i in range(self._num_gpus):
dev_id = self._pipes[i].device_id
out_images = []
bboxes = []
labels = []
# segregate outputs into image/labels/bboxes entries
for j, out in enumerate(outputs[i]):
if self.output_map[j] == "image":
out_images.append(out)
elif self.output_map[j] == "bboxes":
bboxes.append(out)
elif self.output_map[j] == "labels":
labels.append(out)
# Change DALI TensorLists into Tensors
images = [x.as_tensor() for x in out_images]
images_shape = [x.shape() for x in images]
# Prepare bboxes shapes
bboxes_shape = []
for j in range(len(bboxes)):
bboxes_shape.append([])
for k in range(len(bboxes[j])):
bboxes_shape[j].append(bboxes[j][k].shape())
# Prepare labels shapes and offsets
labels_shape = []
bbox_offsets = []
torch.cuda.synchronize()
for j in range(len(labels)):
labels_shape.append([])
bbox_offsets.append([0])
for k in range(len(labels[j])):
lshape = labels[j][k].shape()
bbox_offsets[j].append(bbox_offsets[j][k] + lshape[0])
labels_shape[j].append(lshape)
# We always need to allocate new memory as bboxes and labels vary in shape
images_torch_type = to_torch_type[to_numpy_type(images[0].dtype)]
bboxes_torch_type = to_torch_type[to_numpy_type(bboxes[0][0].dtype)]
labels_torch_type = to_torch_type[to_numpy_type(labels[0][0].dtype)]
torch_gpu_device = torch.device('cuda', dev_id)
torch_cpu_device = torch.device('cpu')
pyt_images = [torch.zeros(shape, dtype=images_torch_type, device=torch_gpu_device) for shape in images_shape]
pyt_bboxes = [[torch.zeros(shape, dtype=bboxes_torch_type, device=torch_gpu_device) for shape in shape_list] for shape_list in bboxes_shape]
pyt_labels = [[torch.zeros(shape, dtype=labels_torch_type, device=torch_gpu_device) for shape in shape_list] for shape_list in labels_shape]
pyt_offsets = [torch.zeros(len(offset), dtype=torch.int32, device=torch_cpu_device) for offset in bbox_offsets]
self._data_batches[i][self._current_data_batch] = (pyt_images, pyt_bboxes, pyt_labels, pyt_offsets)
# Copy data from DALI Tensors to torch tensors
for j, i_arr in enumerate(images):
feed_ndarray(i_arr, pyt_images[j])
for j, b_list in enumerate(bboxes):
for k in range(len(b_list)):
if (pyt_bboxes[j][k].shape[0] != 0):
feed_ndarray(b_list[k], pyt_bboxes[j][k])
pyt_bboxes[j] = torch.cat(pyt_bboxes[j])
for j, l_list in enumerate(labels):
for k in range(len(l_list)):
if (pyt_labels[j][k].shape[0] != 0):
feed_ndarray(l_list[k], pyt_labels[j][k])
pyt_labels[j] = torch.cat(pyt_labels[j])
for j in range(len(pyt_offsets)):
pyt_offsets[j] = torch.IntTensor(bbox_offsets[j])
for p in self._pipes:
p.release_outputs()
p.schedule_run()
copy_db_index = self._current_data_batch
# Change index for double buffering
self._current_data_batch = (self._current_data_batch + 1) % 2
self._counter += self._num_gpus * self.batch_size
return [db[copy_db_index] for db in self._data_batches]
def next(self):
"""
Returns the next batch of data.
"""
return self.__next__()
def __iter__(self):
return self
def reset(self):
"""
Resets the iterator after the full epoch.
DALI iterators do not support resetting before the end of the epoch
and will ignore such a request.
"""
if self._counter > self._size:
self._counter = self._counter % self._size
else:
logging.warning("DALI iterator does not support resetting while epoch is not finished. Ignoring...")
|
PyTorch/SpeechSynthesis/FastPitch/notebooks | notebooks | FastPitch_voice_modification | #!/usr/bin/env python
# coding: utf-8
# In[ ]:
# Copyright 2020 NVIDIA Corporation. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# # FastPitch: Voice Modification with Pre-defined Pitch Transformations
# The [FastPitch](https://arxiv.org/abs/2006.06873) model is based on the [FastSpeech](https://arxiv.org/abs/1905.09263) model. Similarly to [FastSpeech2](https://arxiv.org/abs/2006.04558), which was developed concurrently, it learns to predict the pitch contour and conditions generation on that contour.
#
# The simple mechanism of predicting the pitch at the grapheme level (rather than the frame level, as FastSpeech2 does) makes it easy to alter the pitch during synthesis. FastPitch can thus change the perceived emotional state of the speaker, or slightly emphasise certain lexical units.
# ## Requirements
# Run the notebook inside the container. By default the container forwards port `8888`.
# ```
# bash scripts/docker/interactive.sh
#
# # inside the container
# cd notebooks
# jupyter notebook --ip='*' --port=8888
# ```
# Please refer to the Requirements section in `README.md` for more details, including how to run outside the container.
# In[ ]:
import os
assert os.getcwd().split('/')[-1] == 'notebooks'
# ## Generate audio samples
# Training a FastPitch model from scratch takes 3 to 27 hours depending on the type and number of GPUs; performance numbers can be found in the "Training performance results" section of `README.md`. Therefore, to save time when running this notebook, we recommend downloading the pretrained FastPitch checkpoints from NGC for inference.
#
# You can find the FP32 checkpoint at [NGC](https://ngc.nvidia.com/catalog/models/nvidia:fastpitch_pyt_fp32_ckpt_v1/files), and the AMP (Automatic Mixed Precision) checkpoint at [NGC](https://ngc.nvidia.com/catalog/models/nvidia:fastpitch_pyt_amp_ckpt_v1/files).
#
# To synthesize audio, you will need a WaveGlow model, which generates waveforms based on mel-spectrograms generated by FastPitch. You can download a pre-trained WaveGlow AMP model at [NGC](https://ngc.nvidia.com/catalog/models/nvidia:waveglow256pyt_fp16).
# In[ ]:
get_ipython().system(' mkdir -p output')
get_ipython().system(" MODEL_DIR='../pretrained_models' ../scripts/download_fastpitch.sh")
get_ipython().system(" MODEL_DIR='../pretrained_models' ../scripts/download_waveglow.sh")
# You can perform inference using the respective checkpoints that are passed as the `--fastpitch` and `--waveglow` arguments. Next, you will use the FastPitch model to generate audio samples for input text, including the basic version and variations in pace, fade out, and pitch transforms.
# In[ ]:
import IPython
# store paths in aux variables
fastp = '../pretrained_models/fastpitch/nvidia_fastpitch_200518.pt'
waveg = '../pretrained_models/waveglow/waveglow_1076430_14000_amp.pt'
flags = f'--cuda --fastpitch {fastp} --waveglow {waveg} --wn-channels 256'
# ### 1. Basic speech synthesis
# You need to create an input file with some text, or just input the text in the below cell:
# In[ ]:
get_ipython().run_cell_magic('writefile', 'text.txt', 'The forms of printed letters should be beautiful, and that their arrangement on the page should be reasonable and a help to the shapeliness of the letters themselves.\n')
# Run the script below to generate audio from the input text file:
# In[ ]:
# basic synthesis
get_ipython().system('python ../inference.py {flags} -i text.txt -o output/original > /dev/null')
IPython.display.Audio("output/original/audio_0.wav")
# ### 2. Add variations to the generated speech
# FastPitch allows us to exert additional control over the synthesized utterances; the key controls are the pace, fade out, and pitch transforms.
# ### 2.1 Pace
# FastPitch allows you to linearly adjust the pace of synthesized speech, similar to the [FastSpeech](https://arxiv.org/abs/1905.09263) model. For instance, pass `--pace 0.5` for a twofold decrease in speed; `--pace 1.0` leaves the pace unchanged.
# In[ ]:
# Slow the speech down twofold with --pace 0.5
# (1.0 = unchanged)
get_ipython().system('python ../inference.py {flags} -i text.txt -o output/pace --pace 0.5 > /dev/null')
IPython.display.Audio("output/pace/audio_0.wav")
# ### 2.2 Raise or lower the pitch
# For every input character, the model predicts a pitch cue - an average pitch over a character in Hz. Pitch can be adjusted by transforming those pitch cues. A few simple examples are provided below.
# In[ ]:
# Raise/lower pitch by --pitch-transform-shift <Hz>
# Synthesize with a -50 Hz shift
get_ipython().system('python ../inference.py {flags} -i text.txt -o output/riselowpitch --pitch-transform-shift -50 > /dev/null')
IPython.display.Audio("output/riselowpitch/audio_0.wav")
# ### 2.3 Flatten the pitch
# In[ ]:
# Flatten the pitch to a constant value with --pitch-transform-flatten
get_ipython().system('python ../inference.py {flags} -i text.txt -o output/flattenpitch --pitch-transform-flatten > /dev/null')
IPython.display.Audio("output/flattenpitch/audio_0.wav")
# ### 2.4 Invert the pitch
# In[ ]:
# Invert pitch wrt. to the mean pitch with --pitch-transform-invert
get_ipython().system('python ../inference.py {flags} -i text.txt -o output/invertpitch --pitch-transform-invert > /dev/null')
IPython.display.Audio("output/invertpitch/audio_0.wav")
# ### 2.5 Amplify the pitch
# In[ ]:
# Amplify pitch wrt. to the mean pitch with --pitch-transform-amplify 2.0
# values in the (1.0, 3.0) range work best
get_ipython().system('python ../inference.py {flags} -i text.txt -o output/amplifypitch --pitch-transform-amplify 2.0 > /dev/null')
IPython.display.Audio("output/amplifypitch/audio_0.wav")
# ### 2.6 Combine the flags
# The flags can be combined. You can find all the available options by calling python inference.py --help.
# In[ ]:
get_ipython().system('python ../inference.py --help')
# The example below shows how to generate audio with a combination of the flags --pace --pitch-transform-flatten --pitch-transform-shift --pitch-transform-invert --pitch-transform-amplify
# In[ ]:
# Double the speed and combine multiple transformations
get_ipython().system('python ../inference.py {flags} -i text.txt -o output/combine --pace 2.0 --pitch-transform-flatten --pitch-transform-shift 50 --pitch-transform-invert --pitch-transform-amplify 1.5 > /dev/null')
IPython.display.Audio("output/combine/audio_0.wav")
# ### 3. Inference performance benchmark
# In[ ]:
# Benchmark inference using AMP
get_ipython().system('python ../inference.py {flags} --include-warmup --batch-size 8 --repeats 100 --torchscript --amp -i ../phrases/benchmark_8_128.tsv -o output/benchmark')
# ### 4. Next step
# Now you have learned how to generate high-quality audio from text using FastPitch, as well as how to add variations to the audio using the flags. You can experiment with more input texts, or change the inference settings, such as the pitch flags, batch size, and precision, to see how they affect the results.
#
# If you are interested in learning more about FastPitch, please check more samples (trained with multi-speaker) presented at [samples page](https://fastpitch.github.io/).
|
PyTorch/Segmentation/MaskRCNN/pytorch/maskrcnn_benchmark/csrc/cuda | cuda | box_encode | /**
* Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <ATen/ATen.h>
#include <ATen/cuda/CUDAContext.h>
#include <THC/THC.h>
#include <torch/torch.h>
#include <vector>
#include <iostream>
__global__ void box_encode_kernel(float *targets_dx, float *targets_dy, float *targets_dw, float *targets_dh,
float4 *boxes, float4 *anchors, float wx, float wy, float ww, float wh,
size_t gt, size_t idxJump) {
int idx = blockIdx.x * blockDim.x + threadIdx.x;
size_t row_offset;
float anchors_x1, anchors_x2, anchors_y1, anchors_y2,
boxes_x1, boxes_x2, boxes_y1, boxes_y2, ex_w, ex_h,
ex_ctr_x, ex_ctr_y, gt_w, gt_h, gt_ctr_x, gt_ctr_y;
for (int i = idx; i < gt; i += idxJump){
row_offset = i;
anchors_x1 = anchors[row_offset].x;
anchors_y1 = anchors[row_offset].y;
anchors_x2 = anchors[row_offset].z;
anchors_y2 = anchors[row_offset].w;
boxes_x1 = boxes[row_offset].x;
boxes_y1 = boxes[row_offset].y;
boxes_x2 = boxes[row_offset].z;
boxes_y2 = boxes[row_offset].w;
ex_w = anchors_x2 - anchors_x1 + 1;
ex_h = anchors_y2 - anchors_y1 + 1;
ex_ctr_x = anchors_x1 + 0.5 * ex_w;
ex_ctr_y = anchors_y1 + 0.5 * ex_h;
gt_w = boxes_x2 - boxes_x1 + 1;
gt_h = boxes_y2 - boxes_y1 + 1;
gt_ctr_x = boxes_x1 + 0.5 * gt_w;
gt_ctr_y = boxes_y1 + 0.5 * gt_h;
targets_dx[i] = wx * (gt_ctr_x - ex_ctr_x) / ex_w;
targets_dy[i] = wy * (gt_ctr_y - ex_ctr_y) / ex_h;
targets_dw[i] = ww * log(gt_w / ex_w);
targets_dh[i] = wh * log(gt_h / ex_h);
}
}
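// Worked example (illustrative): if a box coincides with its anchor, then
// gt_w == ex_w, gt_h == ex_h and the centers match, so the encoded targets
// are dx = dy = 0 and dw = ww * log(1) = 0, dh = wh * log(1) = 0.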
std::vector<at::Tensor> box_encode_cuda(at::Tensor boxes, at::Tensor anchors, float wx, float wy, float ww, float wh){
int minGridSize;
int blockSize;
cudaOccupancyMaxPotentialBlockSize(&minGridSize,
&blockSize,
(void*) box_encode_kernel,
0, // dynamic memory
0); // maximum utilized threads
long size = boxes.size(0);
auto targets_dx = torch::ones({size}, torch::CUDA(at::kFloat));
auto targets_dy = torch::ones({size}, torch::CUDA(at::kFloat));
auto targets_dw = torch::ones({size}, torch::CUDA(at::kFloat));
auto targets_dh = torch::ones({size}, torch::CUDA(at::kFloat));
dim3 gridDim(minGridSize);
dim3 blockDim(blockSize);
int idxJump = minGridSize * blockSize;
auto stream = at::cuda::getCurrentCUDAStream();
box_encode_kernel<<<gridDim,blockDim,0,stream.stream()>>>(targets_dx.data_ptr<float>(),
targets_dy.data_ptr<float>(),
targets_dw.data_ptr<float>(),
targets_dh.data_ptr<float>(),
(float4*) boxes.data_ptr<float>(),
(float4*) anchors.data_ptr<float>(),
wx, wy, ww, wh,
size, idxJump);
std::vector<at::Tensor> result;
result.push_back(targets_dx);
result.push_back(targets_dy);
result.push_back(targets_dw);
result.push_back(targets_dh);
return result;
}
|
TensorFlow/Segmentation/UNet_Medical/examples | examples | unet_INFER | # Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script launches U-Net in FP32 on 1 GPU for inference with batch size 1. Usage:
# bash examples/unet_INFER.sh <path to dataset> <path to model directory>
horovodrun -np 1 python main.py --data_dir $1 --model_dir $2 --batch_size 1 --exec_mode predict --xla
|
DGLPyTorch/DrugDiscovery/SE3Transformer/se3_transformer/model/layers | layers | attention | # Copyright (c) 2021-2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES
# SPDX-License-Identifier: MIT
import dgl
import numpy as np
import torch
import torch.nn as nn
from dgl import DGLGraph
from dgl.ops import edge_softmax
from torch import Tensor
from typing import Dict, Optional, Union
from se3_transformer.model.fiber import Fiber
from se3_transformer.model.layers.convolution import ConvSE3, ConvSE3FuseLevel
from se3_transformer.model.layers.linear import LinearSE3
from se3_transformer.runtime.utils import degree_to_dim, aggregate_residual, unfuse_features
from torch.cuda.nvtx import range as nvtx_range
class AttentionSE3(nn.Module):
""" Multi-headed sparse graph self-attention (SE(3)-equivariant) """
def __init__(
self,
num_heads: int,
key_fiber: Fiber,
value_fiber: Fiber
):
"""
:param num_heads: Number of attention heads
:param key_fiber: Fiber for the keys (and also for the queries)
:param value_fiber: Fiber for the values
"""
super().__init__()
self.num_heads = num_heads
self.key_fiber = key_fiber
self.value_fiber = value_fiber
def forward(
self,
value: Union[Tensor, Dict[str, Tensor]], # edge features (may be fused)
key: Union[Tensor, Dict[str, Tensor]], # edge features (may be fused)
query: Dict[str, Tensor], # node features
graph: DGLGraph
):
with nvtx_range('AttentionSE3'):
with nvtx_range('reshape keys and queries'):
if isinstance(key, Tensor):
# case where features of all types are fused
key = key.reshape(key.shape[0], self.num_heads, -1)
# need to reshape queries that way to keep the same layout as keys
out = torch.cat([query[str(d)] for d in self.key_fiber.degrees], dim=-1)
query = out.reshape(list(query.values())[0].shape[0], self.num_heads, -1)
else:
# features are not fused, need to fuse and reshape them
key = self.key_fiber.to_attention_heads(key, self.num_heads)
query = self.key_fiber.to_attention_heads(query, self.num_heads)
with nvtx_range('attention dot product + softmax'):
# Compute attention weights (softmax of inner product between key and query)
edge_weights = dgl.ops.e_dot_v(graph, key, query).squeeze(-1)
edge_weights = edge_weights / np.sqrt(self.key_fiber.num_features)
edge_weights = edge_softmax(graph, edge_weights)
edge_weights = edge_weights[..., None, None]
with nvtx_range('weighted sum'):
if isinstance(value, Tensor):
# features of all types are fused
v = value.view(value.shape[0], self.num_heads, -1, value.shape[-1])
weights = edge_weights * v
feat_out = dgl.ops.copy_e_sum(graph, weights)
feat_out = feat_out.view(feat_out.shape[0], -1, feat_out.shape[-1]) # merge heads
out = unfuse_features(feat_out, self.value_fiber.degrees)
else:
out = {}
for degree, channels in self.value_fiber:
v = value[str(degree)].view(-1, self.num_heads, channels // self.num_heads,
degree_to_dim(degree))
weights = edge_weights * v
res = dgl.ops.copy_e_sum(graph, weights)
out[str(degree)] = res.view(-1, channels, degree_to_dim(degree)) # merge heads
return out
class AttentionBlockSE3(nn.Module):
""" Multi-headed sparse graph self-attention block with skip connection, linear projection (SE(3)-equivariant) """
def __init__(
self,
fiber_in: Fiber,
fiber_out: Fiber,
fiber_edge: Optional[Fiber] = None,
num_heads: int = 4,
channels_div: int = 2,
use_layer_norm: bool = False,
max_degree: int = 4,
fuse_level: ConvSE3FuseLevel = ConvSE3FuseLevel.FULL,
low_memory: bool = False,
**kwargs
):
"""
:param fiber_in: Fiber describing the input features
:param fiber_out: Fiber describing the output features
:param fiber_edge: Fiber describing the edge features (node distances excluded)
:param num_heads: Number of attention heads
:param channels_div: Divide the channels by this integer for computing values
:param use_layer_norm: Apply layer normalization between MLP layers
:param max_degree: Maximum degree used in the bases computation
:param fuse_level: Maximum fuse level to use in TFN convolutions
"""
super().__init__()
if fiber_edge is None:
fiber_edge = Fiber({})
self.fiber_in = fiber_in
# value_fiber has same structure as fiber_out but #channels divided by 'channels_div'
value_fiber = Fiber([(degree, channels // channels_div) for degree, channels in fiber_out])
# key_query_fiber has the same structure as fiber_out, but only degrees which are in fiber_in
# (queries are merely projected, hence degrees have to match input)
key_query_fiber = Fiber([(fe.degree, fe.channels) for fe in value_fiber if fe.degree in fiber_in.degrees])
self.to_key_value = ConvSE3(fiber_in, value_fiber + key_query_fiber, pool=False, fiber_edge=fiber_edge,
use_layer_norm=use_layer_norm, max_degree=max_degree, fuse_level=fuse_level,
allow_fused_output=True, low_memory=low_memory)
self.to_query = LinearSE3(fiber_in, key_query_fiber)
self.attention = AttentionSE3(num_heads, key_query_fiber, value_fiber)
self.project = LinearSE3(value_fiber + fiber_in, fiber_out)
def forward(
self,
node_features: Dict[str, Tensor],
edge_features: Dict[str, Tensor],
graph: DGLGraph,
basis: Dict[str, Tensor]
):
with nvtx_range('AttentionBlockSE3'):
with nvtx_range('keys / values'):
fused_key_value = self.to_key_value(node_features, edge_features, graph, basis)
key, value = self._get_key_value_from_fused(fused_key_value)
with nvtx_range('queries'):
query = self.to_query(node_features)
z = self.attention(value, key, query, graph)
z_concat = aggregate_residual(node_features, z, 'cat')
return self.project(z_concat)
def _get_key_value_from_fused(self, fused_key_value):
# Extract keys and queries features from fused features
if isinstance(fused_key_value, Tensor):
# Previous layer was a fully fused convolution
value, key = torch.chunk(fused_key_value, chunks=2, dim=-2)
else:
key, value = {}, {}
for degree, feat in fused_key_value.items():
if int(degree) in self.fiber_in.degrees:
value[degree], key[degree] = torch.chunk(feat, chunks=2, dim=-2)
else:
value[degree] = feat
return key, value
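# Minimal construction sketch (channel/degree choices are arbitrary):
#   block = AttentionBlockSE3(fiber_in=Fiber({0: 32, 1: 32}),
#                             fiber_out=Fiber({0: 32, 1: 32}),
#                             fiber_edge=Fiber({}), num_heads=4)
#   out = block(node_features, edge_features, graph, basis)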
|
PyTorch/Classification/GPUNet/triton/deployment_toolkit/triton_performance_runner/perf_analyzer | perf_analyzer | warmup | # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import pathlib
from distutils.version import LooseVersion
from importlib.metadata import version
from typing import List, Optional
# method from PEP-366 to support relative import in executed modules
if __package__ is None:
__package__ = pathlib.Path(__file__).parent.name
from ...core import EvaluationMode, MeasurementMode, OfflineMode
from ...utils import parse_server_url
from .perf_analyzer import PerfAnalyzer
from .perf_config import PerfAnalyzerConfig
LOGGER = logging.getLogger("warmup")
TRITON_CLIENT_VERSION = LooseVersion(version("tritonclient"))
class PerfAnalyzerWarmupRunner:
def __init__(
self,
server_url: str,
model_name: str,
batch_sizes: List[int],
concurrency: List[int],
input_data: str,
input_shapes: List[str],
measurement_mode: MeasurementMode,
measurement_interval: int,
measurement_request_count: int,
offline_mode: OfflineMode,
evaluation_mode: EvaluationMode,
output_shared_memory_size: int,
timeout: Optional[int],
):
self._model_name = model_name
self._input_data = input_data
self._input_shapes = input_shapes
self._measurement_mode = measurement_mode
self._offline_mode = offline_mode
self._evaluation_mode = evaluation_mode
self._output_shared_memory_size = output_shared_memory_size
self._protocol, self._host, self._port = parse_server_url(server_url)
self._measurement_interval = 2 * measurement_interval
self._measurement_request_count = 2 * measurement_request_count
self._batch_sizes = [min(batch_sizes)]
self._concurrency = [max(concurrency)]
self._timeout = timeout
def run(self):
for batch_size in self._batch_sizes:
for concurrency in self._concurrency:
params = {
"model-name": self._model_name,
"model-version": 1,
"batch-size": batch_size,
"url": f"{self._host}:{self._port}",
"protocol": self._protocol.value,
"input-data": self._input_data,
"measurement-interval": self._measurement_interval,
"concurrency-range": f"{concurrency}:{concurrency}:1",
"verbose": True,
}
if TRITON_CLIENT_VERSION >= LooseVersion("2.11.0"):
params["measurement-mode"] = self._measurement_mode.value
params["measurement-request-count"] = self._measurement_request_count
if self._evaluation_mode == EvaluationMode.OFFLINE:
params["shared-memory"] = self._offline_mode.value
params["output-shared-memory-size"] = self._output_shared_memory_size
config = PerfAnalyzerConfig()
for param, value in params.items():
config[param] = value
for shape in self._input_shapes:
config["shape"] = shape
perf_analyzer = PerfAnalyzer(config=config, timeout=self._timeout)
perf_analyzer.run()
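# Usage sketch (all values are placeholders; enum members are assumed to come
# from the deployment toolkit's `core` module imported above):
#   warmup = PerfAnalyzerWarmupRunner(
#       server_url="http://localhost:8000", model_name="gpunet",
#       batch_sizes=[1, 8], concurrency=[1, 16],
#       input_data="random", input_shapes=[],
#       measurement_mode=MeasurementMode.COUNT_WINDOWS,
#       measurement_interval=5000, measurement_request_count=50,
#       offline_mode=OfflineMode.SYSTEM, evaluation_mode=EvaluationMode.ONLINE,
#       output_shared_memory_size=102400, timeout=600)
#   warmup.run()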
|
TensorFlow/Classification/ConvNets/resnext101-32x4d/training | training | DGX2_RNxt101-32x4d_FP32_250E | #!/bin/bash
# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
WORKSPACE=${1:-"/workspace/rn50v15_tf"}
DATA_DIR=${2:-"/data"}
OTHER=${@:3}
if [[ ! -z "${BIND_TO_SOCKET}" ]]; then
BIND_TO_SOCKET="--bind-to socket"
fi
mpiexec --allow-run-as-root ${BIND_TO_SOCKET} -np 8 python3 main.py --arch=resnext101-32x4d \
--mode=train_and_evaluate --iter_unit=epoch --num_iter=250 --mixup=0.2 \
--batch_size=64 --warmup_steps=100 --cosine_lr --label_smoothing 0.1 \
--lr_init=0.256 --lr_warmup_epochs=8 --momentum=0.875 --weight_decay=6.103515625e-05 \
--data_dir=${DATA_DIR}/tfrecords --data_idx_dir=${DATA_DIR}/dali_idx \
--results_dir=${WORKSPACE}/results --weight_init=fan_in ${OTHER}
|
TensorFlow/Classification/ConvNets/resnet50v1.5/training | training | DGX1_RN50_AMP_250E | #!/bin/bash
# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
WORKSPACE=${1:-"/workspace/rn50v15_tf"}
DATA_DIR=${2:-"/data"}
OTHER=${@:3}
if [[ ! -z "${BIND_TO_SOCKET}" ]]; then
BIND_TO_SOCKET="--bind-to socket"
fi
mpiexec --allow-run-as-root ${BIND_TO_SOCKET} -np 8 python3 main.py --arch=resnet50 \
--mode=train_and_evaluate --iter_unit=epoch --num_iter=250 --mixup=0.2 \
--batch_size=256 --warmup_steps=100 --cosine_lr --label_smoothing 0.1 \
--lr_init=0.256 --lr_warmup_epochs=8 --momentum=0.875 --weight_decay=3.0517578125e-05 \
--amp --static_loss_scale 128 \
--data_dir=${DATA_DIR}/tfrecords --data_idx_dir=${DATA_DIR}/dali_idx \
--results_dir=${WORKSPACE}/results --weight_init=fan_in ${OTHER}
|
PyTorch/SpeechRecognition/wav2vec2/common | common | metrics | # Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from collections import defaultdict
from copy import copy
import numpy as np
import torch
from common.utils import all_reduce_cpu_scalars, print_once
def __levenshtein(a, b):
"""Calculates the Levenshtein distance between two sequences."""
n, m = len(a), len(b)
if n > m:
# Make sure n <= m, to use O(min(n,m)) space
a, b = b, a
n, m = m, n
current = list(range(n + 1))
for i in range(1, m + 1):
previous, current = current, [i] + [0] * n
for j in range(1, n + 1):
add, delete = previous[j] + 1, current[j - 1] + 1
change = previous[j - 1]
if a[j - 1] != b[i - 1]:
change = change + 1
current[j] = min(add, delete, change)
return current[n]
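# Illustrative check: __levenshtein("kitten", "sitting") == 3
# (two substitutions and one insertion).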
def word_error_rate(hypotheses, references):
"""Computes average Word Error Rate (WER) between two text lists."""
scores = 0
words = 0
len_diff = len(references) - len(hypotheses)
if len_diff > 0:
raise ValueError("Uneqal number of hypthoses and references: "
"{0} and {1}".format(len(hypotheses), len(references)))
elif len_diff < 0:
hypotheses = hypotheses[:len_diff]
for h, r in zip(hypotheses, references):
h_list = h.split()
r_list = r.split()
words += len(r_list)
scores += __levenshtein(h_list, r_list)
if words != 0:
wer = 1.0*scores/words
else:
wer = float('inf')
return wer, scores, words
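# Example (illustrative): one substituted word over a four-word reference
# gives WER = 0.25:
#   wer, errs, words = word_error_rate(["the cat sat up"], ["the cat sat down"])
#   # wer == 0.25, errs == 1, words == 4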
class MetricsAggregator:
def __init__(self, scopes=('train', 'train_avg'),
dllogger_keys=(),
benchmark_keys=(),
benchmark_epochs=0,
reduce_mean=(),
reduce_last=(),
group_tb_entries=False,
cuda=True):
"""
Args:
scopes: possible scopes of metrics accumulation
dllogger_keys: metrics to log with dllogger
benchmark_keys: metrics to log as benchmark metrics
benchmark_epochs: num of last epochs to benchmark
"""
super().__init__()
self.dll_keys = dllogger_keys
self.partials = defaultdict(float)
self.partial_counts = defaultdict(int)
self.accum_reductions = defaultdict(lambda: 'sum')
self.accum_reductions.update({k: 'mean' for k in reduce_mean})
self.accum_reductions.update({k: 'last' for k in reduce_last})
self.metrics = {scope: defaultdict(float) for scope in scopes}
self.metric_counts = {scope: defaultdict(int) for scope in scopes}
self.start_time = {scope: None for scope in scopes}
self.done_accumulating = {scope: True for scope in scopes}
self.benchmark_epochs = benchmark_epochs
self.metrics['train_benchmark'] = defaultdict(list)
self.benchmark_keys = benchmark_keys
self.scopes = scopes
self.group_tb_entries = group_tb_entries
self.cuda = cuda
def log_scalar(self, key, val, accum_reduction=None):
"""Main primitive for logging partial metrics from single batch.
NOTE: Assumption: `log_scalar` cannot be called with different
`accum_reduction` for the same `key`. This results in undefined behavior
Args:
key: metric key
val: metric value
accum_reduction: defines how to accumulate given metric:
- 'sum': sums metrics across grad acc and devices batches
- 'mean': same as 'sum' but with averaging
- 'last': overwrites previous accumulated values. Useful for
logging metric once in a grad acc batch, e.g. learning rate.
If None, a default value is fetched from self.accum_reductions.
If not None, overwrites defaults in self.accum_reductions
"""
if accum_reduction is None:
accum_reduction = self.accum_reductions[key]
else:
self.accum_reductions[key] = accum_reduction
if accum_reduction == 'sum':
self.partials[key] += val
self.partial_counts[key] = 1
elif accum_reduction == 'mean':
self.partials[key] += val
self.partial_counts[key] += 1
elif accum_reduction == 'last':
self.partials[key] = val # overwrite accumulation
self.partial_counts[key] = 1
else:
raise ValueError(accum_reduction)
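# Example (illustrative): sum losses across grad-accumulation steps but
# keep only the most recent learning rate:
#   metrics.log_scalar('loss', loss.item())               # default: 'sum'
#   metrics.log_scalar('lr', lr, accum_reduction='last')
#   metrics.accumulate()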
def log_scalars(self, scalars_dict, accum_reduction=None):
""" Log whole dict of metrics at once """
for k, v in scalars_dict.items():
self.log_scalar(k, v, accum_reduction)
def __setitem__(self, key, val):
""" Convenience logging method. Use sparingly (see NOTE below).
Uses 'last' aggregation and extracts tensors.
Example:
>>> metrics['lr'] = optim.param_groups[0]['lr']
NOTE: `metrics['lr'] = ...` is very different
from `metrics.partial['lr'] = ...`
"""
extract = lambda t: t.item() if type(t) is torch.Tensor else t
if type(val) is dict:
for k, v in val.items():
self.log_scalar(k, extract(v), 'last')
else:
self.log_scalar(key, extract(val), 'last')
def accumulate(self, scopes=None):
""" Accumulates partial metrics in metrics for given scopes.
Defines boundaries of accum_reduction in `log_scalar` method.
Intended to run after each gradient accumulation adjusted iteration.
"""
scopes = scopes if scopes is not None else self.scopes
for scope in scopes:
for k, v in self.partials.items():
self.metrics[scope][k] += v
self.metric_counts[scope][k] += self.partial_counts.get(k, 1)
self.partials.clear()
self.partial_counts.clear()
def all_reduce(self, world_size):
""" Reduce metrics across devices.
Currently assumes that all metrics are float scalars.
After reducing, `log_scalar` method with accumulation other than 'last'
shouldn't be called prior to calling `accumulate`.
"""
if world_size == 1:
return
self.partials = defaultdict(float,
all_reduce_cpu_scalars(self.partials))
for k, v in self.partials.items():
if self.accum_reductions[k] in ('mean', 'last'):
self.partial_counts[k] *= (world_size - self.partials.get('ignore', 0))
if self.partials.get('ignore', 0) > 0:
assert self.accum_reductions[k] == 'mean'
print_once(f'reducing with world size {world_size - self.partials.get("ignore", 0)}')
def start_iter(self, iter):
self._start_accumulating(iter, True, 'train')
def start_epoch(self, epoch):
if self.cuda:
torch.cuda.synchronize()
self._start_accumulating(epoch, True, 'train_avg')
def start_val(self):
if self.cuda:
torch.cuda.synchronize()
self._start_accumulating(None, True, 'val')
def finish_iter(self):
self._accumulate_time('train')
def finish_logging_interval(self):
self._finish_accumulating('train')
def finish_epoch(self):
if self.cuda:
torch.cuda.synchronize()
self._accumulate_time('train_avg')
self._finish_accumulating('train_avg')
metr = self.metrics['train_benchmark']
for k in self.benchmark_keys:
metr[k].append(self.metrics['train_avg'][k])
if len(metr[k]) > self.benchmark_epochs:
metr[k].pop(0)
def finish_val(self, scope='val'):
if self.cuda:
torch.cuda.synchronize()
self._accumulate_time(scope)
self._finish_accumulating(scope)
def get_metrics(self, scope='train', target='dll'):
if scope == 'train_benchmark':
metr = self.metrics[scope]
ret = {'train_avg_' + k: np.mean(v) for k, v in metr.items()}
ret['benchmark_epochs_num'] = len(list(metr.values())[0])
return ret
assert self.done_accumulating[scope]
ret = copy(self.metrics[scope])
if target == 'dll':
ret = {f'{scope}_{k}': v
for k, v in ret.items() if k in self.dll_keys}
elif target == 'tb' and self.group_tb_entries:
# Rename keys so they would group nicely inside TensorBoard
def split_key(k):
pos = k.rfind('_')
return k[:pos] + '/' + k[pos+1:] if pos >= 0 else k
ret = {split_key(k): v for k, v in ret.items()}
return ret
def _start_accumulating(self, step, start_timer=True, scope='train'):
del step # unused
assert not self.partials, 'metrics.accumulate call missed'
assert not self.partial_counts, 'metrics.accumulate call missed'
if self.done_accumulating[scope]:
self.metrics[scope].clear()
self.metric_counts[scope].clear()
if start_timer:
self.start_time[scope] = time.time()
self.done_accumulating[scope] = False
def _finish_accumulating(self, scope='train'):
assert not self.done_accumulating[scope]
metr = self.metrics[scope]
counts = self.metric_counts[scope]
for k, v in metr.items():
metr[k] = v / counts[k]
self.done_accumulating[scope] = True
def _accumulate_time(self, scope='train'):
assert not self.done_accumulating[scope]
took = time.time() - self.start_time[scope]
self.start_time[scope] = None
self.metrics[scope]['took'] += took
self.metric_counts[scope]['took'] = 1 # not +=
|
PyTorch/Segmentation/MaskRCNN/pytorch/tools | tools | test_net | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
# Set up custom environment before nearly anything else is imported
# NOTE: this should be the first import (do not reorder)
from maskrcnn_benchmark.utils.env import setup_environment # noqa F401 isort:skip
import argparse
import os
import torch
from maskrcnn_benchmark.config import cfg
from maskrcnn_benchmark.data import make_data_loader
from maskrcnn_benchmark.engine.inference import inference
from maskrcnn_benchmark.modeling.detector import build_detection_model
from maskrcnn_benchmark.utils.checkpoint import DetectronCheckpointer
from maskrcnn_benchmark.utils.collect_env import collect_env_info
from maskrcnn_benchmark.utils.comm import synchronize, get_rank, is_main_process
from maskrcnn_benchmark.utils.logger import setup_logger
from maskrcnn_benchmark.utils.miscellaneous import mkdir
from maskrcnn_benchmark.utils.logger import format_step
import dllogger
def main():
parser = argparse.ArgumentParser(description="PyTorch Object Detection Inference")
parser.add_argument(
"--config-file",
default="/workspace/object_detection/configs/e2e_mask_rcnn_R_50_FPN_1x.yaml",
metavar="FILE",
help="path to config file",
)
parser.add_argument("--local_rank", type=int, default=os.getenv('LOCAL_RANK', 0))
parser.add_argument("--json-summary",
help="Out file for DLLogger",
default="dllogger_inference.out",
type=str)
parser.add_argument(
"--skip-eval",
dest="skip_eval",
help="Do not eval the predictions",
action="store_true",
)
parser.add_argument(
"--fp16",
help="Mixed precision training",
action="store_true",
)
parser.add_argument(
"--amp",
help="Mixed precision training",
action="store_true",
)
parser.add_argument(
"--infer_steps",
help="Total inference steps",
default=-1,
type=int)
parser.add_argument(
"opts",
help="Modify config options using the command-line",
default=None,
nargs=argparse.REMAINDER,
)
args = parser.parse_args()
args.fp16 = args.fp16 or args.amp
num_gpus = int(os.environ["WORLD_SIZE"]) if "WORLD_SIZE" in os.environ else 1
distributed = num_gpus > 1
if distributed:
torch.cuda.set_device(args.local_rank)
torch.distributed.init_process_group(
backend="nccl", init_method="env://"
)
synchronize()
cfg.merge_from_file(args.config_file)
cfg.merge_from_list(args.opts)
cfg.freeze()
save_dir = ""
logger = setup_logger("maskrcnn_benchmark", save_dir, get_rank())
if is_main_process():
dllogger.init(backends=[dllogger.JSONStreamBackend(verbosity=dllogger.Verbosity.VERBOSE,
filename=args.json_summary),
dllogger.StdOutBackend(verbosity=dllogger.Verbosity.VERBOSE, step_format=format_step)])
else:
dllogger.init(backends=[])
dllogger.metadata("BBOX_mAP", {"unit": None})
dllogger.metadata("MASK_mAP", {"unit": None})
dllogger.metadata("e2e_infer_time", {"unit": "s"})
dllogger.metadata("inference_perf_fps", {"unit": "images/s"})
dllogger.metadata("latency_avg", {"unit": "s"})
dllogger.metadata("latency_90", {"unit": "s"})
dllogger.metadata("latency_95", {"unit": "s"})
dllogger.metadata("latency_99", {"unit": "s"})
save_dir = ""
dllogger.log(step="PARAMETER", data={"config":cfg})
dllogger.log(step="PARAMETER", data={"gpu_count": num_gpus})
# dllogger.log(step="PARAMETER", data={"env_info": collect_env_info()})
model = build_detection_model(cfg)
model.to(cfg.MODEL.DEVICE)
# Initialize mixed-precision
if args.fp16:
use_mixed_precision = True
else:
use_mixed_precision = cfg.DTYPE == "float16"
output_dir = cfg.OUTPUT_DIR
checkpointer = DetectronCheckpointer(cfg, model, save_dir=output_dir)
_ = checkpointer.load(cfg.MODEL.WEIGHT)
iou_types = ("bbox",)
if cfg.MODEL.MASK_ON:
iou_types = iou_types + ("segm",)
output_folders = [None] * len(cfg.DATASETS.TEST)
dataset_names = cfg.DATASETS.TEST
if cfg.OUTPUT_DIR:
for idx, dataset_name in enumerate(dataset_names):
output_folder = os.path.join(cfg.OUTPUT_DIR, "inference", dataset_name)
mkdir(output_folder)
output_folders[idx] = output_folder
data_loaders_val = make_data_loader(cfg, is_train=False, is_distributed=distributed)
results = []
for output_folder, dataset_name, data_loader_val in zip(output_folders, dataset_names, data_loaders_val):
if use_mixed_precision:
with torch.cuda.amp.autocast():
result = inference(
model,
data_loader_val,
dataset_name=dataset_name,
iou_types=iou_types,
box_only=cfg.MODEL.RPN_ONLY,
device=cfg.MODEL.DEVICE,
expected_results=cfg.TEST.EXPECTED_RESULTS,
expected_results_sigma_tol=cfg.TEST.EXPECTED_RESULTS_SIGMA_TOL,
output_folder=output_folder,
skip_eval=args.skip_eval,
dllogger=dllogger,
steps=args.infer_steps
)
else:
result = inference(
model,
data_loader_val,
dataset_name=dataset_name,
iou_types=iou_types,
box_only=cfg.MODEL.RPN_ONLY,
device=cfg.MODEL.DEVICE,
expected_results=cfg.TEST.EXPECTED_RESULTS,
expected_results_sigma_tol=cfg.TEST.EXPECTED_RESULTS_SIGMA_TOL,
output_folder=output_folder,
skip_eval=args.skip_eval,
dllogger=dllogger,
steps=args.infer_steps
)
synchronize()
results.append(result)
if is_main_process() and not args.skip_eval:
map_results, raw_results = results[0]
bbox_map = map_results.results["bbox"]['AP']
segm_map = map_results.results["segm"]['AP']
dllogger.log(step=tuple(), data={"BBOX_mAP": bbox_map, "MASK_mAP": segm_map})
if __name__ == "__main__":
main()
dllogger.log(step=tuple(), data={})
|
TensorFlow/Detection/SSD/models/research/object_detection/dataset_tools | dataset_tools | create_kitti_tf_record | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Convert raw KITTI detection dataset to TFRecord for object_detection.
Converts the KITTI detection dataset to TFRecords with a standard format that
allows using this dataset to train object detectors. The raw dataset can be
downloaded from:
http://kitti.is.tue.mpg.de/kitti/data_object_image_2.zip.
http://kitti.is.tue.mpg.de/kitti/data_object_label_2.zip
Permission can be requested at the main website.
KITTI detection dataset contains 7481 training images. Using this code with
the default settings will set aside the first 500 images as a validation set.
This can be altered using the flags, see details below.
Example usage:
python object_detection/dataset_tools/create_kitti_tf_record.py \
--data_dir=/home/user/kitti \
--output_path=/home/user/kitti.record
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import hashlib
import io
import os
import numpy as np
import PIL.Image as pil
import tensorflow as tf
from object_detection.utils import dataset_util
from object_detection.utils import label_map_util
from object_detection.utils.np_box_ops import iou
tf.app.flags.DEFINE_string('data_dir', '', 'Location of root directory for the '
'data. Folder structure is assumed to be: '
'<data_dir>/training/label_2 (annotations) and '
'<data_dir>/data_object_image_2/training/image_2 '
'(images).')
tf.app.flags.DEFINE_string('output_path', '', 'Path to which TFRecord files '
'will be written. The TFRecord with the training set '
'will be located at: <output_path>_train.tfrecord. '
'And the TFRecord with the validation set will be '
'located at: <output_path>_val.tfrecord')
tf.app.flags.DEFINE_string('classes_to_use', 'car,pedestrian,dontcare',
'Comma separated list of class names that will be '
'used. Adding the dontcare class will remove all '
'bboxes in the dontcare regions.')
tf.app.flags.DEFINE_string('label_map_path', 'data/kitti_label_map.pbtxt',
'Path to label map proto.')
tf.app.flags.DEFINE_integer('validation_set_size', '500', 'Number of images to '
'be used as a validation set.')
FLAGS = tf.app.flags.FLAGS
def convert_kitti_to_tfrecords(data_dir, output_path, classes_to_use,
label_map_path, validation_set_size):
"""Convert the KITTI detection dataset to TFRecords.
Args:
data_dir: The full path to the unzipped folder containing the unzipped data
from data_object_image_2 and data_object_label_2.zip.
Folder structure is assumed to be: data_dir/training/label_2 (annotations)
and data_dir/data_object_image_2/training/image_2 (images).
output_path: The path to which TFRecord files will be written. The TFRecord
with the training set will be located at: <output_path>_train.tfrecord
And the TFRecord with the validation set will be located at:
<output_path>_val.tfrecord
classes_to_use: List of strings naming the classes for which data should be
converted. Use the same names as presented in the KITTI README file.
Adding dontcare class will remove all other bounding boxes that overlap
with areas marked as dontcare regions.
label_map_path: Path to label map proto
validation_set_size: How many images should be left as the validation set.
(The first `validation_set_size` examples are selected to be in the
validation set.)
"""
label_map_dict = label_map_util.get_label_map_dict(label_map_path)
train_count = 0
val_count = 0
annotation_dir = os.path.join(data_dir,
'training',
'label_2')
image_dir = os.path.join(data_dir,
'data_object_image_2',
'training',
'image_2')
train_writer = tf.python_io.TFRecordWriter('%s_train.tfrecord'%
output_path)
val_writer = tf.python_io.TFRecordWriter('%s_val.tfrecord'%
output_path)
images = sorted(tf.gfile.ListDirectory(image_dir))
for img_name in images:
img_num = int(img_name.split('.')[0])
is_validation_img = img_num < validation_set_size
img_anno = read_annotation_file(os.path.join(annotation_dir,
str(img_num).zfill(6)+'.txt'))
image_path = os.path.join(image_dir, img_name)
# Filter all bounding boxes of this frame that are of a legal class, and
# don't overlap with a dontcare region.
# TODO(talremez) filter out targets that are truncated or heavily occluded.
annotation_for_image = filter_annotations(img_anno, classes_to_use)
example = prepare_example(image_path, annotation_for_image, label_map_dict)
if is_validation_img:
val_writer.write(example.SerializeToString())
val_count += 1
else:
train_writer.write(example.SerializeToString())
train_count += 1
train_writer.close()
val_writer.close()
def prepare_example(image_path, annotations, label_map_dict):
"""Converts a dictionary with annotations for an image to tf.Example proto.
Args:
image_path: The complete path to image.
annotations: A dictionary representing the annotation of a single object
that appears in the image.
label_map_dict: A map from string label names to integer ids.
Returns:
example: The converted tf.Example.
"""
with tf.gfile.GFile(image_path, 'rb') as fid:
encoded_png = fid.read()
encoded_png_io = io.BytesIO(encoded_png)
image = pil.open(encoded_png_io)
image = np.asarray(image)
key = hashlib.sha256(encoded_png).hexdigest()
width = int(image.shape[1])
height = int(image.shape[0])
xmin_norm = annotations['2d_bbox_left'] / float(width)
ymin_norm = annotations['2d_bbox_top'] / float(height)
xmax_norm = annotations['2d_bbox_right'] / float(width)
ymax_norm = annotations['2d_bbox_bottom'] / float(height)
difficult_obj = [0]*len(xmin_norm)
example = tf.train.Example(features=tf.train.Features(feature={
'image/height': dataset_util.int64_feature(height),
'image/width': dataset_util.int64_feature(width),
'image/filename': dataset_util.bytes_feature(image_path.encode('utf8')),
'image/source_id': dataset_util.bytes_feature(image_path.encode('utf8')),
'image/key/sha256': dataset_util.bytes_feature(key.encode('utf8')),
'image/encoded': dataset_util.bytes_feature(encoded_png),
'image/format': dataset_util.bytes_feature('png'.encode('utf8')),
'image/object/bbox/xmin': dataset_util.float_list_feature(xmin_norm),
'image/object/bbox/xmax': dataset_util.float_list_feature(xmax_norm),
'image/object/bbox/ymin': dataset_util.float_list_feature(ymin_norm),
'image/object/bbox/ymax': dataset_util.float_list_feature(ymax_norm),
'image/object/class/text': dataset_util.bytes_list_feature(
[x.encode('utf8') for x in annotations['type']]),
'image/object/class/label': dataset_util.int64_list_feature(
[label_map_dict[x] for x in annotations['type']]),
'image/object/difficult': dataset_util.int64_list_feature(difficult_obj),
'image/object/truncated': dataset_util.float_list_feature(
annotations['truncated']),
'image/object/alpha': dataset_util.float_list_feature(
annotations['alpha']),
'image/object/3d_bbox/height': dataset_util.float_list_feature(
annotations['3d_bbox_height']),
'image/object/3d_bbox/width': dataset_util.float_list_feature(
annotations['3d_bbox_width']),
'image/object/3d_bbox/length': dataset_util.float_list_feature(
annotations['3d_bbox_length']),
'image/object/3d_bbox/x': dataset_util.float_list_feature(
annotations['3d_bbox_x']),
'image/object/3d_bbox/y': dataset_util.float_list_feature(
annotations['3d_bbox_y']),
'image/object/3d_bbox/z': dataset_util.float_list_feature(
annotations['3d_bbox_z']),
'image/object/3d_bbox/rot_y': dataset_util.float_list_feature(
annotations['3d_bbox_rot_y']),
}))
return example
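# A quick numeric check of the normalization above (values assumed): a box
# with 2d_bbox_left == 100.0 on a 400-pixel-wide image is stored as
# xmin_norm == 0.25, so all four coordinates land in [0, 1] regardless of
# the source image resolution.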
def filter_annotations(img_all_annotations, used_classes):
"""Filters out annotations from the unused classes and dontcare regions.
Filters out the annotations that belong to classes we do not wish to use and
(optionally) also removes all boxes that overlap with dontcare regions.
Args:
img_all_annotations: A list of annotation dictionaries. See documentation of
read_annotation_file for more details about the format of the annotations.
used_classes: A list of strings listing the classes we want to keep; if the
list contains "dontcare", all bounding boxes overlapping with dontcare
regions will also be filtered out.
Returns:
img_filtered_annotations: A list of annotation dictionaries that have passed
the filtering.
"""
img_filtered_annotations = {}
# Filter the type of the objects.
relevant_annotation_indices = [
i for i, x in enumerate(img_all_annotations['type']) if x in used_classes
]
for key in img_all_annotations.keys():
img_filtered_annotations[key] = (
img_all_annotations[key][relevant_annotation_indices])
if 'dontcare' in used_classes:
dont_care_indices = [i for i,
x in enumerate(img_filtered_annotations['type'])
if x == 'dontcare']
# bounding box format [y_min, x_min, y_max, x_max]
all_boxes = np.stack([img_filtered_annotations['2d_bbox_top'],
img_filtered_annotations['2d_bbox_left'],
img_filtered_annotations['2d_bbox_bottom'],
img_filtered_annotations['2d_bbox_right']],
axis=1)
ious = iou(boxes1=all_boxes,
boxes2=all_boxes[dont_care_indices])
# Remove all bounding boxes that overlap with a dontcare region.
if ious.size > 0:
boxes_to_remove = np.amax(ious, axis=1) > 0.0
for key in img_all_annotations.keys():
img_filtered_annotations[key] = (
img_filtered_annotations[key][np.logical_not(boxes_to_remove)])
return img_filtered_annotations
def read_annotation_file(filename):
"""Reads a KITTI annotation file.
Converts a KITTI annotation file into a dictionary containing all the
relevant information.
Args:
filename: the path to the annotation text file.
Returns:
anno: A dictionary with the converted annotation information. See annotation
README file for details on the different fields.
"""
with open(filename) as f:
content = f.readlines()
content = [x.strip().split(' ') for x in content]
anno = {}
anno['type'] = np.array([x[0].lower() for x in content])
anno['truncated'] = np.array([float(x[1]) for x in content])
anno['occluded'] = np.array([int(x[2]) for x in content])
anno['alpha'] = np.array([float(x[3]) for x in content])
anno['2d_bbox_left'] = np.array([float(x[4]) for x in content])
anno['2d_bbox_top'] = np.array([float(x[5]) for x in content])
anno['2d_bbox_right'] = np.array([float(x[6]) for x in content])
anno['2d_bbox_bottom'] = np.array([float(x[7]) for x in content])
anno['3d_bbox_height'] = np.array([float(x[8]) for x in content])
anno['3d_bbox_width'] = np.array([float(x[9]) for x in content])
anno['3d_bbox_length'] = np.array([float(x[10]) for x in content])
anno['3d_bbox_x'] = np.array([float(x[11]) for x in content])
anno['3d_bbox_y'] = np.array([float(x[12]) for x in content])
anno['3d_bbox_z'] = np.array([float(x[13]) for x in content])
anno['3d_bbox_rot_y'] = np.array([float(x[14]) for x in content])
return anno
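# For reference, each KITTI label line carries 15 space-separated fields in
# the order parsed above: type, truncated, occluded, alpha, four 2D box
# coordinates (left, top, right, bottom), three 3D box dimensions
# (height, width, length), the 3D location (x, y, z), and rotation_y.
# A representative line (values illustrative only):
#   car 0.00 0 -1.58 587.01 173.33 614.12 200.12 1.65 1.67 3.64 -0.65 1.71 46.70 -1.59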
def main(_):
convert_kitti_to_tfrecords(
data_dir=FLAGS.data_dir,
output_path=FLAGS.output_path,
classes_to_use=FLAGS.classes_to_use.split(','),
label_map_path=FLAGS.label_map_path,
validation_set_size=FLAGS.validation_set_size)
if __name__ == '__main__':
tf.app.run()
|
TensorFlow/LanguageModeling/BERT | BERT | run_squad | # coding=utf-8
# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Run BERT on SQuAD 1.1 and SQuAD 2.0."""
from __future__ import absolute_import, division, print_function
import collections
import json
import math
import os
import random
import shutil
import time
import horovod.tensorflow as hvd
import numpy as np
import six
import tensorflow as tf
from tensorflow.python.client import device_lib
import modeling
import optimization
import tokenization
from utils.create_squad_data import *
from utils.utils import LogEvalRunHook, LogTrainRunHook, setup_xla_flags
from utils.gpu_affinity import set_affinity
import utils.dllogger_class
from dllogger import Verbosity
flags = tf.flags
FLAGS = None
def extract_run_squad_flags():
## Required parameters
flags.DEFINE_string(
"bert_config_file", None,
"The config json file corresponding to the pre-trained BERT model. "
"This specifies the model architecture.")
flags.DEFINE_string("vocab_file", None,
"The vocabulary file that the BERT model was trained on.")
flags.DEFINE_string(
"output_dir", None,
"The output directory where the model checkpoints will be written.")
## Other parameters
flags.DEFINE_string(
"dllog_path", "/results/bert_dllog.json",
"filename where dllogger writes to")
flags.DEFINE_string("train_file", None,
"SQuAD json for training. E.g., train-v1.1.json")
flags.DEFINE_string(
"predict_file", None,
"SQuAD json for predictions. E.g., dev-v1.1.json or test-v1.1.json")
flags.DEFINE_string(
"eval_script", None,
"SQuAD evaluate.py file to compute f1 and exact_match E.g., evaluate-v1.1.py")
flags.DEFINE_string(
"init_checkpoint", None,
"Initial checkpoint (usually from a pre-trained BERT model).")
flags.DEFINE_bool(
"do_lower_case", True,
"Whether to lower case the input text. Should be True for uncased "
"models and False for cased models.")
flags.DEFINE_integer(
"max_seq_length", 384,
"The maximum total input sequence length after WordPiece tokenization. "
"Sequences longer than this will be truncated, and sequences shorter "
"than this will be padded.")
flags.DEFINE_integer(
"doc_stride", 128,
"When splitting up a long document into chunks, how much stride to "
"take between chunks.")
flags.DEFINE_integer(
"max_query_length", 64,
"The maximum number of tokens for the question. Questions longer than "
"this will be truncated to this length.")
flags.DEFINE_bool("do_train", False, "Whether to run training.")
flags.DEFINE_bool("do_predict", False, "Whether to run eval on the dev set.")
flags.DEFINE_integer("train_batch_size", 8, "Total batch size for training.")
flags.DEFINE_integer("predict_batch_size", 8,
"Total batch size for predictions.")
flags.DEFINE_float("learning_rate", 5e-6, "The initial learning rate for Adam.")
flags.DEFINE_bool("use_trt", False, "Whether to use TF-TRT")
flags.DEFINE_bool("horovod", False, "Whether to use Horovod for multi-gpu runs")
flags.DEFINE_float("num_train_epochs", 3.0,
"Total number of training epochs to perform.")
flags.DEFINE_float(
"warmup_proportion", 0.1,
"Proportion of training to perform linear learning rate warmup for. "
"E.g., 0.1 = 10% of training.")
flags.DEFINE_integer("save_checkpoints_steps", 5000,
"How often to save the model checkpoint.")
flags.DEFINE_integer("display_loss_steps", 10,
"How often to print loss from estimator")
flags.DEFINE_integer("iterations_per_loop", 1000,
"How many steps to make in each estimator call.")
flags.DEFINE_integer("num_accumulation_steps", 1,
"Number of accumulation steps before gradient update"
"Global batch size = num_accumulation_steps * train_batch_size")
flags.DEFINE_integer(
"n_best_size", 20,
"The total number of n-best predictions to generate in the "
"nbest_predictions.json output file.")
flags.DEFINE_integer(
"max_answer_length", 30,
"The maximum length of an answer that can be generated. This is needed "
"because the start and end predictions are not conditioned on one another.")
flags.DEFINE_bool(
"verbose_logging", False,
"If true, all of the warnings related to data processing will be printed. "
"A number of warnings are expected for a normal SQuAD evaluation.")
flags.DEFINE_bool(
"version_2_with_negative", False,
"If true, the SQuAD examples contain some that do not have an answer.")
flags.DEFINE_float(
"null_score_diff_threshold", 0.0,
"If null_score - best_non_null is greater than the threshold predict null.")
flags.DEFINE_bool("amp", True, "Whether to enable AMP ops. When false, uses TF32 on A100 and FP32 on V100 GPUS.")
flags.DEFINE_bool("use_xla", True, "Whether to enable XLA JIT compilation.")
flags.DEFINE_integer("num_eval_iterations", None,
"How many eval iterations to run - performs inference on subset")
# Triton Specific flags
flags.DEFINE_bool("export_triton", False, "Whether to export saved model or run inference with Triton")
flags.DEFINE_string("triton_model_name", "bert", "exports to appropriate directory for Triton")
flags.DEFINE_integer("triton_model_version", 1, "exports to appropriate directory for Triton")
flags.DEFINE_string("triton_server_url", "localhost:8001", "exports to appropriate directory for Triton")
flags.DEFINE_bool("triton_model_overwrite", False, "If True, will overwrite an existing directory with the specified 'model_name' and 'version_name'")
flags.DEFINE_integer("triton_max_batch_size", 8, "Specifies the 'max_batch_size' in the Triton model config. See the Triton documentation for more info.")
flags.DEFINE_float("triton_dyn_batching_delay", 0, "Determines the dynamic_batching queue delay in milliseconds(ms) for the Triton model config. Use '0' or '-1' to specify static batching. See the Triton documentation for more info.")
flags.DEFINE_integer("triton_engine_count", 1, "Specifies the 'instance_group' count value in the Triton model config. See the Triton documentation for more info.")
flags.mark_flag_as_required("vocab_file")
flags.mark_flag_as_required("bert_config_file")
flags.mark_flag_as_required("output_dir")
return flags.FLAGS
def create_model(bert_config, is_training, input_ids, input_mask, segment_ids,
use_one_hot_embeddings):
"""Creates a classification model."""
model = modeling.BertModel(
config=bert_config,
is_training=is_training,
input_ids=input_ids,
input_mask=input_mask,
token_type_ids=segment_ids,
use_one_hot_embeddings=use_one_hot_embeddings,
compute_type=tf.float32)
final_hidden = model.get_sequence_output()
final_hidden_shape = modeling.get_shape_list(final_hidden, expected_rank=3)
batch_size = final_hidden_shape[0]
seq_length = final_hidden_shape[1]
hidden_size = final_hidden_shape[2]
output_weights = tf.get_variable(
"cls/squad/output_weights", [2, hidden_size],
initializer=tf.truncated_normal_initializer(stddev=0.02))
output_bias = tf.get_variable(
"cls/squad/output_bias", [2], initializer=tf.zeros_initializer())
final_hidden_matrix = tf.reshape(final_hidden,
[batch_size * seq_length, hidden_size])
logits = tf.matmul(final_hidden_matrix, output_weights, transpose_b=True)
logits = tf.nn.bias_add(logits, output_bias)
logits = tf.reshape(logits, [batch_size, seq_length, 2])
logits = tf.transpose(logits, [2, 0, 1])
unstacked_logits = tf.unstack(logits, axis=0, name='unstack')
(start_logits, end_logits) = (unstacked_logits[0], unstacked_logits[1])
return (start_logits, end_logits)
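# Shape walk-through for the head above (batch size B, sequence length S,
# hidden size H): final_hidden is [B, S, H]; the reshape/matmul produces
# [B*S, 2] logits, reshaped to [B, S, 2], transposed to [2, B, S], and
# unstacked into two [B, S] tensors of start and end logits.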
def get_frozen_tftrt_model(bert_config, shape, use_one_hot_embeddings, init_checkpoint):
tf_config = tf.compat.v1.ConfigProto()
tf_config.gpu_options.allow_growth = True
output_node_names = ['unstack']
with tf.Session(config=tf_config) as tf_sess:
input_ids = tf.placeholder(tf.int32, shape, 'input_ids')
input_mask = tf.placeholder(tf.int32, shape, 'input_mask')
segment_ids = tf.placeholder(tf.int32, shape, 'segment_ids')
(start_logits, end_logits) = create_model(bert_config=bert_config,
is_training=False,
input_ids=input_ids,
input_mask=input_mask,
segment_ids=segment_ids,
use_one_hot_embeddings=use_one_hot_embeddings)
tvars = tf.trainable_variables()
(assignment_map, initialized_variable_names) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint)
tf.train.init_from_checkpoint(init_checkpoint, assignment_map)
tf_sess.run(tf.global_variables_initializer())
print("LOADED!")
tf.compat.v1.logging.info("**** Trainable Variables ****")
for var in tvars:
init_string = ""
if var.name in initialized_variable_names:
init_string = ", *INIT_FROM_CKPT*"
else:
init_string = ", *NOTTTTTTTTTTTTTTTTTTTTT"
tf.compat.v1.logging.info(" name = %s, shape = %s%s", var.name, var.shape, init_string)
frozen_graph = tf.graph_util.convert_variables_to_constants(tf_sess,
tf_sess.graph.as_graph_def(), output_node_names)
num_nodes = len(frozen_graph.node)
print('Converting graph using TensorFlow-TensorRT...')
from tensorflow.python.compiler.tensorrt import trt_convert as trt
converter = trt.TrtGraphConverter(
input_graph_def=frozen_graph,
nodes_blacklist=output_node_names,
max_workspace_size_bytes=(4096 << 20) - 1000,
precision_mode = "FP16" if FLAGS.amp else "FP32",
minimum_segment_size=4,
is_dynamic_op=True,
maximum_cached_engines=1000
)
frozen_graph = converter.convert()
print('Total node count before and after TF-TRT conversion:',
num_nodes, '->', len(frozen_graph.node))
print('TRT node count:',
len([1 for n in frozen_graph.node if str(n.op) == 'TRTEngineOp']))
with tf.io.gfile.GFile("frozen_modelTRT.pb", "wb") as f:
f.write(frozen_graph.SerializeToString())
return frozen_graph
def model_fn_builder(bert_config, init_checkpoint, learning_rate,
num_train_steps, num_warmup_steps,
hvd=None, amp=False, use_one_hot_embeddings=False):
"""Returns `model_fn` closure for Estimator."""
def model_fn(features, labels, mode, params): # pylint: disable=unused-argument
"""The `model_fn` for Estimator."""
if FLAGS.verbose_logging:
tf.compat.v1.logging.info("*** Features ***")
for name in sorted(features.keys()):
tf.compat.v1.logging.info(" name = %s, shape = %s" % (name, features[name].shape))
unique_ids = features["unique_ids"]
input_ids = features["input_ids"]
input_mask = features["input_mask"]
segment_ids = features["segment_ids"]
is_training = (mode == tf.estimator.ModeKeys.TRAIN)
if not is_training and FLAGS.use_trt:
trt_graph = get_frozen_tftrt_model(bert_config, input_ids.shape, use_one_hot_embeddings, init_checkpoint)
(start_logits, end_logits) = tf.import_graph_def(trt_graph,
input_map={'input_ids':input_ids, 'input_mask':input_mask, 'segment_ids':segment_ids},
return_elements=['unstack:0', 'unstack:1'],
name='')
predictions = {
"unique_ids": unique_ids,
"start_logits": start_logits,
"end_logits": end_logits,
}
output_spec = tf.estimator.EstimatorSpec(
mode=mode, predictions=predictions)
return output_spec
(start_logits, end_logits) = create_model(
bert_config=bert_config,
is_training=is_training,
input_ids=input_ids,
input_mask=input_mask,
segment_ids=segment_ids,
use_one_hot_embeddings=use_one_hot_embeddings)
tvars = tf.trainable_variables()
initialized_variable_names = {}
if init_checkpoint and (hvd is None or hvd.rank() == 0):
(assignment_map, initialized_variable_names
) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint)
tf.train.init_from_checkpoint(init_checkpoint, assignment_map)
if FLAGS.verbose_logging:
tf.compat.v1.logging.info("**** Trainable Variables ****")
for var in tvars:
init_string = ""
if var.name in initialized_variable_names:
init_string = ", *INIT_FROM_CKPT*"
tf.compat.v1.logging.info(" %d name = %s, shape = %s%s", 0 if hvd is None else hvd.rank(), var.name, var.shape,
init_string)
output_spec = None
if mode == tf.estimator.ModeKeys.TRAIN:
seq_length = modeling.get_shape_list(input_ids)[1]
def compute_loss(logits, positions):
one_hot_positions = tf.one_hot(
positions, depth=seq_length, dtype=tf.float32)
log_probs = tf.nn.log_softmax(logits, axis=-1)
loss = -tf.reduce_mean(
tf.reduce_sum(one_hot_positions * log_probs, axis=-1))
return loss
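# compute_loss is the standard span cross-entropy: the one-hot mask picks
# out log p(labeled position), so the loss is the negative mean over the
# batch of the log-probability assigned to the correct start (or end) index.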
start_positions = features["start_positions"]
end_positions = features["end_positions"]
start_loss = compute_loss(start_logits, start_positions)
end_loss = compute_loss(end_logits, end_positions)
total_loss = (start_loss + end_loss) / 2.0
train_op = optimization.create_optimizer(
total_loss, learning_rate, num_train_steps, num_warmup_steps, hvd, False, amp, FLAGS.num_accumulation_steps)
output_spec = tf.estimator.EstimatorSpec(
mode=mode,
loss=total_loss,
train_op=train_op)
elif mode == tf.estimator.ModeKeys.PREDICT:
dummy_op = tf.no_op()
# Need to call mixed precision graph rewrite if fp16 to enable graph rewrite
if amp:
loss_scaler = tf.train.experimental.FixedLossScale(1)
dummy_op = tf.train.experimental.enable_mixed_precision_graph_rewrite(
optimization.LAMBOptimizer(learning_rate=0.0), loss_scaler)
predictions = {
"unique_ids": tf.identity(unique_ids),
"start_logits": start_logits,
"end_logits": end_logits,
}
output_spec = tf.estimator.EstimatorSpec(
mode=mode, predictions=predictions)
else:
raise ValueError(
"Only TRAIN and PREDICT modes are supported: %s" % (mode))
return output_spec
return model_fn
def input_fn_builder(input_file, batch_size, seq_length, is_training, drop_remainder, hvd=None):
"""Creates an `input_fn` closure to be passed to Estimator."""
name_to_features = {
"unique_ids": tf.io.FixedLenFeature([], tf.int64),
"input_ids": tf.io.FixedLenFeature([seq_length], tf.int64),
"input_mask": tf.io.FixedLenFeature([seq_length], tf.int64),
"segment_ids": tf.io.FixedLenFeature([seq_length], tf.int64),
}
if is_training:
name_to_features["start_positions"] = tf.io.FixedLenFeature([], tf.int64)
name_to_features["end_positions"] = tf.io.FixedLenFeature([], tf.int64)
def _decode_record(record, name_to_features):
"""Decodes a record to a TensorFlow example."""
example = tf.parse_single_example(record, name_to_features)
# tf.Example only supports tf.int64, but the TPU only supports tf.int32.
# So cast all int64 to int32.
for name in list(example.keys()):
t = example[name]
if t.dtype == tf.int64:
t = tf.to_int32(t)
example[name] = t
return example
def input_fn():
"""The actual input function."""
# For training, we want a lot of parallel reading and shuffling.
# For eval, we want no shuffling and parallel reading doesn't matter.
if is_training:
d = tf.data.TFRecordDataset(input_file, num_parallel_reads=4)
if hvd is not None: d = d.shard(hvd.size(), hvd.rank())
d = d.apply(tf.data.experimental.ignore_errors())
d = d.shuffle(buffer_size=100)
d = d.repeat()
else:
d = tf.data.TFRecordDataset(input_file)
d = d.apply(
tf.contrib.data.map_and_batch(
lambda record: _decode_record(record, name_to_features),
batch_size=batch_size,
drop_remainder=drop_remainder))
return d
return input_fn
RawResult = collections.namedtuple("RawResult",
["unique_id", "start_logits", "end_logits"])
def get_predictions(all_examples, all_features, all_results, n_best_size, max_answer_length,
do_lower_case, version_2_with_negative, verbose_logging):
"""Get final predictions"""
example_index_to_features = collections.defaultdict(list)
for feature in all_features:
example_index_to_features[feature.example_index].append(feature)
unique_id_to_result = {}
for result in all_results:
unique_id_to_result[result.unique_id] = result
_PrelimPrediction = collections.namedtuple( # pylint: disable=invalid-name
"PrelimPrediction",
["feature_index", "start_index", "end_index", "start_logit", "end_logit"])
all_predictions = collections.OrderedDict()
all_nbest_json = collections.OrderedDict()
scores_diff_json = collections.OrderedDict()
for (example_index, example) in enumerate(all_examples):
features = example_index_to_features[example_index]
prelim_predictions = []
# keep track of the minimum score of null start+end of position 0
score_null = 1000000 # large and positive
min_null_feature_index = 0 # the paragraph slice with min null score
null_start_logit = 0 # the start logit at the slice with min null score
null_end_logit = 0 # the end logit at the slice with min null score
for (feature_index, feature) in enumerate(features):
result = unique_id_to_result[feature.unique_id]
start_indexes = _get_best_indexes(result.start_logits, n_best_size)
end_indexes = _get_best_indexes(result.end_logits, n_best_size)
# if we could have irrelevant answers, get the min score of irrelevant
if version_2_with_negative:
feature_null_score = result.start_logits[0] + result.end_logits[0]
if feature_null_score < score_null:
score_null = feature_null_score
min_null_feature_index = feature_index
null_start_logit = result.start_logits[0]
null_end_logit = result.end_logits[0]
for start_index in start_indexes:
for end_index in end_indexes:
# We could hypothetically create invalid predictions, e.g., predict
# that the start of the span is in the question. We throw out all
# invalid predictions.
if start_index >= len(feature.tokens):
continue
if end_index >= len(feature.tokens):
continue
if start_index not in feature.token_to_orig_map:
continue
if end_index not in feature.token_to_orig_map:
continue
if not feature.token_is_max_context.get(start_index, False):
continue
if end_index < start_index:
continue
length = end_index - start_index + 1
if length > max_answer_length:
continue
prelim_predictions.append(
_PrelimPrediction(
feature_index=feature_index,
start_index=start_index,
end_index=end_index,
start_logit=result.start_logits[start_index],
end_logit=result.end_logits[end_index]))
if version_2_with_negative:
prelim_predictions.append(
_PrelimPrediction(
feature_index=min_null_feature_index,
start_index=0,
end_index=0,
start_logit=null_start_logit,
end_logit=null_end_logit))
prelim_predictions = sorted(
prelim_predictions,
key=lambda x: (x.start_logit + x.end_logit),
reverse=True)
_NbestPrediction = collections.namedtuple( # pylint: disable=invalid-name
"NbestPrediction", ["text", "start_logit", "end_logit"])
seen_predictions = {}
nbest = []
for pred in prelim_predictions:
if len(nbest) >= n_best_size:
break
feature = features[pred.feature_index]
if pred.start_index > 0: # this is a non-null prediction
tok_tokens = feature.tokens[pred.start_index:(pred.end_index + 1)]
orig_doc_start = feature.token_to_orig_map[pred.start_index]
orig_doc_end = feature.token_to_orig_map[pred.end_index]
orig_tokens = example.doc_tokens[orig_doc_start:(orig_doc_end + 1)]
tok_text = " ".join(tok_tokens)
# De-tokenize WordPieces that have been split off.
tok_text = tok_text.replace(" ##", "")
tok_text = tok_text.replace("##", "")
# Clean whitespace
tok_text = tok_text.strip()
tok_text = " ".join(tok_text.split())
orig_text = " ".join(orig_tokens)
final_text = get_final_text(tok_text, orig_text, do_lower_case, verbose_logging)
if final_text in seen_predictions:
continue
seen_predictions[final_text] = True
else:
final_text = ""
seen_predictions[final_text] = True
nbest.append(
_NbestPrediction(
text=final_text,
start_logit=pred.start_logit,
end_logit=pred.end_logit))
# if we didn't include the empty option in the n-best, include it
if version_2_with_negative:
if "" not in seen_predictions:
nbest.append(
_NbestPrediction(
text="", start_logit=null_start_logit,
end_logit=null_end_logit))
# In very rare edge cases we could have no valid predictions. So we
# just create a nonce prediction in this case to avoid failure.
if not nbest:
nbest.append(
_NbestPrediction(text="empty", start_logit=0.0, end_logit=0.0))
assert len(nbest) >= 1
total_scores = []
best_non_null_entry = None
for entry in nbest:
total_scores.append(entry.start_logit + entry.end_logit)
if not best_non_null_entry:
if entry.text:
best_non_null_entry = entry
probs = _compute_softmax(total_scores)
nbest_json = []
for (i, entry) in enumerate(nbest):
output = collections.OrderedDict()
output["text"] = entry.text
output["probability"] = probs[i]
output["start_logit"] = entry.start_logit
output["end_logit"] = entry.end_logit
nbest_json.append(output)
assert len(nbest_json) >= 1
if not version_2_with_negative:
all_predictions[example.qas_id] = nbest_json[0]["text"]
else:
# predict "" iff the null score - the score of best non-null > threshold
score_diff = score_null - best_non_null_entry.start_logit - (
best_non_null_entry.end_logit)
scores_diff_json[example.qas_id] = score_diff
try:
null_score_diff_threshold = FLAGS.null_score_diff_threshold
except:
null_score_diff_threshold = 0.0
if score_diff > null_score_diff_threshold:
all_predictions[example.qas_id] = ""
else:
all_predictions[example.qas_id] = best_non_null_entry.text
all_nbest_json[example.qas_id] = nbest_json
return all_predictions, all_nbest_json, scores_diff_json
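# A worked example of the null-answer rule above (logits assumed): with
# score_null == 2.0 and a best non-null entry scoring 1.5 + 1.0, score_diff
# is 2.0 - 2.5 == -0.5, which is not above the default threshold of 0.0, so
# the non-null answer text is kept.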
def write_predictions(all_examples, all_features, all_results, n_best_size,
max_answer_length, do_lower_case, output_prediction_file,
output_nbest_file, output_null_log_odds_file,
version_2_with_negative, verbose_logging):
"""Write final predictions to the json file and log-odds of null if needed."""
tf.compat.v1.logging.info("Writing predictions to: %s" % (output_prediction_file))
tf.compat.v1.logging.info("Writing nbest to: %s" % (output_nbest_file))
all_predictions, all_nbest_json, scores_diff_json = get_predictions(all_examples, all_features,
all_results, n_best_size, max_answer_length, do_lower_case, version_2_with_negative, verbose_logging)
with tf.io.gfile.GFile(output_prediction_file, "w") as writer:
writer.write(json.dumps(all_predictions, indent=4) + "\n")
with tf.io.gfile.GFile(output_nbest_file, "w") as writer:
writer.write(json.dumps(all_nbest_json, indent=4) + "\n")
if version_2_with_negative:
with tf.io.gfile.GFile(output_null_log_odds_file, "w") as writer:
writer.write(json.dumps(scores_diff_json, indent=4) + "\n")
def get_final_text(pred_text, orig_text, do_lower_case, verbose_logging):
"""Project the tokenized prediction back to the original text."""
# When we created the data, we kept track of the alignment between original
# (whitespace tokenized) tokens and our WordPiece tokenized tokens. So
# now `orig_text` contains the span of our original text corresponding to the
# span that we predicted.
#
# However, `orig_text` may contain extra characters that we don't want in
# our prediction.
#
# For example, let's say:
# pred_text = steve smith
# orig_text = Steve Smith's
#
# We don't want to return `orig_text` because it contains the extra "'s".
#
# We don't want to return `pred_text` because it's already been normalized
# (the SQuAD eval script also does punctuation stripping/lower casing but
# our tokenizer does additional normalization like stripping accent
# characters).
#
# What we really want to return is "Steve Smith".
#
# Therefore, we have to apply a semi-complicated alignment heuristic between
# `pred_text` and `orig_text` to get a character-to-character alignment. This
# can fail in certain cases in which case we just return `orig_text`.
def _strip_spaces(text):
ns_chars = []
ns_to_s_map = collections.OrderedDict()
for (i, c) in enumerate(text):
if c == " ":
continue
ns_to_s_map[len(ns_chars)] = i
ns_chars.append(c)
ns_text = "".join(ns_chars)
return (ns_text, ns_to_s_map)
# We first tokenize `orig_text`, strip whitespace from the result
# and `pred_text`, and check if they are the same length. If they are
# NOT the same length, the heuristic has failed. If they are the same
# length, we assume the characters are one-to-one aligned.
tokenizer = tokenization.BasicTokenizer(do_lower_case=do_lower_case)
tok_text = " ".join(tokenizer.tokenize(orig_text))
start_position = tok_text.find(pred_text)
if start_position == -1:
if verbose_logging:
tf.compat.v1.logging.info(
"Unable to find text: '%s' in '%s'" % (pred_text, orig_text))
return orig_text
end_position = start_position + len(pred_text) - 1
(orig_ns_text, orig_ns_to_s_map) = _strip_spaces(orig_text)
(tok_ns_text, tok_ns_to_s_map) = _strip_spaces(tok_text)
if len(orig_ns_text) != len(tok_ns_text):
if verbose_logging:
tf.compat.v1.logging.info("Length not equal after stripping spaces: '%s' vs '%s'",
orig_ns_text, tok_ns_text)
return orig_text
# We then project the characters in `pred_text` back to `orig_text` using
# the character-to-character alignment.
tok_s_to_ns_map = {}
for (i, tok_index) in six.iteritems(tok_ns_to_s_map):
tok_s_to_ns_map[tok_index] = i
orig_start_position = None
if start_position in tok_s_to_ns_map:
ns_start_position = tok_s_to_ns_map[start_position]
if ns_start_position in orig_ns_to_s_map:
orig_start_position = orig_ns_to_s_map[ns_start_position]
if orig_start_position is None:
if verbose_logging:
tf.compat.v1.logging.info("Couldn't map start position")
return orig_text
orig_end_position = None
if end_position in tok_s_to_ns_map:
ns_end_position = tok_s_to_ns_map[end_position]
if ns_end_position in orig_ns_to_s_map:
orig_end_position = orig_ns_to_s_map[ns_end_position]
if orig_end_position is None:
if verbose_logging:
tf.compat.v1.logging.info("Couldn't map end position")
return orig_text
output_text = orig_text[orig_start_position:(orig_end_position + 1)]
return output_text
def _get_best_indexes(logits, n_best_size):
"""Get the n-best logits from a list."""
index_and_score = sorted(enumerate(logits), key=lambda x: x[1], reverse=True)
best_indexes = []
for i in range(len(index_and_score)):
if i >= n_best_size:
break
best_indexes.append(index_and_score[i][0])
return best_indexes
def _compute_softmax(scores):
"""Compute softmax probability over raw logits."""
if not scores:
return []
max_score = None
for score in scores:
if max_score is None or score > max_score:
max_score = score
exp_scores = []
total_sum = 0.0
for score in scores:
x = math.exp(score - max_score)
exp_scores.append(x)
total_sum += x
probs = []
for score in exp_scores:
probs.append(score / total_sum)
return probs
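# Minimal sanity check (values assumed): scores [1.0, 2.0] shift to
# [-1.0, 0.0] after subtracting the max, giving exp values of roughly
# [0.3679, 1.0] and probabilities [0.2689, 0.7311]. Subtracting the max
# first keeps math.exp from overflowing on large logits.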
def validate_flags_or_throw(bert_config):
"""Validate the input FLAGS or throw an exception."""
tokenization.validate_case_matches_checkpoint(FLAGS.do_lower_case,
FLAGS.init_checkpoint)
if not FLAGS.do_train and not FLAGS.do_predict and not FLAGS.export_triton:
raise ValueError("At least one of `do_train` or `do_predict` or `export_SavedModel` must be True.")
if FLAGS.do_train:
if not FLAGS.train_file:
raise ValueError(
"If `do_train` is True, then `train_file` must be specified.")
if FLAGS.do_predict:
if not FLAGS.predict_file:
raise ValueError(
"If `do_predict` is True, then `predict_file` must be specified.")
if FLAGS.max_seq_length > bert_config.max_position_embeddings:
raise ValueError(
"Cannot use sequence length %d because the BERT model "
"was only trained up to sequence length %d" %
(FLAGS.max_seq_length, bert_config.max_position_embeddings))
if FLAGS.max_seq_length <= FLAGS.max_query_length + 3:
raise ValueError(
"The max_seq_length (%d) must be greater than max_query_length "
"(%d) + 3" % (FLAGS.max_seq_length, FLAGS.max_query_length))
def export_model(estimator, export_dir, init_checkpoint):
"""Exports a checkpoint in SavedModel format in a directory structure compatible with Triton."""
def serving_input_fn():
label_ids = tf.placeholder(tf.int32, [None,], name='unique_ids')
input_ids = tf.placeholder(tf.int32, [None, FLAGS.max_seq_length], name='input_ids')
input_mask = tf.placeholder(tf.int32, [None, FLAGS.max_seq_length], name='input_mask')
segment_ids = tf.placeholder(tf.int32, [None, FLAGS.max_seq_length], name='segment_ids')
input_fn = tf.estimator.export.build_raw_serving_input_receiver_fn({
'unique_ids': label_ids,
'input_ids': input_ids,
'input_mask': input_mask,
'segment_ids': segment_ids,
})()
return input_fn
saved_dir = estimator.export_savedmodel(
export_dir,
serving_input_fn,
assets_extra=None,
as_text=False,
checkpoint_path=init_checkpoint,
strip_default_attrs=False)
model_name = FLAGS.triton_model_name
model_folder = export_dir + "/triton_models/" + model_name
version_folder = model_folder + "/" + str(FLAGS.triton_model_version)
final_model_folder = version_folder + "/model.savedmodel"
if not os.path.exists(version_folder):
os.makedirs(version_folder)
if (not os.path.exists(final_model_folder)):
os.rename(saved_dir, final_model_folder)
print("Model saved to dir", final_model_folder)
else:
if (FLAGS.triton_model_overwrite):
shutil.rmtree(final_model_folder)
os.rename(saved_dir, final_model_folder)
print("WARNING: Existing model was overwritten. Model dir: {}".format(final_model_folder))
else:
print("ERROR: Could not save Triton model. Folder already exists. Use '--triton_model_overwrite=True' if you would like to overwrite an existing model. Model dir: {}".format(final_model_folder))
return
# Now build the config for Triton. Check to make sure we can overwrite it, if it exists
config_filename = os.path.join(model_folder, "config.pbtxt")
optimization_str = ""
if FLAGS.amp:
optimization_str = r"""
optimization {
execution_accelerators
{
gpu_execution_accelerator :
[ {
name : "auto_mixed_precision"
} ]
}
}"""
if (os.path.exists(config_filename) and not FLAGS.triton_model_overwrite):
print("ERROR: Could not save Triton model config. Config file already exists. Use '--triton_model_overwrite=True' if you would like to overwrite an existing model config. Model config: {}".format(config_filename))
return
config_template = r"""
name: "{model_name}"
platform: "tensorflow_savedmodel"
max_batch_size: {max_batch_size}
{optimization_str}
input [
{{
name: "unique_ids"
data_type: TYPE_INT32
dims: [ 1 ]
reshape: {{ shape: [ ] }}
}},
{{
name: "segment_ids"
data_type: TYPE_INT32
dims: {seq_length}
}},
{{
name: "input_ids"
data_type: TYPE_INT32
dims: {seq_length}
}},
{{
name: "input_mask"
data_type: TYPE_INT32
dims: {seq_length}
}}
]
output [
{{
name: "end_logits"
data_type: TYPE_FP32
dims: {seq_length}
}},
{{
name: "start_logits"
data_type: TYPE_FP32
dims: {seq_length}
}}
]
{dynamic_batching}
instance_group [
{{
count: {engine_count}
}}
]"""
batching_str = ""
max_batch_size = FLAGS.triton_max_batch_size
if (FLAGS.triton_dyn_batching_delay > 0):
# Use only full and half full batches
pref_batch_size = [int(max_batch_size / 2.0), max_batch_size]
batching_str = r"""
dynamic_batching {{
preferred_batch_size: [{0}]
max_queue_delay_microseconds: {1}
}}""".format(", ".join([str(x) for x in pref_batch_size]), int(FLAGS.triton_dyn_batching_delay * 1000.0))
config_values = {
"model_name": model_name,
"max_batch_size": max_batch_size,
"seq_length": FLAGS.max_seq_length,
"dynamic_batching": batching_str,
"engine_count": FLAGS.triton_engine_count,
"optimization_str":optimization_str,
}
with open(model_folder + "/config.pbtxt", "w") as file:
final_config_str = config_template.format_map(config_values)
file.write(final_config_str)
def main(_):
setup_xla_flags()
tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO)
dllogging = utils.dllogger_class.dllogger_class(FLAGS.dllog_path)
if FLAGS.horovod:
hvd.init()
bert_config = modeling.BertConfig.from_json_file(FLAGS.bert_config_file)
validate_flags_or_throw(bert_config)
tf.io.gfile.makedirs(FLAGS.output_dir)
tokenizer = tokenization.FullTokenizer(
vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case)
master_process = True
training_hooks = []
global_batch_size = FLAGS.train_batch_size * FLAGS.num_accumulation_steps
hvd_rank = 0
config = tf.compat.v1.ConfigProto()
learning_rate = FLAGS.learning_rate
if FLAGS.horovod:
tf.compat.v1.logging.info("Multi-GPU training with TF Horovod")
tf.compat.v1.logging.info("hvd.size() = %d hvd.rank() = %d", hvd.size(), hvd.rank())
global_batch_size = FLAGS.train_batch_size * hvd.size() * FLAGS.num_accumulation_steps
learning_rate = learning_rate * hvd.size()
master_process = (hvd.rank() == 0)
hvd_rank = hvd.rank()
config.gpu_options.visible_device_list = str(hvd.local_rank())
set_affinity(hvd.local_rank())
if hvd.size() > 1:
training_hooks.append(hvd.BroadcastGlobalVariablesHook(0))
if FLAGS.use_xla:
config.graph_options.optimizer_options.global_jit_level = tf.compat.v1.OptimizerOptions.ON_1
if FLAGS.amp:
tf.enable_resource_variables()
run_config = tf.estimator.RunConfig(
model_dir=FLAGS.output_dir if master_process else None,
session_config=config,
save_checkpoints_steps=FLAGS.save_checkpoints_steps if master_process else None,
save_summary_steps=FLAGS.save_checkpoints_steps if master_process else None,
log_step_count_steps=FLAGS.display_loss_steps,
keep_checkpoint_max=1)
if master_process:
tf.compat.v1.logging.info("***** Configuaration *****")
for key in FLAGS.__flags.keys():
tf.compat.v1.logging.info(' {}: {}'.format(key, getattr(FLAGS, key)))
tf.compat.v1.logging.info("**************************")
train_examples = None
num_train_steps = None
num_warmup_steps = None
training_hooks.append(LogTrainRunHook(global_batch_size, hvd_rank, FLAGS.save_checkpoints_steps))
# Prepare Training Data
if FLAGS.do_train:
train_examples = read_squad_examples(
input_file=FLAGS.train_file, is_training=True,
version_2_with_negative=FLAGS.version_2_with_negative)
num_train_steps = int(
len(train_examples) / global_batch_size * FLAGS.num_train_epochs)
num_warmup_steps = int(num_train_steps * FLAGS.warmup_proportion)
# Pre-shuffle the input to avoid having to make a very large shuffle
# buffer in the `input_fn`.
rng = random.Random(12345)
rng.shuffle(train_examples)
start_index = 0
end_index = len(train_examples)
tmp_filenames = [os.path.join(FLAGS.output_dir, "train.tf_record")]
if FLAGS.horovod:
tmp_filenames = [os.path.join(FLAGS.output_dir, "train.tf_record{}".format(i)) for i in range(hvd.size())]
num_examples_per_rank = len(train_examples) // hvd.size()
remainder = len(train_examples) % hvd.size()
if hvd.rank() < remainder:
start_index = hvd.rank() * (num_examples_per_rank+1)
end_index = start_index + num_examples_per_rank + 1
else:
start_index = hvd.rank() * num_examples_per_rank + remainder
end_index = start_index + (num_examples_per_rank)
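# Worked example of the sharding above (numbers assumed): with 10 examples
# and hvd.size() == 4, num_examples_per_rank is 2 and remainder is 2, so
# ranks 0-1 take slices [0, 3) and [3, 6) while ranks 2-3 take [6, 8) and
# [8, 10), covering all examples exactly once.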
model_fn = model_fn_builder(
bert_config=bert_config,
init_checkpoint=FLAGS.init_checkpoint,
learning_rate=learning_rate,
num_train_steps=num_train_steps,
num_warmup_steps=num_warmup_steps,
hvd=None if not FLAGS.horovod else hvd,
amp=FLAGS.amp)
estimator = tf.estimator.Estimator(
model_fn=model_fn,
config=run_config)
if FLAGS.do_train:
# We write to a temporary file to avoid storing very large constant tensors
# in memory.
train_writer = FeatureWriter(
filename=tmp_filenames[hvd_rank],
is_training=True)
convert_examples_to_features(
examples=train_examples[start_index:end_index],
tokenizer=tokenizer,
max_seq_length=FLAGS.max_seq_length,
doc_stride=FLAGS.doc_stride,
max_query_length=FLAGS.max_query_length,
is_training=True,
output_fn=train_writer.process_feature,
verbose_logging=FLAGS.verbose_logging)
train_writer.close()
tf.compat.v1.logging.info("***** Running training *****")
tf.compat.v1.logging.info(" Num orig examples = %d", end_index - start_index)
tf.compat.v1.logging.info(" Num split examples = %d", train_writer.num_features)
tf.compat.v1.logging.info(" Batch size = %d", FLAGS.train_batch_size)
tf.compat.v1.logging.info(" Num steps = %d", num_train_steps)
tf.compat.v1.logging.info(" LR = %f", learning_rate)
del train_examples
train_input_fn = input_fn_builder(
input_file=tmp_filenames,
batch_size=FLAGS.train_batch_size,
seq_length=FLAGS.max_seq_length,
is_training=True,
drop_remainder=True,
hvd=None if not FLAGS.horovod else hvd)
train_start_time = time.time()
estimator.train(input_fn=train_input_fn, hooks=training_hooks, max_steps=num_train_steps)
train_time_elapsed = time.time() - train_start_time
train_time_wo_overhead = training_hooks[-1].total_time
avg_sentences_per_second = num_train_steps * global_batch_size * 1.0 / train_time_elapsed
ss_sentences_per_second = (num_train_steps - training_hooks[-1].skipped) * global_batch_size * 1.0 / train_time_wo_overhead
if master_process:
tf.compat.v1.logging.info("-----------------------------")
tf.compat.v1.logging.info("Total Training Time = %0.2f for Sentences = %d", train_time_elapsed,
num_train_steps * global_batch_size)
tf.compat.v1.logging.info("Total Training Time W/O Overhead = %0.2f for Sentences = %d", train_time_wo_overhead,
(num_train_steps - training_hooks[-1].skipped) * global_batch_size)
tf.compat.v1.logging.info("Throughput Average (sentences/sec) with overhead = %0.2f", avg_sentences_per_second)
tf.compat.v1.logging.info("Throughput Average (sentences/sec) = %0.2f", ss_sentences_per_second)
dllogging.logger.log(step=(), data={"throughput_train": ss_sentences_per_second}, verbosity=Verbosity.DEFAULT)
tf.compat.v1.logging.info("-----------------------------")
if FLAGS.export_triton and master_process:
export_model(estimator, FLAGS.output_dir, FLAGS.init_checkpoint)
if FLAGS.do_predict and master_process:
eval_examples = read_squad_examples(
input_file=FLAGS.predict_file, is_training=False,
version_2_with_negative=FLAGS.version_2_with_negative)
# Perform evaluation on subset, useful for profiling
if FLAGS.num_eval_iterations is not None:
eval_examples = eval_examples[:FLAGS.num_eval_iterations*FLAGS.predict_batch_size]
eval_writer = FeatureWriter(
filename=os.path.join(FLAGS.output_dir, "eval.tf_record"),
is_training=False)
eval_features = []
def append_feature(feature):
eval_features.append(feature)
eval_writer.process_feature(feature)
convert_examples_to_features(
examples=eval_examples,
tokenizer=tokenizer,
max_seq_length=FLAGS.max_seq_length,
doc_stride=FLAGS.doc_stride,
max_query_length=FLAGS.max_query_length,
is_training=False,
output_fn=append_feature,
verbose_logging=FLAGS.verbose_logging)
eval_writer.close()
tf.compat.v1.logging.info("***** Running predictions *****")
tf.compat.v1.logging.info(" Num orig examples = %d", len(eval_examples))
tf.compat.v1.logging.info(" Num split examples = %d", len(eval_features))
tf.compat.v1.logging.info(" Batch size = %d", FLAGS.predict_batch_size)
predict_input_fn = input_fn_builder(
input_file=eval_writer.filename,
batch_size=FLAGS.predict_batch_size,
seq_length=FLAGS.max_seq_length,
is_training=False,
drop_remainder=False)
all_results = []
eval_hooks = [LogEvalRunHook(FLAGS.predict_batch_size)]
eval_start_time = time.time()
for result in estimator.predict(
predict_input_fn, yield_single_examples=True, hooks=eval_hooks):
if len(all_results) % 1000 == 0:
tf.compat.v1.logging.info("Processing example: %d" % (len(all_results)))
unique_id = int(result["unique_ids"])
start_logits = [float(x) for x in result["start_logits"].flat]
end_logits = [float(x) for x in result["end_logits"].flat]
all_results.append(
RawResult(
unique_id=unique_id,
start_logits=start_logits,
end_logits=end_logits))
eval_time_elapsed = time.time() - eval_start_time
time_list = eval_hooks[-1].time_list
time_list.sort()
# Removing outliers (init/warmup) in throughput computation.
eval_time_wo_overhead = sum(time_list[:int(len(time_list) * 0.99)])
num_sentences = (int(len(time_list) * 0.99)) * FLAGS.predict_batch_size
avg = np.mean(time_list)
cf_50 = max(time_list[:int(len(time_list) * 0.50)])
cf_90 = max(time_list[:int(len(time_list) * 0.90)])
cf_95 = max(time_list[:int(len(time_list) * 0.95)])
cf_99 = max(time_list[:int(len(time_list) * 0.99)])
cf_100 = max(time_list[:int(len(time_list) * 1)])
ss_sentences_per_second = num_sentences * 1.0 / eval_time_wo_overhead
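# Note on the statistics above (illustrative): with 100 sorted per-batch
# latencies, cf_95 is the largest of the first 95 entries, i.e. the
# 95th-percentile latency, and the slowest 1% of batches is excluded from
# eval_time_wo_overhead so init/warmup outliers do not skew throughput.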
tf.compat.v1.logging.info("-----------------------------")
tf.compat.v1.logging.info("Total Inference Time = %0.2f for Sentences = %d", eval_time_elapsed,
eval_hooks[-1].count * FLAGS.predict_batch_size)
tf.compat.v1.logging.info("Total Inference Time W/O Overhead = %0.2f for Sentences = %d", eval_time_wo_overhead,
num_sentences)
tf.compat.v1.logging.info("Summary Inference Statistics")
tf.compat.v1.logging.info("Batch size = %d", FLAGS.predict_batch_size)
tf.compat.v1.logging.info("Sequence Length = %d", FLAGS.max_seq_length)
tf.compat.v1.logging.info("Precision = %s", "fp16" if FLAGS.amp else "fp32")
tf.compat.v1.logging.info("Latency Confidence Level 50 (ms) = %0.2f", cf_50 * 1000)
tf.compat.v1.logging.info("Latency Confidence Level 90 (ms) = %0.2f", cf_90 * 1000)
tf.compat.v1.logging.info("Latency Confidence Level 95 (ms) = %0.2f", cf_95 * 1000)
tf.compat.v1.logging.info("Latency Confidence Level 99 (ms) = %0.2f", cf_99 * 1000)
tf.compat.v1.logging.info("Latency Confidence Level 100 (ms) = %0.2f", cf_100 * 1000)
tf.compat.v1.logging.info("Latency Average (ms) = %0.2f", avg * 1000)
tf.compat.v1.logging.info("Throughput Average (sentences/sec) = %0.2f", ss_sentences_per_second)
dllogging.logger.log(step=(), data={"throughput_val": ss_sentences_per_second}, verbosity=Verbosity.DEFAULT)
tf.compat.v1.logging.info("-----------------------------")
output_prediction_file = os.path.join(FLAGS.output_dir, "predictions.json")
output_nbest_file = os.path.join(FLAGS.output_dir, "nbest_predictions.json")
output_null_log_odds_file = os.path.join(FLAGS.output_dir, "null_odds.json")
write_predictions(eval_examples, eval_features, all_results,
FLAGS.n_best_size, FLAGS.max_answer_length,
FLAGS.do_lower_case, output_prediction_file,
output_nbest_file, output_null_log_odds_file,
FLAGS.version_2_with_negative, FLAGS.verbose_logging)
if FLAGS.eval_script:
import sys
import subprocess
eval_out = subprocess.check_output([sys.executable, FLAGS.eval_script,
FLAGS.predict_file, output_prediction_file])
scores = str(eval_out).strip()
exact_match = float(scores.split(":")[1].split(",")[0])
f1 = float(scores.split(":")[2].split("}")[0])
dllogging.logger.log(step=(), data={"f1": f1}, verbosity=Verbosity.DEFAULT)
dllogging.logger.log(step=(), data={"exact_match": exact_match}, verbosity=Verbosity.DEFAULT)
print(str(eval_out))
if __name__ == "__main__":
FLAGS = extract_run_squad_flags()
tf.app.run() |
CUDA-Optimized/FastSpeech/fastspeech/utils | utils | tensorboard | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the NVIDIA CORPORATION nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import matplotlib.pyplot as plt
import numpy as np
import cv2
import data as global_data
plt.switch_backend('Agg')
def image_plot(x, name='image'):
fig, ax = plt.subplots()
ax.imshow(x, cmap='magma', aspect='auto')
fig.canvas.draw()
buf = np.array(fig.canvas.renderer._renderer)
plt.clf()
plt.close('all')
cv2.imshow(name, buf)
cv2.waitKey(0)
def plot_to_buf(x, align=True):
fig, ax = plt.subplots()
ax.plot(x)
if align:
ax.set_ylim([-1, 1])
fig.canvas.draw()
im = np.array(fig.canvas.renderer._renderer)
plt.clf()
plt.close('all')
return np.rollaxis(im[..., :3], 2)
def imshow_to_buf(x, scale01=False):
def softmax(x):
"""Compute softmax values for each sets of scores in x."""
return np.exp(x) / np.sum(np.exp(x), axis=0)
if scale01:
x = (x - x.min()) / (x.max() - x.min())
if x.max() > 1.:
x = softmax(x)
if len(x.shape) == 3:
x = x[0]
fig, ax = plt.subplots()
ax.imshow(x, cmap='magma', aspect='auto')
fig.canvas.draw()
im = np.array(fig.canvas.renderer._renderer)
plt.clf()
plt.close('all')
return np.rollaxis(im[..., :3], 2)
def origin_to_chrs(target):
results = []
for t in target:
idx = t - 1 if t - 1 >= 0 else 0
if idx < len(global_data.idx2chr):
results.append(global_data.idx2chr[idx])
else:
break
return ''.join(results) |
PyTorch/DrugDiscovery/MoFlow/scripts | scripts | predict | #!/bin/bash
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
bs=${1:-512}
prec=${2:-amp}
flags="${@:3}"
cmd="python \
/workspace/moflow_pyt/moflow/runtime/generate.py \
--batch_size ${bs} \
--jit \
--correct_validity \
${flags} \
"
if [ $prec == "amp" ]; then
cmd="${cmd} --amp"
fi
set -x
bash -c "${cmd}"
|
PaddlePaddle/LanguageModeling/BERT/vocab | vocab | bert-base-uncased-vocab | [PAD]
[unused0]
[unused1]
[unused2]
[unused3]
[unused4]
[unused5]
[unused6]
[unused7]
[unused8]
[unused9]
[unused10]
[unused11]
[unused12]
[unused13]
[unused14]
[unused15]
[unused16]
[unused17]
[unused18]
[unused19]
[unused20]
[unused21]
[unused22]
[unused23]
[unused24]
[unused25]
[unused26]
[unused27]
[unused28]
[unused29]
[unused30]
[unused31]
[unused32]
[unused33]
[unused34]
[unused35]
[unused36]
[unused37]
[unused38]
[unused39]
[unused40]
[unused41]
[unused42]
[unused43]
[unused44]
[unused45]
[unused46]
[unused47]
[unused48]
[unused49]
[unused50]
[unused51]
[unused52]
[unused53]
[unused54]
[unused55]
[unused56]
[unused57]
[unused58]
[unused59]
[unused60]
[unused61]
[unused62]
[unused63]
[unused64]
[unused65]
[unused66]
[unused67]
[unused68]
[unused69]
[unused70]
[unused71]
[unused72]
[unused73]
[unused74]
[unused75]
[unused76]
[unused77]
[unused78]
[unused79]
[unused80]
[unused81]
[unused82]
[unused83]
[unused84]
[unused85]
[unused86]
[unused87]
[unused88]
[unused89]
[unused90]
[unused91]
[unused92]
[unused93]
[unused94]
[unused95]
[unused96]
[unused97]
[unused98]
[UNK]
[CLS]
[SEP]
[MASK]
[unused99]
[unused100]
[unused101]
[unused102]
[unused103]
[unused104]
[unused105]
[unused106]
[unused107]
[unused108]
[unused109]
[unused110]
[unused111]
[unused112]
[unused113]
[unused114]
[unused115]
[unused116]
[unused117]
[unused118]
[unused119]
[unused120]
[unused121]
[unused122]
[unused123]
[unused124]
[unused125]
[unused126]
[unused127]
[unused128]
[unused129]
[unused130]
[unused131]
[unused132]
[unused133]
[unused134]
[unused135]
[unused136]
[unused137]
[unused138]
[unused139]
[unused140]
[unused141]
[unused142]
[unused143]
[unused144]
[unused145]
[unused146]
[unused147]
[unused148]
[unused149]
[unused150]
[unused151]
[unused152]
[unused153]
[unused154]
[unused155]
[unused156]
[unused157]
[unused158]
[unused159]
[unused160]
[unused161]
[unused162]
[unused163]
[unused164]
[unused165]
[unused166]
[unused167]
[unused168]
[unused169]
[unused170]
[unused171]
[unused172]
[unused173]
[unused174]
[unused175]
[unused176]
[unused177]
[unused178]
[unused179]
[unused180]
[unused181]
[unused182]
[unused183]
[unused184]
[unused185]
[unused186]
[unused187]
[unused188]
[unused189]
[unused190]
[unused191]
[unused192]
[unused193]
[unused194]
[unused195]
[unused196]
[unused197]
[unused198]
[unused199]
[unused200]
[unused201]
[unused202]
[unused203]
[unused204]
[unused205]
[unused206]
[unused207]
[unused208]
[unused209]
[unused210]
[unused211]
[unused212]
[unused213]
[unused214]
[unused215]
[unused216]
[unused217]
[unused218]
[unused219]
[unused220]
[unused221]
[unused222]
[unused223]
[unused224]
[unused225]
[unused226]
[unused227]
[unused228]
[unused229]
[unused230]
[unused231]
[unused232]
[unused233]
[unused234]
[unused235]
[unused236]
[unused237]
[unused238]
[unused239]
[unused240]
[unused241]
[unused242]
[unused243]
[unused244]
[unused245]
[unused246]
[unused247]
[unused248]
[unused249]
[unused250]
[unused251]
[unused252]
[unused253]
[unused254]
[unused255]
[unused256]
[unused257]
[unused258]
[unused259]
[unused260]
[unused261]
[unused262]
[unused263]
[unused264]
[unused265]
[unused266]
[unused267]
[unused268]
[unused269]
[unused270]
[unused271]
[unused272]
[unused273]
[unused274]
[unused275]
[unused276]
[unused277]
[unused278]
[unused279]
[unused280]
[unused281]
[unused282]
[unused283]
[unused284]
[unused285]
[unused286]
[unused287]
[unused288]
[unused289]
[unused290]
[unused291]
[unused292]
[unused293]
[unused294]
[unused295]
[unused296]
[unused297]
[unused298]
[unused299]
[unused300]
[unused301]
[unused302]
[unused303]
[unused304]
[unused305]
[unused306]
[unused307]
[unused308]
[unused309]
[unused310]
[unused311]
[unused312]
[unused313]
[unused314]
[unused315]
[unused316]
[unused317]
[unused318]
[unused319]
[unused320]
[unused321]
[unused322]
[unused323]
[unused324]
[unused325]
[unused326]
[unused327]
[unused328]
[unused329]
[unused330]
[unused331]
[unused332]
[unused333]
[unused334]
[unused335]
[unused336]
[unused337]
[unused338]
[unused339]
[unused340]
[unused341]
[unused342]
[unused343]
[unused344]
[unused345]
[unused346]
[unused347]
[unused348]
[unused349]
[unused350]
[unused351]
[unused352]
[unused353]
[unused354]
[unused355]
[unused356]
[unused357]
[unused358]
[unused359]
[unused360]
[unused361]
[unused362]
[unused363]
[unused364]
[unused365]
[unused366]
[unused367]
[unused368]
[unused369]
[unused370]
[unused371]
[unused372]
[unused373]
[unused374]
[unused375]
[unused376]
[unused377]
[unused378]
[unused379]
[unused380]
[unused381]
[unused382]
[unused383]
[unused384]
[unused385]
[unused386]
[unused387]
[unused388]
[unused389]
[unused390]
[unused391]
[unused392]
[unused393]
[unused394]
[unused395]
[unused396]
[unused397]
[unused398]
[unused399]
[unused400]
[unused401]
[unused402]
[unused403]
[unused404]
[unused405]
[unused406]
[unused407]
[unused408]
[unused409]
[unused410]
[unused411]
[unused412]
[unused413]
[unused414]
[unused415]
[unused416]
[unused417]
[unused418]
[unused419]
[unused420]
[unused421]
[unused422]
[unused423]
[unused424]
[unused425]
[unused426]
[unused427]
[unused428]
[unused429]
[unused430]
[unused431]
[unused432]
[unused433]
[unused434]
[unused435]
[unused436]
[unused437]
[unused438]
[unused439]
[unused440]
[unused441]
[unused442]
[unused443]
[unused444]
[unused445]
[unused446]
[unused447]
[unused448]
[unused449]
[unused450]
[unused451]
[unused452]
[unused453]
[unused454]
[unused455]
[unused456]
[unused457]
[unused458]
[unused459]
[unused460]
[unused461]
[unused462]
[unused463]
[unused464]
[unused465]
[unused466]
[unused467]
[unused468]
[unused469]
[unused470]
[unused471]
[unused472]
[unused473]
[unused474]
[unused475]
[unused476]
[unused477]
[unused478]
[unused479]
[unused480]
[unused481]
[unused482]
[unused483]
[unused484]
[unused485]
[unused486]
[unused487]
[unused488]
[unused489]
[unused490]
[unused491]
[unused492]
[unused493]
[unused494]
[unused495]
[unused496]
[unused497]
[unused498]
[unused499]
[unused500]
[unused501]
[unused502]
[unused503]
[unused504]
[unused505]
[unused506]
[unused507]
[unused508]
[unused509]
[unused510]
[unused511]
[unused512]
[unused513]
[unused514]
[unused515]
[unused516]
[unused517]
[unused518]
[unused519]
[unused520]
[unused521]
[unused522]
[unused523]
[unused524]
[unused525]
[unused526]
[unused527]
[unused528]
[unused529]
[unused530]
[unused531]
[unused532]
[unused533]
[unused534]
[unused535]
[unused536]
[unused537]
[unused538]
[unused539]
[unused540]
[unused541]
[unused542]
[unused543]
[unused544]
[unused545]
[unused546]
[unused547]
[unused548]
[unused549]
[unused550]
[unused551]
[unused552]
[unused553]
[unused554]
[unused555]
[unused556]
[unused557]
[unused558]
[unused559]
[unused560]
[unused561]
[unused562]
[unused563]
[unused564]
[unused565]
[unused566]
[unused567]
[unused568]
[unused569]
[unused570]
[unused571]
[unused572]
[unused573]
[unused574]
[unused575]
[unused576]
[unused577]
[unused578]
[unused579]
[unused580]
[unused581]
[unused582]
[unused583]
[unused584]
[unused585]
[unused586]
[unused587]
[unused588]
[unused589]
[unused590]
[unused591]
[unused592]
[unused593]
[unused594]
[unused595]
[unused596]
[unused597]
[unused598]
[unused599]
[unused600]
[unused601]
[unused602]
[unused603]
[unused604]
[unused605]
[unused606]
[unused607]
[unused608]
[unused609]
[unused610]
[unused611]
[unused612]
[unused613]
[unused614]
[unused615]
[unused616]
[unused617]
[unused618]
[unused619]
[unused620]
[unused621]
[unused622]
[unused623]
[unused624]
[unused625]
[unused626]
[unused627]
[unused628]
[unused629]
[unused630]
[unused631]
[unused632]
[unused633]
[unused634]
[unused635]
[unused636]
[unused637]
[unused638]
[unused639]
[unused640]
[unused641]
[unused642]
[unused643]
[unused644]
[unused645]
[unused646]
[unused647]
[unused648]
[unused649]
[unused650]
[unused651]
[unused652]
[unused653]
[unused654]
[unused655]
[unused656]
[unused657]
[unused658]
[unused659]
[unused660]
[unused661]
[unused662]
[unused663]
[unused664]
[unused665]
[unused666]
[unused667]
[unused668]
[unused669]
[unused670]
[unused671]
[unused672]
[unused673]
[unused674]
[unused675]
[unused676]
[unused677]
[unused678]
[unused679]
[unused680]
[unused681]
[unused682]
[unused683]
[unused684]
[unused685]
[unused686]
[unused687]
[unused688]
[unused689]
[unused690]
[unused691]
[unused692]
[unused693]
[unused694]
[unused695]
[unused696]
[unused697]
[unused698]
[unused699]
[unused700]
[unused701]
[unused702]
[unused703]
[unused704]
[unused705]
[unused706]
[unused707]
[unused708]
[unused709]
[unused710]
[unused711]
[unused712]
[unused713]
[unused714]
[unused715]
[unused716]
[unused717]
[unused718]
[unused719]
[unused720]
[unused721]
[unused722]
[unused723]
[unused724]
[unused725]
[unused726]
[unused727]
[unused728]
[unused729]
[unused730]
[unused731]
[unused732]
[unused733]
[unused734]
[unused735]
[unused736]
[unused737]
[unused738]
[unused739]
[unused740]
[unused741]
[unused742]
[unused743]
[unused744]
[unused745]
[unused746]
[unused747]
[unused748]
[unused749]
[unused750]
[unused751]
[unused752]
[unused753]
[unused754]
[unused755]
[unused756]
[unused757]
[unused758]
[unused759]
[unused760]
[unused761]
[unused762]
[unused763]
[unused764]
[unused765]
[unused766]
[unused767]
[unused768]
[unused769]
[unused770]
[unused771]
[unused772]
[unused773]
[unused774]
[unused775]
[unused776]
[unused777]
[unused778]
[unused779]
[unused780]
[unused781]
[unused782]
[unused783]
[unused784]
[unused785]
[unused786]
[unused787]
[unused788]
[unused789]
[unused790]
[unused791]
[unused792]
[unused793]
[unused794]
[unused795]
[unused796]
[unused797]
[unused798]
[unused799]
[unused800]
[unused801]
[unused802]
[unused803]
[unused804]
[unused805]
[unused806]
[unused807]
[unused808]
[unused809]
[unused810]
[unused811]
[unused812]
[unused813]
[unused814]
[unused815]
[unused816]
[unused817]
[unused818]
[unused819]
[unused820]
[unused821]
[unused822]
[unused823]
[unused824]
[unused825]
[unused826]
[unused827]
[unused828]
[unused829]
[unused830]
[unused831]
[unused832]
[unused833]
[unused834]
[unused835]
[unused836]
[unused837]
[unused838]
[unused839]
[unused840]
[unused841]
[unused842]
[unused843]
[unused844]
[unused845]
[unused846]
[unused847]
[unused848]
[unused849]
[unused850]
[unused851]
[unused852]
[unused853]
[unused854]
[unused855]
[unused856]
[unused857]
[unused858]
[unused859]
[unused860]
[unused861]
[unused862]
[unused863]
[unused864]
[unused865]
[unused866]
[unused867]
[unused868]
[unused869]
[unused870]
[unused871]
[unused872]
[unused873]
[unused874]
[unused875]
[unused876]
[unused877]
[unused878]
[unused879]
[unused880]
[unused881]
[unused882]
[unused883]
[unused884]
[unused885]
[unused886]
[unused887]
[unused888]
[unused889]
[unused890]
[unused891]
[unused892]
[unused893]
[unused894]
[unused895]
[unused896]
[unused897]
[unused898]
[unused899]
[unused900]
[unused901]
[unused902]
[unused903]
[unused904]
[unused905]
[unused906]
[unused907]
[unused908]
[unused909]
[unused910]
[unused911]
[unused912]
[unused913]
[unused914]
[unused915]
[unused916]
[unused917]
[unused918]
[unused919]
[unused920]
[unused921]
[unused922]
[unused923]
[unused924]
[unused925]
[unused926]
[unused927]
[unused928]
[unused929]
[unused930]
[unused931]
[unused932]
[unused933]
[unused934]
[unused935]
[unused936]
[unused937]
[unused938]
[unused939]
[unused940]
[unused941]
[unused942]
[unused943]
[unused944]
[unused945]
[unused946]
[unused947]
[unused948]
[unused949]
[unused950]
[unused951]
[unused952]
[unused953]
[unused954]
[unused955]
[unused956]
[unused957]
[unused958]
[unused959]
[unused960]
[unused961]
[unused962]
[unused963]
[unused964]
[unused965]
[unused966]
[unused967]
[unused968]
[unused969]
[unused970]
[unused971]
[unused972]
[unused973]
[unused974]
[unused975]
[unused976]
[unused977]
[unused978]
[unused979]
[unused980]
[unused981]
[unused982]
[unused983]
[unused984]
[unused985]
[unused986]
[unused987]
[unused988]
[unused989]
[unused990]
[unused991]
[unused992]
[unused993]
!
"
#
$
%
&
'
(
)
*
+
,
-
.
/
0
1
2
3
4
5
6
7
8
9
:
;
<
=
>
?
@
[
\
]
^
_
`
a
b
c
d
e
f
g
h
i
j
k
l
m
n
o
p
q
r
s
t
u
v
w
x
y
z
{
|
}
~
¡
¢
£
¤
¥
¦
§
¨
©
ª
«
¬
®
°
±
²
³
´
µ
¶
·
¹
º
»
¼
½
¾
¿
×
ß
æ
ð
÷
ø
þ
đ
ħ
ı
ł
ŋ
œ
ƒ
ɐ
ɑ
ɒ
ɔ
ɕ
ə
ɛ
ɡ
ɣ
ɨ
ɪ
ɫ
ɬ
ɯ
ɲ
ɴ
ɹ
ɾ
ʀ
ʁ
ʂ
ʃ
ʉ
ʊ
ʋ
ʌ
ʎ
ʐ
ʑ
ʒ
ʔ
ʰ
ʲ
ʳ
ʷ
ʸ
ʻ
ʼ
ʾ
ʿ
ˈ
ː
ˡ
ˢ
ˣ
ˤ
α
β
γ
δ
ε
ζ
η
θ
ι
κ
λ
μ
ν
ξ
ο
π
ρ
ς
σ
τ
υ
φ
χ
ψ
ω
а
б
в
г
д
е
ж
з
и
к
л
м
н
о
п
р
с
т
у
ф
х
ц
ч
ш
щ
ъ
ы
ь
э
ю
я
ђ
є
і
ј
љ
њ
ћ
ӏ
ա
բ
գ
դ
ե
թ
ի
լ
կ
հ
մ
յ
ն
ո
պ
ս
վ
տ
ր
ւ
ք
־
א
ב
ג
ד
ה
ו
ז
ח
ט
י
ך
כ
ל
ם
מ
ן
נ
ס
ע
ף
פ
ץ
צ
ק
ר
ש
ת
،
ء
ا
ب
ة
ت
ث
ج
ح
خ
د
ذ
ر
ز
س
ش
ص
ض
ط
ظ
ع
غ
ـ
ف
ق
ك
ل
م
ن
ه
و
ى
ي
ٹ
پ
چ
ک
گ
ں
ھ
ہ
ی
ے
अ
आ
उ
ए
क
ख
ग
च
ज
ट
ड
ण
त
थ
द
ध
न
प
ब
भ
म
य
र
ल
व
श
ष
स
ह
ा
ि
ी
ो
।
॥
ং
অ
আ
ই
উ
এ
ও
ক
খ
গ
চ
ছ
জ
ট
ড
ণ
ত
থ
দ
ধ
ন
প
ব
ভ
ম
য
র
ল
শ
ষ
স
হ
া
ি
ী
ে
க
ச
ட
த
ந
ன
ப
ம
ய
ர
ல
ள
வ
ா
ி
ு
ே
ை
ನ
ರ
ಾ
ක
ය
ර
ල
ව
ා
ก
ง
ต
ท
น
พ
ม
ย
ร
ล
ว
ส
อ
า
เ
་
།
ག
ང
ད
ན
པ
བ
མ
འ
ར
ལ
ས
မ
ა
ბ
გ
დ
ე
ვ
თ
ი
კ
ლ
მ
ნ
ო
რ
ს
ტ
უ
ᄀ
ᄂ
ᄃ
ᄅ
ᄆ
ᄇ
ᄉ
ᄊ
ᄋ
ᄌ
ᄎ
ᄏ
ᄐ
ᄑ
ᄒ
ᅡ
ᅢ
ᅥ
ᅦ
ᅧ
ᅩ
ᅪ
ᅭ
ᅮ
ᅯ
ᅲ
ᅳ
ᅴ
ᅵ
ᆨ
ᆫ
ᆯ
ᆷ
ᆸ
ᆼ
ᴬ
ᴮ
ᴰ
ᴵ
ᴺ
ᵀ
ᵃ
ᵇ
ᵈ
ᵉ
ᵍ
ᵏ
ᵐ
ᵒ
ᵖ
ᵗ
ᵘ
ᵢ
ᵣ
ᵤ
ᵥ
ᶜ
ᶠ
‐
‑
‒
–
—
―
‖
‘
’
‚
“
”
„
†
‡
•
…
‰
′
″
›
‿
⁄
⁰
ⁱ
⁴
⁵
⁶
⁷
⁸
⁹
⁺
⁻
ⁿ
₀
₁
₂
₃
₄
₅
₆
₇
₈
₉
₊
₍
₎
ₐ
ₑ
ₒ
ₓ
ₕ
ₖ
ₗ
ₘ
ₙ
ₚ
ₛ
ₜ
₤
₩
€
₱
₹
ℓ
№
ℝ
™
⅓
⅔
←
↑
→
↓
↔
↦
⇄
⇌
⇒
∂
∅
∆
∇
∈
−
∗
∘
√
∞
∧
∨
∩
∪
≈
≡
≤
≥
⊂
⊆
⊕
⊗
⋅
─
│
■
▪
●
★
☆
☉
♠
♣
♥
♦
♭
♯
⟨
⟩
ⱼ
⺩
⺼
⽥
、
。
〈
〉
《
》
「
」
『
』
〜
あ
い
う
え
お
か
き
く
け
こ
さ
し
す
せ
そ
た
ち
っ
つ
て
と
な
に
ぬ
ね
の
は
ひ
ふ
へ
ほ
ま
み
む
め
も
や
ゆ
よ
ら
り
る
れ
ろ
を
ん
ァ
ア
ィ
イ
ウ
ェ
エ
オ
カ
キ
ク
ケ
コ
サ
シ
ス
セ
タ
チ
ッ
ツ
テ
ト
ナ
ニ
ノ
ハ
ヒ
フ
ヘ
ホ
マ
ミ
ム
メ
モ
ャ
ュ
ョ
ラ
リ
ル
レ
ロ
ワ
ン
・
ー
一
三
上
下
不
世
中
主
久
之
也
事
二
五
井
京
人
亻
仁
介
代
仮
伊
会
佐
侍
保
信
健
元
光
八
公
内
出
分
前
劉
力
加
勝
北
区
十
千
南
博
原
口
古
史
司
合
吉
同
名
和
囗
四
国
國
土
地
坂
城
堂
場
士
夏
外
大
天
太
夫
奈
女
子
学
宀
宇
安
宗
定
宣
宮
家
宿
寺
將
小
尚
山
岡
島
崎
川
州
巿
帝
平
年
幸
广
弘
張
彳
後
御
德
心
忄
志
忠
愛
成
我
戦
戸
手
扌
政
文
新
方
日
明
星
春
昭
智
曲
書
月
有
朝
木
本
李
村
東
松
林
森
楊
樹
橋
歌
止
正
武
比
氏
民
水
氵
氷
永
江
沢
河
治
法
海
清
漢
瀬
火
版
犬
王
生
田
男
疒
発
白
的
皇
目
相
省
真
石
示
社
神
福
禾
秀
秋
空
立
章
竹
糹
美
義
耳
良
艹
花
英
華
葉
藤
行
街
西
見
訁
語
谷
貝
貴
車
軍
辶
道
郎
郡
部
都
里
野
金
鈴
镇
長
門
間
阝
阿
陳
陽
雄
青
面
風
食
香
馬
高
龍
龸
fi
fl
!
(
)
,
-
.
/
:
?
~
the
of
and
in
to
was
he
is
as
for
on
with
that
it
his
by
at
from
her
##s
she
you
had
an
were
but
be
this
are
not
my
they
one
which
or
have
him
me
first
all
also
their
has
up
who
out
been
when
after
there
into
new
two
its
##a
time
would
no
what
about
said
we
over
then
other
so
more
##e
can
if
like
back
them
only
some
could
##i
where
just
##ing
during
before
##n
do
##o
made
school
through
than
now
years
most
world
may
between
down
well
three
##d
year
while
will
##ed
##r
##y
later
##t
city
under
around
did
such
being
used
state
people
part
know
against
your
many
second
university
both
national
##er
these
don
known
off
way
until
re
how
even
get
head
...
didn
##ly
team
american
because
de
##l
born
united
film
since
still
long
work
south
us
became
any
high
again
day
family
see
right
man
eyes
house
season
war
states
including
took
life
north
same
each
called
name
much
place
however
go
four
group
another
found
won
area
here
going
10
away
series
left
home
music
best
make
hand
number
company
several
never
last
john
000
very
album
take
end
good
too
following
released
game
played
little
began
district
##m
old
want
those
side
held
own
early
county
ll
league
use
west
##u
face
think
##es
2010
government
##h
march
came
small
general
town
june
##on
line
based
something
##k
september
thought
looked
along
international
2011
air
july
club
went
january
october
our
august
april
york
12
few
2012
2008
east
show
member
college
2009
father
public
##us
come
men
five
set
station
church
##c
next
former
november
room
party
located
december
2013
age
got
2007
##g
system
let
love
2006
though
every
2014
look
song
water
century
without
body
black
night
within
great
women
single
ve
building
large
population
river
named
band
white
started
##an
once
15
20
should
18
2015
service
top
built
british
open
death
king
moved
local
times
children
february
book
why
11
door
need
president
order
final
road
wasn
although
due
major
died
village
third
knew
2016
asked
turned
st
wanted
say
##p
together
received
main
son
served
different
##en
behind
himself
felt
members
power
football
law
voice
play
##in
near
park
history
30
having
2005
16
##man
saw
mother
##al
army
point
front
help
english
street
art
late
hands
games
award
##ia
young
14
put
published
country
division
across
told
13
often
ever
french
london
center
six
red
2017
led
days
include
light
25
find
tell
among
species
really
according
central
half
2004
form
original
gave
office
making
enough
lost
full
opened
must
included
live
given
german
player
run
business
woman
community
cup
might
million
land
2000
court
development
17
short
round
ii
km
seen
class
story
always
become
sure
research
almost
director
council
la
##2
career
things
using
island
##z
couldn
car
##is
24
close
force
##1
better
free
support
control
field
students
2003
education
married
##b
nothing
worked
others
record
big
inside
level
anything
continued
give
james
##3
military
established
non
returned
feel
does
title
written
thing
feet
william
far
co
association
hard
already
2002
##ra
championship
human
western
100
##na
department
hall
role
various
production
21
19
heart
2001
living
fire
version
##ers
##f
television
royal
##4
produced
working
act
case
society
region
present
radio
period
looking
least
total
keep
england
wife
program
per
brother
mind
special
22
##le
am
works
soon
##6
political
george
services
taken
created
##7
further
able
reached
david
union
joined
upon
done
important
social
information
either
##ic
##x
appeared
position
ground
lead
rock
dark
election
23
board
france
hair
course
arms
site
police
girl
instead
real
sound
##v
words
moment
##te
someone
##8
summer
project
announced
san
less
wrote
past
followed
##5
blue
founded
al
finally
india
taking
records
america
##ne
1999
design
considered
northern
god
stop
battle
toward
european
outside
described
track
today
playing
language
28
call
26
heard
professional
low
australia
miles
california
win
yet
green
##ie
trying
blood
##ton
southern
science
maybe
everything
match
square
27
mouth
video
race
recorded
leave
above
##9
daughter
points
space
1998
museum
change
middle
common
##0
move
tv
post
##ta
lake
seven
tried
elected
closed
ten
paul
minister
##th
months
start
chief
return
canada
person
sea
release
similar
modern
brought
rest
hit
formed
mr
##la
1997
floor
event
doing
thomas
1996
robert
care
killed
training
star
week
needed
turn
finished
railway
rather
news
health
sent
example
ran
term
michael
coming
currently
yes
forces
despite
gold
areas
50
stage
fact
29
dead
says
popular
2018
originally
germany
probably
developed
result
pulled
friend
stood
money
running
mi
signed
word
songs
child
eventually
met
tour
average
teams
minutes
festival
current
deep
kind
1995
decided
usually
eastern
seemed
##ness
episode
bed
added
table
indian
private
charles
route
available
idea
throughout
centre
addition
appointed
style
1994
books
eight
construction
press
mean
wall
friends
remained
schools
study
##ch
##um
institute
oh
chinese
sometimes
events
possible
1992
australian
type
brown
forward
talk
process
food
debut
seat
performance
committee
features
character
arts
herself
else
lot
strong
russian
range
hours
peter
arm
##da
morning
dr
sold
##ry
quickly
directed
1993
guitar
china
##w
31
list
##ma
performed
media
uk
players
smile
##rs
myself
40
placed
coach
province
towards
wouldn
leading
whole
boy
official
designed
grand
census
##el
europe
attack
japanese
henry
1991
##re
##os
cross
getting
alone
action
lower
network
wide
washington
japan
1990
hospital
believe
changed
sister
##ar
hold
gone
sir
hadn
ship
##ka
studies
academy
shot
rights
below
base
bad
involved
kept
largest
##ist
bank
future
especially
beginning
mark
movement
section
female
magazine
plan
professor
lord
longer
##ian
sat
walked
hill
actually
civil
energy
model
families
size
thus
aircraft
completed
includes
data
captain
##or
fight
vocals
featured
richard
bridge
fourth
1989
officer
stone
hear
##ism
means
medical
groups
management
self
lips
competition
entire
lived
technology
leaving
federal
tournament
bit
passed
hot
independent
awards
kingdom
mary
spent
fine
doesn
reported
##ling
jack
fall
raised
itself
stay
true
studio
1988
sports
replaced
paris
systems
saint
leader
theatre
whose
market
capital
parents
spanish
canadian
earth
##ity
cut
degree
writing
bay
christian
awarded
natural
higher
bill
##as
coast
provided
previous
senior
ft
valley
organization
stopped
onto
countries
parts
conference
queen
security
interest
saying
allowed
master
earlier
phone
matter
smith
winning
try
happened
moving
campaign
los
##ley
breath
nearly
mid
1987
certain
girls
date
italian
african
standing
fell
artist
##ted
shows
deal
mine
industry
1986
##ng
everyone
republic
provide
collection
library
student
##ville
primary
owned
older
via
heavy
1st
makes
##able
attention
anyone
africa
##ri
stated
length
ended
fingers
command
staff
skin
foreign
opening
governor
okay
medal
kill
sun
cover
job
1985
introduced
chest
hell
feeling
##ies
success
meet
reason
standard
meeting
novel
1984
trade
source
buildings
##land
rose
guy
goal
##ur
chapter
native
husband
previously
unit
limited
entered
weeks
producer
operations
mountain
takes
covered
forced
related
roman
complete
successful
key
texas
cold
##ya
channel
1980
traditional
films
dance
clear
approximately
500
nine
van
prince
question
active
tracks
ireland
regional
silver
author
personal
sense
operation
##ine
economic
1983
holding
twenty
isbn
additional
speed
hour
edition
regular
historic
places
whom
shook
movie
km²
secretary
prior
report
chicago
read
foundation
view
engine
scored
1982
units
ask
airport
property
ready
immediately
lady
month
listed
contract
##de
manager
themselves
lines
##ki
navy
writer
meant
##ts
runs
##ro
practice
championships
singer
glass
commission
required
forest
starting
culture
generally
giving
access
attended
test
couple
stand
catholic
martin
caught
executive
##less
eye
##ey
thinking
chair
quite
shoulder
1979
hope
decision
plays
defeated
municipality
whether
structure
offered
slowly
pain
ice
direction
##ion
paper
mission
1981
mostly
200
noted
individual
managed
nature
lives
plant
##ha
helped
except
studied
computer
figure
relationship
issue
significant
loss
die
smiled
gun
ago
highest
1972
##am
male
bring
goals
mexico
problem
distance
commercial
completely
location
annual
famous
drive
1976
neck
1978
surface
caused
italy
understand
greek
highway
wrong
hotel
comes
appearance
joseph
double
issues
musical
companies
castle
income
review
assembly
bass
initially
parliament
artists
experience
1974
particular
walk
foot
engineering
talking
window
dropped
##ter
miss
baby
boys
break
1975
stars
edge
remember
policy
carried
train
stadium
bar
sex
angeles
evidence
##ge
becoming
assistant
soviet
1977
upper
step
wing
1970
youth
financial
reach
##ll
actor
numerous
##se
##st
nodded
arrived
##ation
minute
##nt
believed
sorry
complex
beautiful
victory
associated
temple
1968
1973
chance
perhaps
metal
##son
1945
bishop
##et
lee
launched
particularly
tree
le
retired
subject
prize
contains
yeah
theory
empire
##ce
suddenly
waiting
trust
recording
##to
happy
terms
camp
champion
1971
religious
pass
zealand
names
2nd
port
ancient
tom
corner
represented
watch
legal
anti
justice
cause
watched
brothers
45
material
changes
simply
response
louis
fast
##ting
answer
60
historical
1969
stories
straight
create
feature
increased
rate
administration
virginia
el
activities
cultural
overall
winner
programs
basketball
legs
guard
beyond
cast
doctor
mm
flight
results
remains
cost
effect
winter
##ble
larger
islands
problems
chairman
grew
commander
isn
1967
pay
failed
selected
hurt
fort
box
regiment
majority
journal
35
edward
plans
##ke
##ni
shown
pretty
irish
characters
directly
scene
likely
operated
allow
spring
##j
junior
matches
looks
mike
houses
fellow
##tion
beach
marriage
##ham
##ive
rules
oil
65
florida
expected
nearby
congress
sam
peace
recent
iii
wait
subsequently
cell
##do
variety
serving
agreed
please
poor
joe
pacific
attempt
wood
democratic
piece
prime
##ca
rural
mile
touch
appears
township
1964
1966
soldiers
##men
##ized
1965
pennsylvania
closer
fighting
claimed
score
jones
physical
editor
##ous
filled
genus
specific
sitting
super
mom
##va
therefore
supported
status
fear
cases
store
meaning
wales
minor
spain
tower
focus
vice
frank
follow
parish
separate
golden
horse
fifth
remaining
branch
32
presented
stared
##id
uses
secret
forms
##co
baseball
exactly
##ck
choice
note
discovered
travel
composed
truth
russia
ball
color
kiss
dad
wind
continue
ring
referred
numbers
digital
greater
##ns
metres
slightly
direct
increase
1960
responsible
crew
rule
trees
troops
##no
broke
goes
individuals
hundred
weight
creek
sleep
memory
defense
provides
ordered
code
value
jewish
windows
1944
safe
judge
whatever
corps
realized
growing
pre
##ga
cities
alexander
gaze
lies
spread
scott
letter
showed
situation
mayor
transport
watching
workers
extended
##li
expression
normal
##ment
chart
multiple
border
##ba
host
##ner
daily
mrs
walls
piano
##ko
heat
cannot
##ate
earned
products
drama
era
authority
seasons
join
grade
##io
sign
difficult
machine
1963
territory
mainly
##wood
stations
squadron
1962
stepped
iron
19th
##led
serve
appear
sky
speak
broken
charge
knowledge
kilometres
removed
ships
article
campus
simple
##ty
pushed
britain
##ve
leaves
recently
cd
soft
boston
latter
easy
acquired
poland
##sa
quality
officers
presence
planned
nations
mass
broadcast
jean
share
image
influence
wild
offer
emperor
electric
reading
headed
ability
promoted
yellow
ministry
1942
throat
smaller
politician
##by
latin
spoke
cars
williams
males
lack
pop
80
##ier
acting
seeing
consists
##ti
estate
1961
pressure
johnson
newspaper
jr
chris
olympics
online
conditions
beat
elements
walking
vote
##field
needs
carolina
text
featuring
global
block
shirt
levels
francisco
purpose
females
et
dutch
duke
ahead
gas
twice
safety
serious
turning
highly
lieutenant
firm
maria
amount
mixed
daniel
proposed
perfect
agreement
affairs
3rd
seconds
contemporary
paid
1943
prison
save
kitchen
label
administrative
intended
constructed
academic
nice
teacher
races
1956
formerly
corporation
ben
nation
issued
shut
1958
drums
housing
victoria
seems
opera
1959
graduated
function
von
mentioned
picked
build
recognized
shortly
protection
picture
notable
exchange
elections
1980s
loved
percent
racing
fish
elizabeth
garden
volume
hockey
1941
beside
settled
##ford
1940
competed
replied
drew
1948
actress
marine
scotland
steel
glanced
farm
steve
1957
risk
tonight
positive
magic
singles
effects
gray
screen
dog
##ja
residents
bus
sides
none
secondary
literature
polish
destroyed
flying
founder
households
1939
lay
reserve
usa
gallery
##ler
1946
industrial
younger
approach
appearances
urban
ones
1950
finish
avenue
powerful
fully
growth
page
honor
jersey
projects
advanced
revealed
basic
90
infantry
pair
equipment
visit
33
evening
search
grant
effort
solo
treatment
buried
republican
primarily
bottom
owner
1970s
israel
gives
jim
dream
bob
remain
spot
70
notes
produce
champions
contact
ed
soul
accepted
ways
del
##ally
losing
split
price
capacity
basis
trial
questions
##ina
1955
20th
guess
officially
memorial
naval
initial
##ization
whispered
median
engineer
##ful
sydney
##go
columbia
strength
300
1952
tears
senate
00
card
asian
agent
1947
software
44
draw
warm
supposed
com
pro
##il
transferred
leaned
##at
candidate
escape
mountains
asia
potential
activity
entertainment
seem
traffic
jackson
murder
36
slow
product
orchestra
haven
agency
bbc
taught
website
comedy
unable
storm
planning
albums
rugby
environment
scientific
grabbed
protect
##hi
boat
typically
1954
1953
damage
principal
divided
dedicated
mount
ohio
##berg
pick
fought
driver
##der
empty
shoulders
sort
thank
berlin
prominent
account
freedom
necessary
efforts
alex
headquarters
follows
alongside
des
simon
andrew
suggested
operating
learning
steps
1949
sweet
technical
begin
easily
34
teeth
speaking
settlement
scale
##sh
renamed
ray
max
enemy
semi
joint
compared
##rd
scottish
leadership
analysis
offers
georgia
pieces
captured
animal
deputy
guest
organized
##lin
tony
combined
method
challenge
1960s
huge
wants
battalion
sons
rise
crime
types
facilities
telling
path
1951
platform
sit
1990s
##lo
tells
assigned
rich
pull
##ot
commonly
alive
##za
letters
concept
conducted
wearing
happen
bought
becomes
holy
gets
ocean
defeat
languages
purchased
coffee
occurred
titled
##q
declared
applied
sciences
concert
sounds
jazz
brain
##me
painting
fleet
tax
nick
##ius
michigan
count
animals
leaders
episodes
##line
content
##den
birth
##it
clubs
64
palace
critical
refused
fair
leg
laughed
returning
surrounding
participated
formation
lifted
pointed
connected
rome
medicine
laid
taylor
santa
powers
adam
tall
shared
focused
knowing
yards
entrance
falls
##wa
calling
##ad
sources
chosen
beneath
resources
yard
##ite
nominated
silence
zone
defined
##que
gained
thirty
38
bodies
moon
##ard
adopted
christmas
widely
register
apart
iran
premier
serves
du
unknown
parties
##les
generation
##ff
continues
quick
fields
brigade
quiet
teaching
clothes
impact
weapons
partner
flat
theater
supreme
1938
37
relations
##tor
plants
suffered
1936
wilson
kids
begins
##age
1918
seats
armed
internet
models
worth
laws
400
communities
classes
background
knows
thanks
quarter
reaching
humans
carry
killing
format
kong
hong
setting
75
architecture
disease
railroad
inc
possibly
wish
arthur
thoughts
harry
doors
density
##di
crowd
illinois
stomach
tone
unique
reports
anyway
##ir
liberal
der
vehicle
thick
dry
drug
faced
largely
facility
theme
holds
creation
strange
colonel
##mi
revolution
bell
politics
turns
silent
rail
relief
independence
combat
shape
write
determined
sales
learned
4th
finger
oxford
providing
1937
heritage
fiction
situated
designated
allowing
distribution
hosted
##est
sight
interview
estimated
reduced
##ria
toronto
footballer
keeping
guys
damn
claim
motion
sport
sixth
stayed
##ze
en
rear
receive
handed
twelve
dress
audience
granted
brazil
##well
spirit
##ated
noticed
etc
olympic
representative
eric
tight
trouble
reviews
drink
vampire
missing
roles
ranked
newly
household
finals
wave
critics
##ee
phase
massachusetts
pilot
unlike
philadelphia
bright
guns
crown
organizations
roof
42
respectively
clearly
tongue
marked
circle
fox
korea
bronze
brian
expanded
sexual
supply
yourself
inspired
labour
fc
##ah
reference
vision
draft
connection
brand
reasons
1935
classic
driving
trip
jesus
cells
entry
1920
neither
trail
claims
atlantic
orders
labor
nose
afraid
identified
intelligence
calls
cancer
attacked
passing
stephen
positions
imperial
grey
jason
39
sunday
48
swedish
avoid
extra
uncle
message
covers
allows
surprise
materials
fame
hunter
##ji
1930
citizens
figures
davis
environmental
confirmed
shit
titles
di
performing
difference
acts
attacks
##ov
existing
votes
opportunity
nor
shop
entirely
trains
opposite
pakistan
##pa
develop
resulted
representatives
actions
reality
pressed
##ish
barely
wine
conversation
faculty
northwest
ends
documentary
nuclear
stock
grace
sets
eat
alternative
##ps
bag
resulting
creating
surprised
cemetery
1919
drop
finding
sarah
cricket
streets
tradition
ride
1933
exhibition
target
ear
explained
rain
composer
injury
apartment
municipal
educational
occupied
netherlands
clean
billion
constitution
learn
1914
maximum
classical
francis
lose
opposition
jose
ontario
bear
core
hills
rolled
ending
drawn
permanent
fun
##tes
##lla
lewis
sites
chamber
ryan
##way
scoring
height
1934
##house
lyrics
staring
55
officials
1917
snow
oldest
##tic
orange
##ger
qualified
interior
apparently
succeeded
thousand
dinner
lights
existence
fans
heavily
41
greatest
conservative
send
bowl
plus
enter
catch
##un
economy
duty
1929
speech
authorities
princess
performances
versions
shall
graduate
pictures
effective
remembered
poetry
desk
crossed
starring
starts
passenger
sharp
##ant
acres
ass
weather
falling
rank
fund
supporting
check
adult
publishing
heads
cm
southeast
lane
##burg
application
bc
##ura
les
condition
transfer
prevent
display
ex
regions
earl
federation
cool
relatively
answered
besides
1928
obtained
portion
##town
mix
##ding
reaction
liked
dean
express
peak
1932
##tte
counter
religion
chain
rare
miller
convention
aid
lie
vehicles
mobile
perform
squad
wonder
lying
crazy
sword
##ping
attempted
centuries
weren
philosophy
category
##ize
anna
interested
47
sweden
wolf
frequently
abandoned
kg
literary
alliance
task
entitled
##ay
threw
promotion
factory
tiny
soccer
visited
matt
fm
achieved
52
defence
internal
persian
43
methods
##ging
arrested
otherwise
cambridge
programming
villages
elementary
districts
rooms
criminal
conflict
worry
trained
1931
attempts
waited
signal
bird
truck
subsequent
programme
##ol
ad
49
communist
details
faith
sector
patrick
carrying
laugh
##ss
controlled
korean
showing
origin
fuel
evil
1927
##ent
brief
identity
darkness
address
pool
missed
publication
web
planet
ian
anne
wings
invited
##tt
briefly
standards
kissed
##be
ideas
climate
causing
walter
worse
albert
articles
winners
desire
aged
northeast
dangerous
gate
doubt
1922
wooden
multi
##ky
poet
rising
funding
46
communications
communication
violence
copies
prepared
ford
investigation
skills
1924
pulling
electronic
##ak
##ial
##han
containing
ultimately
offices
singing
understanding
restaurant
tomorrow
fashion
christ
ward
da
pope
stands
5th
flow
studios
aired
commissioned
contained
exist
fresh
americans
##per
wrestling
approved
kid
employed
respect
suit
1925
angel
asking
increasing
frame
angry
selling
1950s
thin
finds
##nd
temperature
statement
ali
explain
inhabitants
towns
extensive
narrow
51
jane
flowers
images
promise
somewhere
object
fly
closely
##ls
1912
bureau
cape
1926
weekly
presidential
legislative
1921
##ai
##au
launch
founding
##ny
978
##ring
artillery
strike
un
institutions
roll
writers
landing
chose
kevin
anymore
pp
##ut
attorney
fit
dan
billboard
receiving
agricultural
breaking
sought
dave
admitted
lands
mexican
##bury
charlie
specifically
hole
iv
howard
credit
moscow
roads
accident
1923
proved
wear
struck
hey
guards
stuff
slid
expansion
1915
cat
anthony
##kin
melbourne
opposed
sub
southwest
architect
failure
plane
1916
##ron
map
camera
tank
listen
regarding
wet
introduction
metropolitan
link
ep
fighter
inch
grown
gene
anger
fixed
buy
dvd
khan
domestic
worldwide
chapel
mill
functions
examples
##head
developing
1910
turkey
hits
pocket
antonio
papers
grow
unless
circuit
18th
concerned
attached
journalist
selection
journey
converted
provincial
painted
hearing
aren
bands
negative
aside
wondered
knight
lap
survey
ma
##ow
noise
billy
##ium
shooting
guide
bedroom
priest
resistance
motor
homes
sounded
giant
##mer
150
scenes
equal
comic
patients
hidden
solid
actual
bringing
afternoon
touched
funds
wedding
consisted
marie
canal
sr
kim
treaty
turkish
recognition
residence
cathedral
broad
knees
incident
shaped
fired
norwegian
handle
cheek
contest
represent
##pe
representing
beauty
##sen
birds
advantage
emergency
wrapped
drawing
notice
pink
broadcasting
##ong
somehow
bachelor
seventh
collected
registered
establishment
alan
assumed
chemical
personnel
roger
retirement
jeff
portuguese
wore
tied
device
threat
progress
advance
##ised
banks
hired
manchester
nfl
teachers
structures
forever
##bo
tennis
helping
saturday
sale
applications
junction
hip
incorporated
neighborhood
dressed
ceremony
##ds
influenced
hers
visual
stairs
decades
inner
kansas
hung
hoped
gain
scheduled
downtown
engaged
austria
clock
norway
certainly
pale
protected
1913
victor
employees
plate
putting
surrounded
##ists
finishing
blues
tropical
##ries
minnesota
consider
philippines
accept
54
retrieved
1900
concern
anderson
properties
institution
gordon
successfully
vietnam
##dy
backing
outstanding
muslim
crossing
folk
producing
usual
demand
occurs
observed
lawyer
educated
##ana
kelly
string
pleasure
budget
items
quietly
colorado
philip
typical
##worth
derived
600
survived
asks
mental
##ide
56
jake
jews
distinguished
ltd
1911
sri
extremely
53
athletic
loud
thousands
worried
shadow
transportation
horses
weapon
arena
importance
users
tim
objects
contributed
dragon
douglas
aware
senator
johnny
jordan
sisters
engines
flag
investment
samuel
shock
capable
clark
row
wheel
refers
session
familiar
biggest
wins
hate
maintained
drove
hamilton
request
expressed
injured
underground
churches
walker
wars
tunnel
passes
stupid
agriculture
softly
cabinet
regarded
joining
indiana
##ea
##ms
push
dates
spend
behavior
woods
protein
gently
chase
morgan
mention
burning
wake
combination
occur
mirror
leads
jimmy
indeed
impossible
singapore
paintings
covering
##nes
soldier
locations
attendance
sell
historian
wisconsin
invasion
argued
painter
diego
changing
egypt
##don
experienced
inches
##ku
missouri
vol
grounds
spoken
switzerland
##gan
reform
rolling
ha
forget
massive
resigned
burned
allen
tennessee
locked
values
improved
##mo
wounded
universe
sick
dating
facing
pack
purchase
user
##pur
moments
##ul
merged
anniversary
1908
coal
brick
understood
causes
dynasty
queensland
establish
stores
crisis
promote
hoping
views
cards
referee
extension
##si
raise
arizona
improve
colonial
formal
charged
##rt
palm
lucky
hide
rescue
faces
95
feelings
candidates
juan
##ell
goods
6th
courses
weekend
59
luke
cash
fallen
##om
delivered
affected
installed
carefully
tries
swiss
hollywood
costs
lincoln
responsibility
##he
shore
file
proper
normally
maryland
assistance
jump
constant
offering
friendly
waters
persons
realize
contain
trophy
800
partnership
factor
58
musicians
cry
bound
oregon
indicated
hero
houston
medium
##ure
consisting
somewhat
##ara
57
cycle
##che
beer
moore
frederick
gotten
eleven
worst
weak
approached
arranged
chin
loan
universal
bond
fifteen
pattern
disappeared
##ney
translated
##zed
lip
arab
capture
interests
insurance
##chi
shifted
cave
prix
warning
sections
courts
coat
plot
smell
feed
golf
favorite
maintain
knife
vs
voted
degrees
finance
quebec
opinion
translation
manner
ruled
operate
productions
choose
musician
discovery
confused
tired
separated
stream
techniques
committed
attend
ranking
kings
throw
passengers
measure
horror
fan
mining
sand
danger
salt
calm
decade
dam
require
runner
##ik
rush
associate
greece
##ker
rivers
consecutive
matthew
##ski
sighed
sq
documents
steam
edited
closing
tie
accused
1905
##ini
islamic
distributed
directors
organisation
bruce
7th
breathing
mad
lit
arrival
concrete
taste
08
composition
shaking
faster
amateur
adjacent
stating
1906
twin
flew
##ran
tokyo
publications
##tone
obviously
ridge
storage
1907
carl
pages
concluded
desert
driven
universities
ages
terminal
sequence
borough
250
constituency
creative
cousin
economics
dreams
margaret
notably
reduce
montreal
mode
17th
ears
saved
jan
vocal
##ica
1909
andy
##jo
riding
roughly
threatened
##ise
meters
meanwhile
landed
compete
repeated
grass
czech
regularly
charges
tea
sudden
appeal
##ung
solution
describes
pierre
classification
glad
parking
##ning
belt
physics
99
rachel
add
hungarian
participate
expedition
damaged
gift
childhood
85
fifty
##red
mathematics
jumped
letting
defensive
mph
##ux
##gh
testing
##hip
hundreds
shoot
owners
matters
smoke
israeli
kentucky
dancing
mounted
grandfather
emma
designs
profit
argentina
##gs
truly
li
lawrence
cole
begun
detroit
willing
branches
smiling
decide
miami
enjoyed
recordings
##dale
poverty
ethnic
gay
##bi
gary
arabic
09
accompanied
##one
##ons
fishing
determine
residential
acid
##ary
alice
returns
starred
mail
##ang
jonathan
strategy
##ue
net
forty
cook
businesses
equivalent
commonwealth
distinct
ill
##cy
seriously
##ors
##ped
shift
harris
replace
rio
imagine
formula
ensure
##ber
additionally
scheme
conservation
occasionally
purposes
feels
favor
##and
##ore
1930s
contrast
hanging
hunt
movies
1904
instruments
victims
danish
christopher
busy
demon
sugar
earliest
colony
studying
balance
duties
##ks
belgium
slipped
carter
05
visible
stages
iraq
fifa
##im
commune
forming
zero
07
continuing
talked
counties
legend
bathroom
option
tail
clay
daughters
afterwards
severe
jaw
visitors
##ded
devices
aviation
russell
kate
##vi
entering
subjects
##ino
temporary
swimming
forth
smooth
ghost
audio
bush
operates
rocks
movements
signs
eddie
##tz
ann
voices
honorary
06
memories
dallas
pure
measures
racial
promised
66
harvard
ceo
16th
parliamentary
indicate
benefit
flesh
dublin
louisiana
1902
1901
patient
sleeping
1903
membership
coastal
medieval
wanting
element
scholars
rice
62
limit
survive
makeup
rating
definitely
collaboration
obvious
##tan
boss
ms
baron
birthday
linked
soil
diocese
##lan
ncaa
##mann
offensive
shell
shouldn
waist
##tus
plain
ross
organ
resolution
manufacturing
adding
relative
kennedy
98
whilst
moth
marketing
gardens
crash
72
heading
partners
credited
carlos
moves
cable
##zi
marshall
##out
depending
bottle
represents
rejected
responded
existed
04
jobs
denmark
lock
##ating
treated
graham
routes
talent
commissioner
drugs
secure
tests
reign
restored
photography
##gi
contributions
oklahoma
designer
disc
grin
seattle
robin
paused
atlanta
unusual
##gate
praised
las
laughing
satellite
hungary
visiting
##sky
interesting
factors
deck
poems
norman
##water
stuck
speaker
rifle
domain
premiered
##her
dc
comics
actors
01
reputation
eliminated
8th
ceiling
prisoners
script
##nce
leather
austin
mississippi
rapidly
admiral
parallel
charlotte
guilty
tools
gender
divisions
fruit
##bs
laboratory
nelson
fantasy
marry
rapid
aunt
tribe
requirements
aspects
suicide
amongst
adams
bone
ukraine
abc
kick
sees
edinburgh
clothing
column
rough
gods
hunting
broadway
gathered
concerns
##ek
spending
ty
12th
snapped
requires
solar
bones
cavalry
##tta
iowa
drinking
waste
index
franklin
charity
thompson
stewart
tip
flash
landscape
friday
enjoy
singh
poem
listening
##back
eighth
fred
differences
adapted
bomb
ukrainian
surgery
corporate
masters
anywhere
##more
waves
odd
sean
portugal
orleans
dick
debate
kent
eating
puerto
cleared
96
expect
cinema
97
guitarist
blocks
electrical
agree
involving
depth
dying
panel
struggle
##ged
peninsula
adults
novels
emerged
vienna
metro
debuted
shoes
tamil
songwriter
meets
prove
beating
instance
heaven
scared
sending
marks
artistic
passage
superior
03
significantly
shopping
##tive
retained
##izing
malaysia
technique
cheeks
##ola
warren
maintenance
destroy
extreme
allied
120
appearing
##yn
fill
advice
alabama
qualifying
policies
cleveland
hat
battery
smart
authors
10th
soundtrack
acted
dated
lb
glance
equipped
coalition
funny
outer
ambassador
roy
possibility
couples
campbell
dna
loose
ethan
supplies
1898
gonna
88
monster
##res
shake
agents
frequency
springs
dogs
practices
61
gang
plastic
easier
suggests
gulf
blade
exposed
colors
industries
markets
pan
nervous
electoral
charts
legislation
ownership
##idae
mac
appointment
shield
copy
assault
socialist
abbey
monument
license
throne
employment
jay
93
replacement
charter
cloud
powered
suffering
accounts
oak
connecticut
strongly
wright
colour
crystal
13th
context
welsh
networks
voiced
gabriel
jerry
##cing
forehead
mp
##ens
manage
schedule
totally
remix
##ii
forests
occupation
print
nicholas
brazilian
strategic
vampires
engineers
76
roots
seek
correct
instrumental
und
alfred
backed
hop
##des
stanley
robinson
traveled
wayne
welcome
austrian
achieve
67
exit
rates
1899
strip
whereas
##cs
sing
deeply
adventure
bobby
rick
jamie
careful
components
cap
useful
personality
knee
##shi
pushing
hosts
02
protest
ca
ottoman
symphony
##sis
63
boundary
1890
processes
considering
considerable
tons
##work
##ft
##nia
cooper
trading
dear
conduct
91
illegal
apple
revolutionary
holiday
definition
harder
##van
jacob
circumstances
destruction
##lle
popularity
grip
classified
liverpool
donald
baltimore
flows
seeking
honour
approval
92
mechanical
till
happening
statue
critic
increasingly
immediate
describe
commerce
stare
##ster
indonesia
meat
rounds
boats
baker
orthodox
depression
formally
worn
naked
claire
muttered
sentence
11th
emily
document
77
criticism
wished
vessel
spiritual
bent
virgin
parker
minimum
murray
lunch
danny
printed
compilation
keyboards
false
blow
belonged
68
raising
78
cutting
##board
pittsburgh
##up
9th
shadows
81
hated
indigenous
jon
15th
barry
scholar
ah
##zer
oliver
##gy
stick
susan
meetings
attracted
spell
romantic
##ver
ye
1895
photo
demanded
customers
##ac
1896
logan
revival
keys
modified
commanded
jeans
##ious
upset
raw
phil
detective
hiding
resident
vincent
##bly
experiences
diamond
defeating
coverage
lucas
external
parks
franchise
helen
bible
successor
percussion
celebrated
il
lift
profile
clan
romania
##ied
mills
##su
nobody
achievement
shrugged
fault
1897
rhythm
initiative
breakfast
carbon
700
69
lasted
violent
74
wound
ken
killer
gradually
filmed
°c
dollars
processing
94
remove
criticized
guests
sang
chemistry
##vin
legislature
disney
##bridge
uniform
escaped
integrated
proposal
purple
denied
liquid
karl
influential
morris
nights
stones
intense
experimental
twisted
71
84
##ld
pace
nazi
mitchell
ny
blind
reporter
newspapers
14th
centers
burn
basin
forgotten
surviving
filed
collections
monastery
losses
manual
couch
description
appropriate
merely
tag
missions
sebastian
restoration
replacing
triple
73
elder
julia
warriors
benjamin
julian
convinced
stronger
amazing
declined
versus
merchant
happens
output
finland
bare
barbara
absence
ignored
dawn
injuries
##port
producers
##ram
82
luis
##ities
kw
admit
expensive
electricity
nba
exception
symbol
##ving
ladies
shower
sheriff
characteristics
##je
aimed
button
ratio
effectively
summit
angle
jury
bears
foster
vessels
pants
executed
evans
dozen
advertising
kicked
patrol
1889
competitions
lifetime
principles
athletics
##logy
birmingham
sponsored
89
rob
nomination
1893
acoustic
##sm
creature
longest
##tra
credits
harbor
dust
josh
##so
territories
milk
infrastructure
completion
thailand
indians
leon
archbishop
##sy
assist
pitch
blake
arrangement
girlfriend
serbian
operational
hence
sad
scent
fur
dj
sessions
hp
refer
rarely
##ora
exists
1892
##ten
scientists
dirty
penalty
burst
portrait
seed
79
pole
limits
rival
1894
stable
alpha
grave
constitutional
alcohol
arrest
flower
mystery
devil
architectural
relationships
greatly
habitat
##istic
larry
progressive
remote
cotton
##ics
##ok
preserved
reaches
##ming
cited
86
vast
scholarship
decisions
cbs
joy
teach
1885
editions
knocked
eve
searching
partly
participation
gap
animated
fate
excellent
##ett
na
87
alternate
saints
youngest
##ily
climbed
##ita
##tors
suggest
##ct
discussion
staying
choir
lakes
jacket
revenue
nevertheless
peaked
instrument
wondering
annually
managing
neil
1891
signing
terry
##ice
apply
clinical
brooklyn
aim
catherine
fuck
farmers
figured
ninth
pride
hugh
evolution
ordinary
involvement
comfortable
shouted
tech
encouraged
taiwan
representation
sharing
##lia
##em
panic
exact
cargo
competing
fat
cried
83
1920s
occasions
pa
cabin
borders
utah
marcus
##isation
badly
muscles
##ance
victorian
transition
warner
bet
permission
##rin
slave
terrible
similarly
shares
seth
uefa
possession
medals
benefits
colleges
lowered
perfectly
mall
transit
##ye
##kar
publisher
##ened
harrison
deaths
elevation
##ae
asleep
machines
sigh
ash
hardly
argument
occasion
parent
leo
decline
1888
contribution
##ua
concentration
1000
opportunities
hispanic
guardian
extent
emotions
hips
mason
volumes
bloody
controversy
diameter
steady
mistake
phoenix
identify
violin
##sk
departure
richmond
spin
funeral
enemies
1864
gear
literally
connor
random
sergeant
grab
confusion
1865
transmission
informed
op
leaning
sacred
suspended
thinks
gates
portland
luck
agencies
yours
hull
expert
muscle
layer
practical
sculpture
jerusalem
latest
lloyd
statistics
deeper
recommended
warrior
arkansas
mess
supports
greg
eagle
1880
recovered
rated
concerts
rushed
##ano
stops
eggs
files
premiere
keith
##vo
delhi
turner
pit
affair
belief
paint
##zing
mate
##ach
##ev
victim
##ology
withdrew
bonus
styles
fled
##ud
glasgow
technologies
funded
nbc
adaptation
##ata
portrayed
cooperation
supporters
judges
bernard
justin
hallway
ralph
##ick
graduating
controversial
distant
continental
spider
bite
##ho
recognize
intention
mixing
##ese
egyptian
bow
tourism
suppose
claiming
tiger
dominated
participants
vi
##ru
nurse
partially
tape
##rum
psychology
##rn
essential
touring
duo
voting
civilian
emotional
channels
##king
apparent
hebrew
1887
tommy
carrier
intersection
beast
hudson
##gar
##zo
lab
nova
bench
discuss
costa
##ered
detailed
behalf
drivers
unfortunately
obtain
##lis
rocky
##dae
siege
friendship
honey
##rian
1861
amy
hang
posted
governments
collins
respond
wildlife
preferred
operator
##po
laura
pregnant
videos
dennis
suspected
boots
instantly
weird
automatic
businessman
alleged
placing
throwing
ph
mood
1862
perry
venue
jet
remainder
##lli
##ci
passion
biological
boyfriend
1863
dirt
buffalo
ron
segment
fa
abuse
##era
genre
thrown
stroke
colored
stress
exercise
displayed
##gen
struggled
##tti
abroad
dramatic
wonderful
thereafter
madrid
component
widespread
##sed
tale
citizen
todd
monday
1886
vancouver
overseas
forcing
crying
descent
##ris
discussed
substantial
ranks
regime
1870
provinces
switch
drum
zane
ted
tribes
proof
lp
cream
researchers
volunteer
manor
silk
milan
donated
allies
venture
principle
delivery
enterprise
##ves
##ans
bars
traditionally
witch
reminded
copper
##uk
pete
inter
links
colin
grinned
elsewhere
competitive
frequent
##oy
scream
##hu
tension
texts
submarine
finnish
defending
defend
pat
detail
1884
affiliated
stuart
themes
villa
periods
tool
belgian
ruling
crimes
answers
folded
licensed
resort
demolished
hans
lucy
1881
lion
traded
photographs
writes
craig
##fa
trials
generated
beth
noble
debt
percentage
yorkshire
erected
ss
viewed
grades
confidence
ceased
islam
telephone
retail
##ible
chile
m²
roberts
sixteen
##ich
commented
hampshire
innocent
dual
pounds
checked
regulations
afghanistan
sung
rico
liberty
assets
bigger
options
angels
relegated
tribute
wells
attending
leaf
##yan
butler
romanian
forum
monthly
lisa
patterns
gmina
##tory
madison
hurricane
rev
##ians
bristol
##ula
elite
valuable
disaster
democracy
awareness
germans
freyja
##ins
loop
absolutely
paying
populations
maine
sole
prayer
spencer
releases
doorway
bull
##ani
lover
midnight
conclusion
##sson
thirteen
lily
mediterranean
##lt
nhl
proud
sample
##hill
drummer
guinea
##ova
murphy
climb
##ston
instant
attributed
horn
ain
railways
steven
##ao
autumn
ferry
opponent
root
traveling
secured
corridor
stretched
tales
sheet
trinity
cattle
helps
indicates
manhattan
murdered
fitted
1882
gentle
grandmother
mines
shocked
vegas
produces
##light
caribbean
##ou
belong
continuous
desperate
drunk
historically
trio
waved
raf
dealing
nathan
bat
murmured
interrupted
residing
scientist
pioneer
harold
aaron
##net
delta
attempting
minority
mini
believes
chorus
tend
lots
eyed
indoor
load
shots
updated
jail
##llo
concerning
connecting
wealth
##ved
slaves
arrive
rangers
sufficient
rebuilt
##wick
cardinal
flood
muhammad
whenever
relation
runners
moral
repair
viewers
arriving
revenge
punk
assisted
bath
fairly
breathe
lists
innings
illustrated
whisper
nearest
voters
clinton
ties
ultimate
screamed
beijing
lions
andre
fictional
gathering
comfort
radar
suitable
dismissed
hms
ban
pine
wrist
atmosphere
voivodeship
bid
timber
##ned
##nan
giants
##ane
cameron
recovery
uss
identical
categories
switched
serbia
laughter
noah
ensemble
therapy
peoples
touching
##off
locally
pearl
platforms
everywhere
ballet
tables
lanka
herbert
outdoor
toured
derek
1883
spaces
contested
swept
1878
exclusive
slight
connections
##dra
winds
prisoner
collective
bangladesh
tube
publicly
wealthy
thai
##ys
isolated
select
##ric
insisted
pen
fortune
ticket
spotted
reportedly
animation
enforcement
tanks
110
decides
wider
lowest
owen
##time
nod
hitting
##hn
gregory
furthermore
magazines
fighters
solutions
##ery
pointing
requested
peru
reed
chancellor
knights
mask
worker
eldest
flames
reduction
1860
volunteers
##tis
reporting
##hl
wire
advisory
endemic
origins
settlers
pursue
knock
consumer
1876
eu
compound
creatures
mansion
sentenced
ivan
deployed
guitars
frowned
involves
mechanism
kilometers
perspective
shops
maps
terminus
duncan
alien
fist
bridges
##pers
heroes
fed
derby
swallowed
##ros
patent
sara
illness
characterized
adventures
slide
hawaii
jurisdiction
##op
organised
##side
adelaide
walks
biology
se
##ties
rogers
swing
tightly
boundaries
##rie
prepare
implementation
stolen
##sha
certified
colombia
edwards
garage
##mm
recalled
##ball
rage
harm
nigeria
breast
##ren
furniture
pupils
settle
##lus
cuba
balls
client
alaska
21st
linear
thrust
celebration
latino
genetic
terror
##cia
##ening
lightning
fee
witness
lodge
establishing
skull
##ique
earning
hood
##ei
rebellion
wang
sporting
warned
missile
devoted
activist
porch
worship
fourteen
package
1871
decorated
##shire
housed
##ock
chess
sailed
doctors
oscar
joan
treat
garcia
harbour
jeremy
##ire
traditions
dominant
jacques
##gon
##wan
relocated
1879
amendment
sized
companion
simultaneously
volleyball
spun
acre
increases
stopping
loves
belongs
affect
drafted
tossed
scout
battles
1875
filming
shoved
munich
tenure
vertical
romance
pc
##cher
argue
##ical
craft
ranging
www
opens
honest
tyler
yesterday
virtual
##let
muslims
reveal
snake
immigrants
radical
screaming
speakers
firing
saving
belonging
ease
lighting
prefecture
blame
farmer
hungry
grows
rubbed
beam
sur
subsidiary
##cha
armenian
sao
dropping
conventional
##fer
microsoft
reply
qualify
spots
1867
sweat
festivals
##ken
immigration
physician
discover
exposure
sandy
explanation
isaac
implemented
##fish
hart
initiated
connect
stakes
presents
heights
householder
pleased
tourist
regardless
slip
closest
##ction
surely
sultan
brings
riley
preparation
aboard
slammed
baptist
experiment
ongoing
interstate
organic
playoffs
##ika
1877
130
##tar
hindu
error
tours
tier
plenty
arrangements
talks
trapped
excited
sank
ho
athens
1872
denver
welfare
suburb
athletes
trick
diverse
belly
exclusively
yelled
1868
##med
conversion
##ette
1874
internationally
computers
conductor
abilities
sensitive
hello
dispute
measured
globe
rocket
prices
amsterdam
flights
tigers
inn
municipalities
emotion
references
3d
##mus
explains
airlines
manufactured
pm
archaeological
1873
interpretation
devon
comment
##ites
settlements
kissing
absolute
improvement
suite
impressed
barcelona
sullivan
jefferson
towers
jesse
julie
##tin
##lu
grandson
hi
gauge
regard
rings
interviews
trace
raymond
thumb
departments
burns
serial
bulgarian
scores
demonstrated
##ix
1866
kyle
alberta
underneath
romanized
##ward
relieved
acquisition
phrase
cliff
reveals
han
cuts
merger
custom
##dar
nee
gilbert
graduation
##nts
assessment
cafe
difficulty
demands
swung
democrat
jennifer
commons
1940s
grove
##yo
completing
focuses
sum
substitute
bearing
stretch
reception
##py
reflected
essentially
destination
pairs
##ched
survival
resource
##bach
promoting
doubles
messages
tear
##down
##fully
parade
florence
harvey
incumbent
partial
framework
900
pedro
frozen
procedure
olivia
controls
##mic
shelter
personally
temperatures
##od
brisbane
tested
sits
marble
comprehensive
oxygen
leonard
##kov
inaugural
iranian
referring
quarters
attitude
##ivity
mainstream
lined
mars
dakota
norfolk
unsuccessful
##°
explosion
helicopter
congressional
##sing
inspector
bitch
seal
departed
divine
##ters
coaching
examination
punishment
manufacturer
sink
columns
unincorporated
signals
nevada
squeezed
dylan
dining
photos
martial
manuel
eighteen
elevator
brushed
plates
ministers
ivy
congregation
##len
slept
specialized
taxes
curve
restricted
negotiations
likes
statistical
arnold
inspiration
execution
bold
intermediate
significance
margin
ruler
wheels
gothic
intellectual
dependent
listened
eligible
buses
widow
syria
earn
cincinnati
collapsed
recipient
secrets
accessible
philippine
maritime
goddess
clerk
surrender
breaks
playoff
database
##ified
##lon
ideal
beetle
aspect
soap
regulation
strings
expand
anglo
shorter
crosses
retreat
tough
coins
wallace
directions
pressing
##oon
shipping
locomotives
comparison
topics
nephew
##mes
distinction
honors
travelled
sierra
ibn
##over
fortress
sa
recognised
carved
1869
clients
##dan
intent
##mar
coaches
describing
bread
##ington
beaten
northwestern
##ona
merit
youtube
collapse
challenges
em
historians
objective
submitted
virus
attacking
drake
assume
##ere
diseases
marc
stem
leeds
##cus
##ab
farming
glasses
##lock
visits
nowhere
fellowship
relevant
carries
restaurants
experiments
101
constantly
bases
targets
shah
tenth
opponents
verse
territorial
##ira
writings
corruption
##hs
instruction
inherited
reverse
emphasis
##vic
employee
arch
keeps
rabbi
watson
payment
uh
##ala
nancy
##tre
venice
fastest
sexy
banned
adrian
properly
ruth
touchdown
dollar
boards
metre
circles
edges
favour
comments
ok
travels
liberation
scattered
firmly
##ular
holland
permitted
diesel
kenya
den
originated
##ral
demons
resumed
dragged
rider
##rus
servant
blinked
extend
torn
##ias
##sey
input
meal
everybody
cylinder
kinds
camps
##fe
bullet
logic
##wn
croatian
evolved
healthy
fool
chocolate
wise
preserve
pradesh
##ess
respective
1850
##ew
chicken
artificial
gross
corresponding
convicted
cage
caroline
dialogue
##dor
narrative
stranger
mario
br
christianity
failing
trent
commanding
buddhist
1848
maurice
focusing
yale
bike
altitude
##ering
mouse
revised
##sley
veteran
##ig
pulls
theology
crashed
campaigns
legion
##ability
drag
excellence
customer
cancelled
intensity
excuse
##lar
liga
participating
contributing
printing
##burn
variable
##rk
curious
bin
legacy
renaissance
##my
symptoms
binding
vocalist
dancer
##nie
grammar
gospel
democrats
ya
enters
sc
diplomatic
hitler
##ser
clouds
mathematical
quit
defended
oriented
##heim
fundamental
hardware
impressive
equally
convince
confederate
guilt
chuck
sliding
##ware
magnetic
narrowed
petersburg
bulgaria
otto
phd
skill
##ama
reader
hopes
pitcher
reservoir
hearts
automatically
expecting
mysterious
bennett
extensively
imagined
seeds
monitor
fix
##ative
journalism
struggling
signature
ranch
encounter
photographer
observation
protests
##pin
influences
##hr
calendar
##all
cruz
croatia
locomotive
hughes
naturally
shakespeare
basement
hook
uncredited
faded
theories
approaches
dare
phillips
filling
fury
obama
##ain
efficient
arc
deliver
min
raid
breeding
inducted
leagues
efficiency
axis
montana
eagles
##ked
supplied
instructions
karen
picking
indicating
trap
anchor
practically
christians
tomb
vary
occasional
electronics
lords
readers
newcastle
faint
innovation
collect
situations
engagement
160
claude
mixture
##feld
peer
tissue
logo
lean
##ration
°f
floors
##ven
architects
reducing
##our
##ments
rope
1859
ottawa
##har
samples
banking
declaration
proteins
resignation
francois
saudi
advocate
exhibited
armor
twins
divorce
##ras
abraham
reviewed
jo
temporarily
matrix
physically
pulse
curled
##ena
difficulties
bengal
usage
##ban
annie
riders
certificate
##pi
holes
warsaw
distinctive
jessica
##mon
mutual
1857
customs
circular
eugene
removal
loaded
mere
vulnerable
depicted
generations
dame
heir
enormous
lightly
climbing
pitched
lessons
pilots
nepal
ram
google
preparing
brad
louise
renowned
##₂
liam
##ably
plaza
shaw
sophie
brilliant
bills
##bar
##nik
fucking
mainland
server
pleasant
seized
veterans
jerked
fail
beta
brush
radiation
stored
warmth
southeastern
nate
sin
raced
berkeley
joke
athlete
designation
trunk
##low
roland
qualification
archives
heels
artwork
receives
judicial
reserves
##bed
woke
installation
abu
floating
fake
lesser
excitement
interface
concentrated
addressed
characteristic
amanda
saxophone
monk
auto
##bus
releasing
egg
dies
interaction
defender
ce
outbreak
glory
loving
##bert
sequel
consciousness
http
awake
ski
enrolled
##ress
handling
rookie
brow
somebody
biography
warfare
amounts
contracts
presentation
fabric
dissolved
challenged
meter
psychological
lt
elevated
rally
accurate
##tha
hospitals
undergraduate
specialist
venezuela
exhibit
shed
nursing
protestant
fluid
structural
footage
jared
consistent
prey
##ska
succession
reflect
exile
lebanon
wiped
suspect
shanghai
resting
integration
preservation
marvel
variant
pirates
sheep
rounded
capita
sailing
colonies
manuscript
deemed
variations
clarke
functional
emerging
boxing
relaxed
curse
azerbaijan
heavyweight
nickname
editorial
rang
grid
tightened
earthquake
flashed
miguel
rushing
##ches
improvements
boxes
brooks
180
consumption
molecular
felix
societies
repeatedly
variation
aids
civic
graphics
professionals
realm
autonomous
receiver
delayed
workshop
militia
chairs
trump
canyon
##point
harsh
extending
lovely
happiness
##jan
stake
eyebrows
embassy
wellington
hannah
##ella
sony
corners
bishops
swear
cloth
contents
xi
namely
commenced
1854
stanford
nashville
courage
graphic
commitment
garrison
##bin
hamlet
clearing
rebels
attraction
literacy
cooking
ruins
temples
jenny
humanity
celebrate
hasn
freight
sixty
rebel
bastard
##art
newton
##ada
deer
##ges
##ching
smiles
delaware
singers
##ets
approaching
assists
flame
##ph
boulevard
barrel
planted
##ome
pursuit
##sia
consequences
posts
shallow
invitation
rode
depot
ernest
kane
rod
concepts
preston
topic
chambers
striking
blast
arrives
descendants
montgomery
ranges
worlds
##lay
##ari
span
chaos
praise
##ag
fewer
1855
sanctuary
mud
fbi
##ions
programmes
maintaining
unity
harper
bore
handsome
closure
tournaments
thunder
nebraska
linda
facade
puts
satisfied
argentine
dale
cork
dome
panama
##yl
1858
tasks
experts
##ates
feeding
equation
##las
##ida
##tu
engage
bryan
##ax
um
quartet
melody
disbanded
sheffield
blocked
gasped
delay
kisses
maggie
connects
##non
sts
poured
creator
publishers
##we
guided
ellis
extinct
hug
gaining
##ord
complicated
##bility
poll
clenched
investigate
##use
thereby
quantum
spine
cdp
humor
kills
administered
semifinals
##du
encountered
ignore
##bu
commentary
##maker
bother
roosevelt
140
plains
halfway
flowing
cultures
crack
imprisoned
neighboring
airline
##ses
##view
##mate
##ec
gather
wolves
marathon
transformed
##ill
cruise
organisations
carol
punch
exhibitions
numbered
alarm
ratings
daddy
silently
##stein
queens
colours
impression
guidance
liu
tactical
##rat
marshal
della
arrow
##ings
rested
feared
tender
owns
bitter
advisor
escort
##ides
spare
farms
grants
##ene
dragons
encourage
colleagues
cameras
##und
sucked
pile
spirits
prague
statements
suspension
landmark
fence
torture
recreation
bags
permanently
survivors
pond
spy
predecessor
bombing
coup
##og
protecting
transformation
glow
##lands
##book
dug
priests
andrea
feat
barn
jumping
##chen
##ologist
##con
casualties
stern
auckland
pipe
serie
revealing
ba
##bel
trevor
mercy
spectrum
yang
consist
governing
collaborated
possessed
epic
comprises
blew
shane
##ack
lopez
honored
magical
sacrifice
judgment
perceived
hammer
mtv
baronet
tune
das
missionary
sheets
350
neutral
oral
threatening
attractive
shade
aims
seminary
##master
estates
1856
michel
wounds
refugees
manufacturers
##nic
mercury
syndrome
porter
##iya
##din
hamburg
identification
upstairs
purse
widened
pause
cared
breathed
affiliate
santiago
prevented
celtic
fisher
125
recruited
byzantine
reconstruction
farther
##mp
diet
sake
au
spite
sensation
##ert
blank
separation
105
##hon
vladimir
armies
anime
##lie
accommodate
orbit
cult
sofia
archive
##ify
##box
founders
sustained
disorder
honours
northeastern
mia
crops
violet
threats
blanket
fires
canton
followers
southwestern
prototype
voyage
assignment
altered
moderate
protocol
pistol
##eo
questioned
brass
lifting
1852
math
authored
##ual
doug
dimensional
dynamic
##san
1851
pronounced
grateful
quest
uncomfortable
boom
presidency
stevens
relating
politicians
chen
barrier
quinn
diana
mosque
tribal
cheese
palmer
portions
sometime
chester
treasure
wu
bend
download
millions
reforms
registration
##osa
consequently
monitoring
ate
preliminary
brandon
invented
ps
eaten
exterior
intervention
ports
documented
log
displays
lecture
sally
favourite
##itz
vermont
lo
invisible
isle
breed
##ator
journalists
relay
speaks
backward
explore
midfielder
actively
stefan
procedures
cannon
blond
kenneth
centered
servants
chains
libraries
malcolm
essex
henri
slavery
##hal
facts
fairy
coached
cassie
cats
washed
cop
##fi
announcement
item
2000s
vinyl
activated
marco
frontier
growled
curriculum
##das
loyal
accomplished
leslie
ritual
kenny
##00
vii
napoleon
hollow
hybrid
jungle
stationed
friedrich
counted
##ulated
platinum
theatrical
seated
col
rubber
glen
1840
diversity
healing
extends
id
provisions
administrator
columbus
##oe
tributary
te
assured
org
##uous
prestigious
examined
lectures
grammy
ronald
associations
bailey
allan
essays
flute
believing
consultant
proceedings
travelling
1853
kit
kerala
yugoslavia
buddy
methodist
##ith
burial
centres
batman
##nda
discontinued
bo
dock
stockholm
lungs
severely
##nk
citing
manga
##ugh
steal
mumbai
iraqi
robot
celebrity
bride
broadcasts
abolished
pot
joel
overhead
franz
packed
reconnaissance
johann
acknowledged
introduce
handled
doctorate
developments
drinks
alley
palestine
##nis
##aki
proceeded
recover
bradley
grain
patch
afford
infection
nationalist
legendary
##ath
interchange
virtually
gen
gravity
exploration
amber
vital
wishes
powell
doctrine
elbow
screenplay
##bird
contribute
indonesian
pet
creates
##com
enzyme
kylie
discipline
drops
manila
hunger
##ien
layers
suffer
fever
bits
monica
keyboard
manages
##hood
searched
appeals
##bad
testament
grande
reid
##war
beliefs
congo
##ification
##dia
si
requiring
##via
casey
1849
regret
streak
rape
depends
syrian
sprint
pound
tourists
upcoming
pub
##xi
tense
##els
practiced
echo
nationwide
guild
motorcycle
liz
##zar
chiefs
desired
elena
bye
precious
absorbed
relatives
booth
pianist
##mal
citizenship
exhausted
wilhelm
##ceae
##hed
noting
quarterback
urge
hectares
##gue
ace
holly
##tal
blonde
davies
parked
sustainable
stepping
twentieth
airfield
galaxy
nest
chip
##nell
tan
shaft
paulo
requirement
##zy
paradise
tobacco
trans
renewed
vietnamese
##cker
##ju
suggesting
catching
holmes
enjoying
md
trips
colt
holder
butterfly
nerve
reformed
cherry
bowling
trailer
carriage
goodbye
appreciate
toy
joshua
interactive
enabled
involve
##kan
collar
determination
bunch
facebook
recall
shorts
superintendent
episcopal
frustration
giovanni
nineteenth
laser
privately
array
circulation
##ovic
armstrong
deals
painful
permit
discrimination
##wi
aires
retiring
cottage
ni
##sta
horizon
ellen
jamaica
ripped
fernando
chapters
playstation
patron
lecturer
navigation
behaviour
genes
georgian
export
solomon
rivals
swift
seventeen
rodriguez
princeton
independently
sox
1847
arguing
entity
casting
hank
criteria
oakland
geographic
milwaukee
reflection
expanding
conquest
dubbed
##tv
halt
brave
brunswick
doi
arched
curtis
divorced
predominantly
somerset
streams
ugly
zoo
horrible
curved
buenos
fierce
dictionary
vector
theological
unions
handful
stability
chan
punjab
segments
##lly
altar
ignoring
gesture
monsters
pastor
##stone
thighs
unexpected
operators
abruptly
coin
compiled
associates
improving
migration
pin
##ose
compact
collegiate
reserved
##urs
quarterfinals
roster
restore
assembled
hurry
oval
##cies
1846
flags
martha
##del
victories
sharply
##rated
argues
deadly
neo
drawings
symbols
performer
##iel
griffin
restrictions
editing
andrews
java
journals
arabia
compositions
dee
pierce
removing
hindi
casino
runway
civilians
minds
nasa
hotels
##zation
refuge
rent
retain
potentially
conferences
suburban
conducting
##tto
##tions
##tle
descended
massacre
##cal
ammunition
terrain
fork
souls
counts
chelsea
durham
drives
cab
##bank
perth
realizing
palestinian
finn
simpson
##dal
betty
##ule
moreover
particles
cardinals
tent
evaluation
extraordinary
##oid
inscription
##works
wednesday
chloe
maintains
panels
ashley
trucks
##nation
cluster
sunlight
strikes
zhang
##wing
dialect
canon
##ap
tucked
##ws
collecting
##mas
##can
##sville
maker
quoted
evan
franco
aria
buying
cleaning
eva
closet
provision
apollo
clinic
rat
##ez
necessarily
ac
##gle
##ising
venues
flipped
cent
spreading
trustees
checking
authorized
##sco
disappointed
##ado
notion
duration
trumpet
hesitated
topped
brussels
rolls
theoretical
hint
define
aggressive
repeat
wash
peaceful
optical
width
allegedly
mcdonald
strict
copyright
##illa
investors
mar
jam
witnesses
sounding
miranda
michelle
privacy
hugo
harmony
##pp
valid
lynn
glared
nina
102
headquartered
diving
boarding
gibson
##ncy
albanian
marsh
routine
dealt
enhanced
er
intelligent
substance
targeted
enlisted
discovers
spinning
observations
pissed
smoking
rebecca
capitol
visa
varied
costume
seemingly
indies
compensation
surgeon
thursday
arsenal
westminster
suburbs
rid
anglican
##ridge
knots
foods
alumni
lighter
fraser
whoever
portal
scandal
##ray
gavin
advised
instructor
flooding
terrorist
##ale
teenage
interim
senses
duck
teen
thesis
abby
eager
overcome
##ile
newport
glenn
rises
shame
##cc
prompted
priority
forgot
bomber
nicolas
protective
360
cartoon
katherine
breeze
lonely
trusted
henderson
richardson
relax
banner
candy
palms
remarkable
##rio
legends
cricketer
essay
ordained
edmund
rifles
trigger
##uri
##away
sail
alert
1830
audiences
penn
sussex
siblings
pursued
indianapolis
resist
rosa
consequence
succeed
avoided
1845
##ulation
inland
##tie
##nna
counsel
profession
chronicle
hurried
##una
eyebrow
eventual
bleeding
innovative
cure
##dom
committees
accounting
con
scope
hardy
heather
tenor
gut
herald
codes
tore
scales
wagon
##oo
luxury
tin
prefer
fountain
triangle
bonds
darling
convoy
dried
traced
beings
troy
accidentally
slam
findings
smelled
joey
lawyers
outcome
steep
bosnia
configuration
shifting
toll
brook
performers
lobby
philosophical
construct
shrine
aggregate
boot
cox
phenomenon
savage
insane
solely
reynolds
lifestyle
##ima
nationally
holdings
consideration
enable
edgar
mo
mama
##tein
fights
relegation
chances
atomic
hub
conjunction
awkward
reactions
currency
finale
kumar
underwent
steering
elaborate
gifts
comprising
melissa
veins
reasonable
sunshine
chi
solve
trails
inhabited
elimination
ethics
huh
ana
molly
consent
apartments
layout
marines
##ces
hunters
bulk
##oma
hometown
##wall
##mont
cracked
reads
neighbouring
withdrawn
admission
wingspan
damned
anthology
lancashire
brands
batting
forgive
cuban
awful
##lyn
104
dimensions
imagination
##ade
dante
##ship
tracking
desperately
goalkeeper
##yne
groaned
workshops
confident
burton
gerald
milton
circus
uncertain
slope
copenhagen
sophia
fog
philosopher
portraits
accent
cycling
varying
gripped
larvae
garrett
specified
scotia
mature
luther
kurt
rap
##kes
aerial
750
ferdinand
heated
es
transported
##shan
safely
nonetheless
##orn
##gal
motors
demanding
##sburg
startled
##brook
ally
generate
caps
ghana
stained
demo
mentions
beds
ap
afterward
diary
##bling
utility
##iro
richards
1837
conspiracy
conscious
shining
footsteps
observer
cyprus
urged
loyalty
developer
probability
olive
upgraded
gym
miracle
insects
graves
1844
ourselves
hydrogen
amazon
katie
tickets
poets
##pm
planes
##pan
prevention
witnessed
dense
jin
randy
tang
warehouse
monroe
bang
archived
elderly
investigations
alec
granite
mineral
conflicts
controlling
aboriginal
carlo
##zu
mechanics
stan
stark
rhode
skirt
est
##berry
bombs
respected
##horn
imposed
limestone
deny
nominee
memphis
grabbing
disabled
##als
amusement
aa
frankfurt
corn
referendum
varies
slowed
disk
firms
unconscious
incredible
clue
sue
##zhou
twist
##cio
joins
idaho
chad
developers
computing
destroyer
103
mortal
tucker
kingston
choices
yu
carson
1800
os
whitney
geneva
pretend
dimension
staged
plateau
maya
##une
freestyle
##bc
rovers
hiv
##ids
tristan
classroom
prospect
##hus
honestly
diploma
lied
thermal
auxiliary
feast
unlikely
iata
##tel
morocco
pounding
treasury
lithuania
considerably
1841
dish
1812
geological
matching
stumbled
destroying
marched
brien
advances
cake
nicole
belle
settling
measuring
directing
##mie
tuesday
bassist
capabilities
stunned
fraud
torpedo
##list
##phone
anton
wisdom
surveillance
ruined
##ulate
lawsuit
healthcare
theorem
halls
trend
aka
horizontal
dozens
acquire
lasting
swim
hawk
gorgeous
fees
vicinity
decrease
adoption
tactics
##ography
pakistani
##ole
draws
##hall
willie
burke
heath
algorithm
integral
powder
elliott
brigadier
jackie
tate
varieties
darker
##cho
lately
cigarette
specimens
adds
##ree
##ensis
##inger
exploded
finalist
cia
murders
wilderness
arguments
nicknamed
acceptance
onwards
manufacture
robertson
jets
tampa
enterprises
blog
loudly
composers
nominations
1838
ai
malta
inquiry
automobile
hosting
viii
rays
tilted
grief
museums
strategies
furious
euro
equality
cohen
poison
surrey
wireless
governed
ridiculous
moses
##esh
##room
vanished
##ito
barnes
attract
morrison
istanbul
##iness
absent
rotation
petition
janet
##logical
satisfaction
custody
deliberately
observatory
comedian
surfaces
pinyin
novelist
strictly
canterbury
oslo
monks
embrace
ibm
jealous
photograph
continent
dorothy
marina
doc
excess
holden
allegations
explaining
stack
avoiding
lance
storyline
majesty
poorly
spike
dos
bradford
raven
travis
classics
proven
voltage
pillow
fists
butt
1842
interpreted
##car
1839
gage
telegraph
lens
promising
expelled
casual
collector
zones
##min
silly
nintendo
##kh
##bra
downstairs
chef
suspicious
afl
flies
vacant
uganda
pregnancy
condemned
lutheran
estimates
cheap
decree
saxon
proximity
stripped
idiot
deposits
contrary
presenter
magnus
glacier
im
offense
edwin
##ori
upright
##long
bolt
##ois
toss
geographical
##izes
environments
delicate
marking
abstract
xavier
nails
windsor
plantation
occurring
equity
saskatchewan
fears
drifted
sequences
vegetation
revolt
##stic
1843
sooner
fusion
opposing
nato
skating
1836
secretly
ruin
lease
##oc
edit
##nne
flora
anxiety
ruby
##ological
##mia
tel
bout
taxi
emmy
frost
rainbow
compounds
foundations
rainfall
assassination
nightmare
dominican
##win
achievements
deserve
orlando
intact
armenia
##nte
calgary
valentine
106
marion
proclaimed
theodore
bells
courtyard
thigh
gonzalez
console
troop
minimal
monte
everyday
##ence
##if
supporter
terrorism
buck
openly
presbyterian
activists
carpet
##iers
rubbing
uprising
##yi
cute
conceived
legally
##cht
millennium
cello
velocity
ji
rescued
cardiff
1835
rex
concentrate
senators
beard
rendered
glowing
battalions
scouts
competitors
sculptor
catalogue
arctic
ion
raja
bicycle
wow
glancing
lawn
##woman
gentleman
lighthouse
publish
predicted
calculated
##val
variants
##gne
strain
##ui
winston
deceased
##nus
touchdowns
brady
caleb
sinking
echoed
crush
hon
blessed
protagonist
hayes
endangered
magnitude
editors
##tine
estimate
responsibilities
##mel
backup
laying
consumed
sealed
zurich
lovers
frustrated
##eau
ahmed
kicking
mit
treasurer
1832
biblical
refuse
terrified
pump
agrees
genuine
imprisonment
refuses
plymouth
##hen
lou
##nen
tara
trembling
antarctic
ton
learns
##tas
crap
crucial
faction
atop
##borough
wrap
lancaster
odds
hopkins
erik
lyon
##eon
bros
##ode
snap
locality
tips
empress
crowned
cal
acclaimed
chuckled
##ory
clara
sends
mild
towel
##fl
##day
##а
wishing
assuming
interviewed
##bal
##die
interactions
eden
cups
helena
##lf
indie
beck
##fire
batteries
filipino
wizard
parted
##lam
traces
##born
rows
idol
albany
delegates
##ees
##sar
discussions
##ex
notre
instructed
belgrade
highways
suggestion
lauren
possess
orientation
alexandria
abdul
beats
salary
reunion
ludwig
alright
wagner
intimate
pockets
slovenia
hugged
brighton
merchants
cruel
stole
trek
slopes
repairs
enrollment
politically
underlying
promotional
counting
boeing
##bb
isabella
naming
##и
keen
bacteria
listing
separately
belfast
ussr
450
lithuanian
anybody
ribs
sphere
martinez
cock
embarrassed
proposals
fragments
nationals
##fs
##wski
premises
fin
1500
alpine
matched
freely
bounded
jace
sleeve
##af
gaming
pier
populated
evident
##like
frances
flooded
##dle
frightened
pour
trainer
framed
visitor
challenging
pig
wickets
##fold
infected
email
##pes
arose
##aw
reward
ecuador
oblast
vale
ch
shuttle
##usa
bach
rankings
forbidden
cornwall
accordance
salem
consumers
bruno
fantastic
toes
machinery
resolved
julius
remembering
propaganda
iceland
bombardment
tide
contacts
wives
##rah
concerto
macdonald
albania
implement
daisy
tapped
sudan
helmet
angela
mistress
##lic
crop
sunk
finest
##craft
hostile
##ute
##tsu
boxer
fr
paths
adjusted
habit
ballot
supervision
soprano
##zen
bullets
wicked
sunset
regiments
disappear
lamp
performs
app
##gia
##oa
rabbit
digging
incidents
entries
##cion
dishes
##oi
introducing
##ati
##fied
freshman
slot
jill
tackles
baroque
backs
##iest
lone
sponsor
destiny
altogether
convert
##aro
consensus
shapes
demonstration
basically
feminist
auction
artifacts
##bing
strongest
twitter
halifax
2019
allmusic
mighty
smallest
precise
alexandra
viola
##los
##ille
manuscripts
##illo
dancers
ari
managers
monuments
blades
barracks
springfield
maiden
consolidated
electron
##end
berry
airing
wheat
nobel
inclusion
blair
payments
geography
bee
cc
eleanor
react
##hurst
afc
manitoba
##yu
su
lineup
fitness
recreational
investments
airborne
disappointment
##dis
edmonton
viewing
##row
renovation
##cast
infant
bankruptcy
roses
aftermath
pavilion
##yer
carpenter
withdrawal
ladder
##hy
discussing
popped
reliable
agreements
rochester
##abad
curves
bombers
220
rao
reverend
decreased
choosing
107
stiff
consulting
naples
crawford
tracy
ka
ribbon
cops
##lee
crushed
deciding
unified
teenager
accepting
flagship
explorer
poles
sanchez
inspection
revived
skilled
induced
exchanged
flee
locals
tragedy
swallow
loading
hanna
demonstrate
##ela
salvador
flown
contestants
civilization
##ines
wanna
rhodes
fletcher
hector
knocking
considers
##ough
nash
mechanisms
sensed
mentally
walt
unclear
##eus
renovated
madame
##cks
crews
governmental
##hin
undertaken
monkey
##ben
##ato
fatal
armored
copa
caves
governance
grasp
perception
certification
froze
damp
tugged
wyoming
##rg
##ero
newman
##lor
nerves
curiosity
graph
115
##ami
withdraw
tunnels
dull
meredith
moss
exhibits
neighbors
communicate
accuracy
explored
raiders
republicans
secular
kat
superman
penny
criticised
##tch
freed
update
conviction
wade
ham
likewise
delegation
gotta
doll
promises
technological
myth
nationality
resolve
convent
##mark
sharon
dig
sip
coordinator
entrepreneur
fold
##dine
capability
councillor
synonym
blown
swan
cursed
1815
jonas
haired
sofa
canvas
keeper
rivalry
##hart
rapper
speedway
swords
postal
maxwell
estonia
potter
recurring
##nn
##ave
errors
##oni
cognitive
1834
##²
claws
nadu
roberto
bce
wrestler
ellie
##ations
infinite
ink
##tia
presumably
finite
staircase
108
noel
patricia
nacional
##cation
chill
eternal
tu
preventing
prussia
fossil
limbs
##logist
ernst
frog
perez
rene
##ace
pizza
prussian
##ios
##vy
molecules
regulatory
answering
opinions
sworn
lengths
supposedly
hypothesis
upward
habitats
seating
ancestors
drank
yield
hd
synthesis
researcher
modest
##var
mothers
peered
voluntary
homeland
##the
acclaim
##igan
static
valve
luxembourg
alto
carroll
fe
receptor
norton
ambulance
##tian
johnston
catholics
depicting
jointly
elephant
gloria
mentor
badge
ahmad
distinguish
remarked
councils
precisely
allison
advancing
detection
crowded
##10
cooperative
ankle
mercedes
dagger
surrendered
pollution
commit
subway
jeffrey
lesson
sculptures
provider
##fication
membrane
timothy
rectangular
fiscal
heating
teammate
basket
particle
anonymous
deployment
##ple
missiles
courthouse
proportion
shoe
sec
##ller
complaints
forbes
blacks
abandon
remind
sizes
overwhelming
autobiography
natalie
##awa
risks
contestant
countryside
babies
scorer
invaded
enclosed
proceed
hurling
disorders
##cu
reflecting
continuously
cruiser
graduates
freeway
investigated
ore
deserved
maid
blocking
phillip
jorge
shakes
dove
mann
variables
lacked
burden
accompanying
que
consistently
organizing
provisional
complained
endless
##rm
tubes
juice
georges
krishna
mick
labels
thriller
##uch
laps
arcade
sage
snail
##table
shannon
fi
laurence
seoul
vacation
presenting
hire
churchill
surprisingly
prohibited
savannah
technically
##oli
170
##lessly
testimony
suited
speeds
toys
romans
mlb
flowering
measurement
talented
kay
settings
charleston
expectations
shattered
achieving
triumph
ceremonies
portsmouth
lanes
mandatory
loser
stretching
cologne
realizes
seventy
cornell
careers
webb
##ulating
americas
budapest
ava
suspicion
##ison
yo
conrad
##hai
sterling
jessie
rector
##az
1831
transform
organize
loans
christine
volcanic
warrant
slender
summers
subfamily
newer
danced
dynamics
rhine
proceeds
heinrich
gastropod
commands
sings
facilitate
easter
ra
positioned
responses
expense
fruits
yanked
imported
25th
velvet
vic
primitive
tribune
baldwin
neighbourhood
donna
rip
hay
pr
##uro
1814
espn
welcomed
##aria
qualifier
glare
highland
timing
##cted
shells
eased
geometry
louder
exciting
slovakia
##sion
##iz
##lot
savings
prairie
##ques
marching
rafael
tonnes
##lled
curtain
preceding
shy
heal
greene
worthy
##pot
detachment
bury
sherman
##eck
reinforced
seeks
bottles
contracted
duchess
outfit
walsh
##sc
mickey
##ase
geoffrey
archer
squeeze
dawson
eliminate
invention
##enberg
neal
##eth
stance
dealer
coral
maple
retire
polo
simplified
##ht
1833
hid
watts
backwards
jules
##oke
genesis
mt
frames
rebounds
burma
woodland
moist
santos
whispers
drained
subspecies
##aa
streaming
ulster
burnt
correspondence
maternal
gerard
denis
stealing
##load
genius
duchy
##oria
inaugurated
momentum
suits
placement
sovereign
clause
thames
##hara
confederation
reservation
sketch
yankees
lets
rotten
charm
hal
verses
ultra
commercially
dot
salon
citation
adopt
winnipeg
mist
allocated
cairo
##boy
jenkins
interference
objectives
##wind
1820
portfolio
armoured
sectors
##eh
initiatives
##world
integrity
exercises
robe
tap
ab
gazed
##tones
distracted
rulers
111
favorable
jerome
tended
cart
factories
##eri
diplomat
valued
gravel
charitable
##try
calvin
exploring
chang
shepherd
terrace
pdf
pupil
##ural
reflects
ups
##rch
governors
shelf
depths
##nberg
trailed
crest
tackle
##nian
##ats
hatred
##kai
clare
makers
ethiopia
longtime
detected
embedded
lacking
slapped
rely
thomson
anticipation
iso
morton
successive
agnes
screenwriter
straightened
philippe
playwright
haunted
licence
iris
intentions
sutton
112
logical
correctly
##weight
branded
licked
tipped
silva
ricky
narrator
requests
##ents
greeted
supernatural
cow
##wald
lung
refusing
employer
strait
gaelic
liner
##piece
zoe
sabha
##mba
driveway
harvest
prints
bates
reluctantly
threshold
algebra
ira
wherever
coupled
240
assumption
picks
##air
designers
raids
gentlemen
##ean
roller
blowing
leipzig
locks
screw
dressing
strand
##lings
scar
dwarf
depicts
##nu
nods
##mine
differ
boris
##eur
yuan
flip
##gie
mob
invested
questioning
applying
##ture
shout
##sel
gameplay
blamed
illustrations
bothered
weakness
rehabilitation
##of
##zes
envelope
rumors
miners
leicester
subtle
kerry
##ico
ferguson
##fu
premiership
ne
##cat
bengali
prof
catches
remnants
dana
##rily
shouting
presidents
baltic
ought
ghosts
dances
sailors
shirley
fancy
dominic
##bie
madonna
##rick
bark
buttons
gymnasium
ashes
liver
toby
oath
providence
doyle
evangelical
nixon
cement
carnegie
embarked
hatch
surroundings
guarantee
needing
pirate
essence
##bee
filter
crane
hammond
projected
immune
percy
twelfth
##ult
regent
doctoral
damon
mikhail
##ichi
lu
critically
elect
realised
abortion
acute
screening
mythology
steadily
##fc
frown
nottingham
kirk
wa
minneapolis
##rra
module
algeria
mc
nautical
encounters
surprising
statues
availability
shirts
pie
alma
brows
munster
mack
soup
crater
tornado
sanskrit
cedar
explosive
bordered
dixon
planets
stamp
exam
happily
##bble
carriers
kidnapped
##vis
accommodation
emigrated
##met
knockout
correspondent
violation
profits
peaks
lang
specimen
agenda
ancestry
pottery
spelling
equations
obtaining
ki
linking
1825
debris
asylum
##20
buddhism
teddy
##ants
gazette
##nger
##sse
dental
eligibility
utc
fathers
averaged
zimbabwe
francesco
coloured
hissed
translator
lynch
mandate
humanities
mackenzie
uniforms
lin
##iana
##gio
asset
mhz
fitting
samantha
genera
wei
rim
beloved
shark
riot
entities
expressions
indo
carmen
slipping
owing
abbot
neighbor
sidney
##av
rats
recommendations
encouraging
squadrons
anticipated
commanders
conquered
##oto
donations
diagnosed
##mond
divide
##iva
guessed
decoration
vernon
auditorium
revelation
conversations
##kers
##power
herzegovina
dash
alike
protested
lateral
herman
accredited
mg
##gent
freeman
mel
fiji
crow
crimson
##rine
livestock
##pped
humanitarian
bored
oz
whip
##lene
##ali
legitimate
alter
grinning
spelled
anxious
oriental
wesley
##nin
##hole
carnival
controller
detect
##ssa
bowed
educator
kosovo
macedonia
##sin
occupy
mastering
stephanie
janeiro
para
unaware
nurses
noon
135
cam
hopefully
ranger
combine
sociology
polar
rica
##eer
neill
##sman
holocaust
##ip
doubled
lust
1828
109
decent
cooling
unveiled
##card
1829
nsw
homer
chapman
meyer
##gin
dive
mae
reagan
expertise
##gled
darwin
brooke
sided
prosecution
investigating
comprised
petroleum
genres
reluctant
differently
trilogy
johns
vegetables
corpse
highlighted
lounge
pension
unsuccessfully
elegant
aided
ivory
beatles
amelia
cain
dubai
sunny
immigrant
babe
click
##nder
underwater
pepper
combining
mumbled
atlas
horns
accessed
ballad
physicians
homeless
gestured
rpm
freak
louisville
corporations
patriots
prizes
rational
warn
modes
decorative
overnight
din
troubled
phantom
##ort
monarch
sheer
##dorf
generals
guidelines
organs
addresses
##zon
enhance
curling
parishes
cord
##kie
linux
caesar
deutsche
bavaria
##bia
coleman
cyclone
##eria
bacon
petty
##yama
##old
hampton
diagnosis
1824
throws
complexity
rita
disputed
##₃
pablo
##sch
marketed
trafficking
##ulus
examine
plague
formats
##oh
vault
faithful
##bourne
webster
##ox
highlights
##ient
##ann
phones
vacuum
sandwich
modeling
##gated
bolivia
clergy
qualities
isabel
##nas
##ars
wears
screams
reunited
annoyed
bra
##ancy
##rate
differential
transmitter
tattoo
container
poker
##och
excessive
resides
cowboys
##tum
augustus
trash
providers
statute
retreated
balcony
reversed
void
storey
preceded
masses
leap
laughs
neighborhoods
wards
schemes
falcon
santo
battlefield
pad
ronnie
thread
lesbian
venus
##dian
beg
sandstone
daylight
punched
gwen
analog
stroked
wwe
acceptable
measurements
dec
toxic
##kel
adequate
surgical
economist
parameters
varsity
##sberg
quantity
ella
##chy
##rton
countess
generating
precision
diamonds
expressway
ga
##ı
1821
uruguay
talents
galleries
expenses
scanned
colleague
outlets
ryder
lucien
##ila
paramount
##bon
syracuse
dim
fangs
gown
sweep
##sie
toyota
missionaries
websites
##nsis
sentences
adviser
val
trademark
spells
##plane
patience
starter
slim
##borg
toe
incredibly
shoots
elliot
nobility
##wyn
cowboy
endorsed
gardner
tendency
persuaded
organisms
emissions
kazakhstan
amused
boring
chips
themed
##hand
llc
constantinople
chasing
systematic
guatemala
borrowed
erin
carey
##hard
highlands
struggles
1810
##ifying
##ced
wong
exceptions
develops
enlarged
kindergarten
castro
##ern
##rina
leigh
zombie
juvenile
##most
consul
##nar
sailor
hyde
clarence
intensive
pinned
nasty
useless
jung
clayton
stuffed
exceptional
ix
apostolic
230
transactions
##dge
exempt
swinging
cove
religions
##ash
shields
dairy
bypass
190
pursuing
bug
joyce
bombay
chassis
southampton
chat
interact
redesignated
##pen
nascar
pray
salmon
rigid
regained
malaysian
grim
publicity
constituted
capturing
toilet
delegate
purely
tray
drift
loosely
striker
weakened
trinidad
mitch
itv
defines
transmitted
ming
scarlet
nodding
fitzgerald
fu
narrowly
sp
tooth
standings
virtue
##₁
##wara
##cting
chateau
gloves
lid
##nel
hurting
conservatory
##pel
sinclair
reopened
sympathy
nigerian
strode
advocated
optional
chronic
discharge
##rc
suck
compatible
laurel
stella
shi
fails
wage
dodge
128
informal
sorts
levi
buddha
villagers
##aka
chronicles
heavier
summoned
gateway
3000
eleventh
jewelry
translations
accordingly
seas
##ency
fiber
pyramid
cubic
dragging
##ista
caring
##ops
android
contacted
lunar
##dt
kai
lisbon
patted
1826
sacramento
theft
madagascar
subtropical
disputes
ta
holidays
piper
willow
mare
cane
itunes
newfoundland
benny
companions
dong
raj
observe
roar
charming
plaque
tibetan
fossils
enacted
manning
bubble
tina
tanzania
##eda
##hir
funk
swamp
deputies
cloak
ufc
scenario
par
scratch
metals
anthem
guru
engaging
specially
##boat
dialects
nineteen
cecil
duet
disability
messenger
unofficial
##lies
defunct
eds
moonlight
drainage
surname
puzzle
honda
switching
conservatives
mammals
knox
broadcaster
sidewalk
cope
##ried
benson
princes
peterson
##sal
bedford
sharks
eli
wreck
alberto
gasp
archaeology
lgbt
teaches
securities
madness
compromise
waving
coordination
davidson
visions
leased
possibilities
eighty
jun
fernandez
enthusiasm
assassin
sponsorship
reviewer
kingdoms
estonian
laboratories
##fy
##nal
applies
verb
celebrations
##zzo
rowing
lightweight
sadness
submit
mvp
balanced
dude
##vas
explicitly
metric
magnificent
mound
brett
mohammad
mistakes
irregular
##hing
##ass
sanders
betrayed
shipped
surge
##enburg
reporters
termed
georg
pity
verbal
bulls
abbreviated
enabling
appealed
##are
##atic
sicily
sting
heel
sweetheart
bart
spacecraft
brutal
monarchy
##tter
aberdeen
cameo
diane
##ub
survivor
clyde
##aries
complaint
##makers
clarinet
delicious
chilean
karnataka
coordinates
1818
panties
##rst
pretending
ar
dramatically
kiev
bella
tends
distances
113
catalog
launching
instances
telecommunications
portable
lindsay
vatican
##eim
angles
aliens
marker
stint
screens
bolton
##rne
judy
wool
benedict
plasma
europa
spark
imaging
filmmaker
swiftly
##een
contributor
##nor
opted
stamps
apologize
financing
butter
gideon
sophisticated
alignment
avery
chemicals
yearly
speculation
prominence
professionally
##ils
immortal
institutional
inception
wrists
identifying
tribunal
derives
gains
##wo
papal
preference
linguistic
vince
operative
brewery
##ont
unemployment
boyd
##ured
##outs
albeit
prophet
1813
bi
##rr
##face
##rad
quarterly
asteroid
cleaned
radius
temper
##llen
telugu
jerk
viscount
menu
##ote
glimpse
##aya
yacht
hawaiian
baden
##rl
laptop
readily
##gu
monetary
offshore
scots
watches
##yang
##arian
upgrade
needle
xbox
lea
encyclopedia
flank
fingertips
##pus
delight
teachings
confirm
roth
beaches
midway
winters
##iah
teasing
daytime
beverly
gambling
bonnie
##backs
regulated
clement
hermann
tricks
knot
##shing
##uring
##vre
detached
ecological
owed
specialty
byron
inventor
bats
stays
screened
unesco
midland
trim
affection
##ander
##rry
jess
thoroughly
feedback
##uma
chennai
strained
heartbeat
wrapping
overtime
pleaded
##sworth
mon
leisure
oclc
##tate
##ele
feathers
angelo
thirds
nuts
surveys
clever
gill
commentator
##dos
darren
rides
gibraltar
##nc
##mu
dissolution
dedication
shin
meals
saddle
elvis
reds
chaired
taller
appreciation
functioning
niece
favored
advocacy
robbie
criminals
suffolk
yugoslav
passport
constable
congressman
hastings
vera
##rov
consecrated
sparks
ecclesiastical
confined
##ovich
muller
floyd
nora
1822
paved
1827
cumberland
ned
saga
spiral
##flow
appreciated
yi
collaborative
treating
similarities
feminine
finishes
##ib
jade
import
##nse
##hot
champagne
mice
securing
celebrities
helsinki
attributes
##gos
cousins
phases
ache
lucia
gandhi
submission
vicar
spear
shine
tasmania
biting
detention
constitute
tighter
seasonal
##gus
terrestrial
matthews
##oka
effectiveness
parody
philharmonic
##onic
1816
strangers
encoded
consortium
guaranteed
regards
shifts
tortured
collision
supervisor
inform
broader
insight
theaters
armour
emeritus
blink
incorporates
mapping
##50
##ein
handball
flexible
##nta
substantially
generous
thief
##own
carr
loses
1793
prose
ucla
romeo
generic
metallic
realization
damages
mk
commissioners
zach
default
##ther
helicopters
lengthy
stems
spa
partnered
spectators
rogue
indication
penalties
teresa
1801
sen
##tric
dalton
##wich
irving
photographic
##vey
dell
deaf
peters
excluded
unsure
##vable
patterson
crawled
##zio
resided
whipped
latvia
slower
ecole
pipes
employers
maharashtra
comparable
va
textile
pageant
##gel
alphabet
binary
irrigation
chartered
choked
antoine
offs
waking
supplement
##wen
quantities
demolition
regain
locate
urdu
folks
alt
114
##mc
scary
andreas
whites
##ava
classrooms
mw
aesthetic
publishes
valleys
guides
cubs
johannes
bryant
conventions
affecting
##itt
drain
awesome
isolation
prosecutor
ambitious
apology
captive
downs
atmospheric
lorenzo
aisle
beef
foul
##onia
kidding
composite
disturbed
illusion
natives
##ffer
emi
rockets
riverside
wartime
painters
adolf
melted
##ail
uncertainty
simulation
hawks
progressed
meantime
builder
spray
breach
unhappy
regina
russians
##urg
determining
##tation
tram
1806
##quin
aging
##12
1823
garion
rented
mister
diaz
terminated
clip
1817
depend
nervously
disco
owe
defenders
shiva
notorious
disbelief
shiny
worcester
##gation
##yr
trailing
undertook
islander
belarus
limitations
watershed
fuller
overlooking
utilized
raphael
1819
synthetic
breakdown
klein
##nate
moaned
memoir
lamb
practicing
##erly
cellular
arrows
exotic
##graphy
witches
117
charted
rey
hut
hierarchy
subdivision
freshwater
giuseppe
aloud
reyes
qatar
marty
sideways
utterly
sexually
jude
prayers
mccarthy
softball
blend
damien
##gging
##metric
wholly
erupted
lebanese
negro
revenues
tasted
comparative
teamed
transaction
labeled
maori
sovereignty
parkway
trauma
gran
malay
121
advancement
descendant
2020
buzz
salvation
inventory
symbolic
##making
antarctica
mps
##gas
##bro
mohammed
myanmar
holt
submarines
tones
##lman
locker
patriarch
bangkok
emerson
remarks
predators
kin
afghan
confession
norwich
rental
emerge
advantages
##zel
rca
##hold
shortened
storms
aidan
##matic
autonomy
compliance
##quet
dudley
atp
##osis
1803
motto
documentation
summary
professors
spectacular
christina
archdiocese
flashing
innocence
remake
##dell
psychic
reef
scare
employ
rs
sticks
meg
gus
leans
##ude
accompany
bergen
tomas
##iko
doom
wages
pools
##nch
##bes
breasts
scholarly
alison
outline
brittany
breakthrough
willis
realistic
##cut
##boro
competitor
##stan
pike
picnic
icon
designing
commercials
washing
villain
skiing
micro
costumes
auburn
halted
executives
##hat
logistics
cycles
vowel
applicable
barrett
exclaimed
eurovision
eternity
ramon
##umi
##lls
modifications
sweeping
disgust
##uck
torch
aviv
ensuring
rude
dusty
sonic
donovan
outskirts
cu
pathway
##band
##gun
##lines
disciplines
acids
cadet
paired
##40
sketches
##sive
marriages
##⁺
folding
peers
slovak
implies
admired
##beck
1880s
leopold
instinct
attained
weston
megan
horace
##ination
dorsal
ingredients
evolutionary
##its
complications
deity
lethal
brushing
levy
deserted
institutes
posthumously
delivering
telescope
coronation
motivated
rapids
luc
flicked
pays
volcano
tanner
weighed
##nica
crowds
frankie
gifted
addressing
granddaughter
winding
##rna
constantine
gomez
##front
landscapes
rudolf
anthropology
slate
werewolf
##lio
astronomy
circa
rouge
dreaming
sack
knelt
drowned
naomi
prolific
tracked
freezing
herb
##dium
agony
randall
twisting
wendy
deposit
touches
vein
wheeler
##bbled
##bor
batted
retaining
tire
presently
compare
specification
daemon
nigel
##grave
merry
recommendation
czechoslovakia
sandra
ng
roma
##sts
lambert
inheritance
sheikh
winchester
cries
examining
##yle
comeback
cuisine
nave
##iv
ko
retrieve
tomatoes
barker
polished
defining
irene
lantern
personalities
begging
tract
swore
1809
175
##gic
omaha
brotherhood
##rley
haiti
##ots
exeter
##ete
##zia
steele
dumb
pearson
210
surveyed
elisabeth
trends
##ef
fritz
##rf
premium
bugs
fraction
calmly
viking
##birds
tug
inserted
unusually
##ield
confronted
distress
crashing
brent
turks
resign
##olo
cambodia
gabe
sauce
##kal
evelyn
116
extant
clusters
quarry
teenagers
luna
##lers
##ister
affiliation
drill
##ashi
panthers
scenic
libya
anita
strengthen
inscriptions
##cated
lace
sued
judith
riots
##uted
mint
##eta
preparations
midst
dub
challenger
##vich
mock
cf
displaced
wicket
breaths
enables
schmidt
analyst
##lum
ag
highlight
automotive
axe
josef
newark
sufficiently
resembles
50th
##pal
flushed
mum
traits
##ante
commodore
incomplete
warming
titular
ceremonial
ethical
118
celebrating
eighteenth
cao
lima
medalist
mobility
strips
snakes
##city
miniature
zagreb
barton
escapes
umbrella
automated
doubted
differs
cooled
georgetown
dresden
cooked
fade
wyatt
rna
jacobs
carlton
abundant
stereo
boost
madras
inning
##hia
spur
ip
malayalam
begged
osaka
groan
escaping
charging
dose
vista
##aj
bud
papa
communists
advocates
edged
tri
##cent
resemble
peaking
necklace
fried
montenegro
saxony
goose
glances
stuttgart
curator
recruit
grocery
sympathetic
##tting
##fort
127
lotus
randolph
ancestor
##rand
succeeding
jupiter
1798
macedonian
##heads
hiking
1808
handing
fischer
##itive
garbage
node
##pies
prone
singular
papua
inclined
attractions
italia
pouring
motioned
grandma
garnered
jacksonville
corp
ego
ringing
aluminum
##hausen
ordering
##foot
drawer
traders
synagogue
##play
##kawa
resistant
wandering
fragile
fiona
teased
var
hardcore
soaked
jubilee
decisive
exposition
mercer
poster
valencia
hale
kuwait
1811
##ises
##wr
##eed
tavern
gamma
122
johan
##uer
airways
amino
gil
##ury
vocational
domains
torres
##sp
generator
folklore
outcomes
##keeper
canberra
shooter
fl
beams
confrontation
##lling
##gram
feb
aligned
forestry
pipeline
jax
motorway
conception
decay
##tos
coffin
##cott
stalin
1805
escorted
minded
##nam
sitcom
purchasing
twilight
veronica
additions
passive
tensions
straw
123
frequencies
1804
refugee
cultivation
##iate
christie
clary
bulletin
crept
disposal
##rich
##zong
processor
crescent
##rol
bmw
emphasized
whale
nazis
aurora
##eng
dwelling
hauled
sponsors
toledo
mega
ideology
theatres
tessa
cerambycidae
saves
turtle
cone
suspects
kara
rusty
yelling
greeks
mozart
shades
cocked
participant
##tro
shire
spit
freeze
necessity
##cos
inmates
nielsen
councillors
loaned
uncommon
omar
peasants
botanical
offspring
daniels
formations
jokes
1794
pioneers
sigma
licensing
##sus
wheelchair
polite
1807
liquor
pratt
trustee
##uta
forewings
balloon
##zz
kilometre
camping
explicit
casually
shawn
foolish
teammates
nm
hassan
carrie
judged
satisfy
vanessa
knives
selective
cnn
flowed
##lice
eclipse
stressed
eliza
mathematician
cease
cultivated
##roy
commissions
browns
##ania
destroyers
sheridan
meadow
##rius
minerals
##cial
downstream
clash
gram
memoirs
ventures
baha
seymour
archie
midlands
edith
fare
flynn
invite
canceled
tiles
stabbed
boulder
incorporate
amended
camden
facial
mollusk
unreleased
descriptions
yoga
grabs
550
raises
ramp
shiver
##rose
coined
pioneering
tunes
qing
warwick
tops
119
melanie
giles
##rous
wandered
##inal
annexed
nov
30th
unnamed
##ished
organizational
airplane
normandy
stoke
whistle
blessing
violations
chased
holders
shotgun
##ctic
outlet
reactor
##vik
tires
tearing
shores
fortified
mascot
constituencies
nc
columnist
productive
tibet
##rta
lineage
hooked
oct
tapes
judging
cody
##gger
hansen
kashmir
triggered
##eva
solved
cliffs
##tree
resisted
anatomy
protesters
transparent
implied
##iga
injection
mattress
excluding
##mbo
defenses
helpless
devotion
##elli
growl
liberals
weber
phenomena
atoms
plug
##iff
mortality
apprentice
howe
convincing
aaa
swimmer
barber
leone
promptly
sodium
def
nowadays
arise
##oning
gloucester
corrected
dignity
norm
erie
##ders
elders
evacuated
sylvia
compression
##yar
hartford
pose
backpack
reasoning
accepts
24th
wipe
millimetres
marcel
##oda
dodgers
albion
1790
overwhelmed
aerospace
oaks
1795
showcase
acknowledge
recovering
nolan
ashe
hurts
geology
fashioned
disappearance
farewell
swollen
shrug
marquis
wimbledon
124
rue
1792
commemorate
reduces
experiencing
inevitable
calcutta
intel
##court
murderer
sticking
fisheries
imagery
bloom
280
brake
##inus
gustav
hesitation
memorable
po
viral
beans
accidents
tunisia
antenna
spilled
consort
treatments
aye
perimeter
##gard
donation
hostage
migrated
banker
addiction
apex
lil
trout
##ously
conscience
##nova
rams
sands
genome
passionate
troubles
##lets
##set
amid
##ibility
##ret
higgins
exceed
vikings
##vie
payne
##zan
muscular
##ste
defendant
sucking
##wal
ibrahim
fuselage
claudia
vfl
europeans
snails
interval
##garh
preparatory
statewide
tasked
lacrosse
viktor
##lation
angola
##hra
flint
implications
employs
teens
patrons
stall
weekends
barriers
scrambled
nucleus
tehran
jenna
parsons
lifelong
robots
displacement
5000
##bles
precipitation
##gt
knuckles
clutched
1802
marrying
ecology
marx
accusations
declare
scars
kolkata
mat
meadows
bermuda
skeleton
finalists
vintage
crawl
coordinate
affects
subjected
orchestral
mistaken
##tc
mirrors
dipped
relied
260
arches
candle
##nick
incorporating
wildly
fond
basilica
owl
fringe
rituals
whispering
stirred
feud
tertiary
slick
goat
honorable
whereby
skip
ricardo
stripes
parachute
adjoining
submerged
synthesizer
##gren
intend
positively
ninety
phi
beaver
partition
fellows
alexis
prohibition
carlisle
bizarre
fraternity
##bre
doubts
icy
cbc
aquatic
sneak
sonny
combines
airports
crude
supervised
spatial
merge
alfonso
##bic
corrupt
scan
undergo
##ams
disabilities
colombian
comparing
dolphins
perkins
##lish
reprinted
unanimous
bounced
hairs
underworld
midwest
semester
bucket
paperback
miniseries
coventry
demise
##leigh
demonstrations
sensor
rotating
yan
##hler
arrange
soils
##idge
hyderabad
labs
##dr
brakes
grandchildren
##nde
negotiated
rover
ferrari
continuation
directorate
augusta
stevenson
counterpart
gore
##rda
nursery
rican
ave
collectively
broadly
pastoral
repertoire
asserted
discovering
nordic
styled
fiba
cunningham
harley
middlesex
survives
tumor
tempo
zack
aiming
lok
urgent
##rade
##nto
devils
##ement
contractor
turin
##wl
##ool
bliss
repaired
simmons
moan
astronomical
cr
negotiate
lyric
1890s
lara
bred
clad
angus
pbs
##ience
engineered
posed
##lk
hernandez
possessions
elbows
psychiatric
strokes
confluence
electorate
lifts
campuses
lava
alps
##ep
##ution
##date
physicist
woody
##page
##ographic
##itis
juliet
reformation
sparhawk
320
complement
suppressed
jewel
##½
floated
##kas
continuity
sadly
##ische
inability
melting
scanning
paula
flour
judaism
safer
vague
##lm
solving
curb
##stown
financially
gable
bees
expired
miserable
cassidy
dominion
1789
cupped
145
robbery
facto
amos
warden
resume
tallest
marvin
ing
pounded
usd
declaring
gasoline
##aux
darkened
270
650
sophomore
##mere
erection
gossip
televised
risen
dial
##eu
pillars
##link
passages
profound
##tina
arabian
ashton
silicon
nail
##ead
##lated
##wer
##hardt
fleming
firearms
ducked
circuits
blows
waterloo
titans
##lina
atom
fireplace
cheshire
financed
activation
algorithms
##zzi
constituent
catcher
cherokee
partnerships
sexuality
platoon
tragic
vivian
guarded
whiskey
meditation
poetic
##late
##nga
##ake
porto
listeners
dominance
kendra
mona
chandler
factions
22nd
salisbury
attitudes
derivative
##ido
##haus
intake
paced
javier
illustrator
barrels
bias
cockpit
burnett
dreamed
ensuing
##anda
receptors
someday
hawkins
mattered
##lal
slavic
1799
jesuit
cameroon
wasted
tai
wax
lowering
victorious
freaking
outright
hancock
librarian
sensing
bald
calcium
myers
tablet
announcing
barack
shipyard
pharmaceutical
##uan
greenwich
flush
medley
patches
wolfgang
pt
speeches
acquiring
exams
nikolai
##gg
hayden
kannada
##type
reilly
##pt
waitress
abdomen
devastated
capped
pseudonym
pharmacy
fulfill
paraguay
1796
clicked
##trom
archipelago
syndicated
##hman
lumber
orgasm
rejection
clifford
lorraine
advent
mafia
rodney
brock
##ght
##used
##elia
cassette
chamberlain
despair
mongolia
sensors
developmental
upstream
##eg
##alis
spanning
165
trombone
basque
seeded
interred
renewable
rhys
leapt
revision
molecule
##ages
chord
vicious
nord
shivered
23rd
arlington
debts
corpus
sunrise
bays
blackburn
centimetres
##uded
shuddered
gm
strangely
gripping
cartoons
isabelle
orbital
##ppa
seals
proving
##lton
refusal
strengthened
bust
assisting
baghdad
batsman
portrayal
mara
pushes
spears
og
##cock
reside
nathaniel
brennan
1776
confirmation
caucus
##worthy
markings
yemen
nobles
ku
lazy
viewer
catalan
encompasses
sawyer
##fall
sparked
substances
patents
braves
arranger
evacuation
sergio
persuade
dover
tolerance
penguin
cum
jockey
insufficient
townships
occupying
declining
plural
processed
projection
puppet
flanders
introduces
liability
##yon
gymnastics
antwerp
taipei
hobart
candles
jeep
wes
observers
126
chaplain
bundle
glorious
##hine
hazel
flung
sol
excavations
dumped
stares
sh
bangalore
triangular
icelandic
intervals
expressing
turbine
##vers
songwriting
crafts
##igo
jasmine
ditch
rite
##ways
entertaining
comply
sorrow
wrestlers
basel
emirates
marian
rivera
helpful
##some
caution
downward
networking
##atory
##tered
darted
genocide
emergence
replies
specializing
spokesman
convenient
unlocked
fading
augustine
concentrations
resemblance
elijah
investigator
andhra
##uda
promotes
bean
##rrell
fleeing
wan
simone
announcer
##ame
##bby
lydia
weaver
132
residency
modification
##fest
stretches
##ast
alternatively
nat
lowe
lacks
##ented
pam
tile
concealed
inferior
abdullah
residences
tissues
vengeance
##ided
moisture
peculiar
groove
zip
bologna
jennings
ninja
oversaw
zombies
pumping
batch
livingston
emerald
installations
1797
peel
nitrogen
rama
##fying
##star
schooling
strands
responding
werner
##ost
lime
casa
accurately
targeting
##rod
underway
##uru
hemisphere
lester
##yard
occupies
2d
griffith
angrily
reorganized
##owing
courtney
deposited
##dd
##30
estadio
##ifies
dunn
exiled
##ying
checks
##combe
##о
##fly
successes
unexpectedly
blu
assessed
##flower
##ه
observing
sacked
spiders
kn
##tail
mu
nodes
prosperity
audrey
divisional
155
broncos
tangled
adjust
feeds
erosion
paolo
surf
directory
snatched
humid
admiralty
screwed
gt
reddish
##nese
modules
trench
lamps
bind
leah
bucks
competes
##nz
##form
transcription
##uc
isles
violently
clutching
pga
cyclist
inflation
flats
ragged
unnecessary
##hian
stubborn
coordinated
harriet
baba
disqualified
330
insect
wolfe
##fies
reinforcements
rocked
duel
winked
embraced
bricks
##raj
hiatus
defeats
pending
brightly
jealousy
##xton
##hm
##uki
lena
gdp
colorful
##dley
stein
kidney
##shu
underwear
wanderers
##haw
##icus
guardians
m³
roared
habits
##wise
permits
gp
uranium
punished
disguise
bundesliga
elise
dundee
erotic
partisan
pi
collectors
float
individually
rendering
behavioral
bucharest
ser
hare
valerie
corporal
nutrition
proportional
##isa
immense
##kis
pavement
##zie
##eld
sutherland
crouched
1775
##lp
suzuki
trades
endurance
operas
crosby
prayed
priory
rory
socially
##urn
gujarat
##pu
walton
cube
pasha
privilege
lennon
floods
thorne
waterfall
nipple
scouting
approve
##lov
minorities
voter
dwight
extensions
assure
ballroom
slap
dripping
privileges
rejoined
confessed
demonstrating
patriotic
yell
investor
##uth
pagan
slumped
squares
##cle
##kins
confront
bert
embarrassment
##aid
aston
urging
sweater
starr
yuri
brains
williamson
commuter
mortar
structured
selfish
exports
##jon
cds
##him
unfinished
##rre
mortgage
destinations
##nagar
canoe
solitary
buchanan
delays
magistrate
fk
##pling
motivation
##lier
##vier
recruiting
assess
##mouth
malik
antique
1791
pius
rahman
reich
tub
zhou
smashed
airs
galway
xii
conditioning
honduras
discharged
dexter
##pf
lionel
129
debates
lemon
tiffany
volunteered
dom
dioxide
procession
devi
sic
tremendous
advertisements
colts
transferring
verdict
hanover
decommissioned
utter
relate
pac
racism
##top
beacon
limp
similarity
terra
occurrence
ant
##how
becky
capt
updates
armament
richie
pal
##graph
halloween
mayo
##ssen
##bone
cara
serena
fcc
dolls
obligations
##dling
violated
lafayette
jakarta
exploitation
##ime
infamous
iconic
##lah
##park
kitty
moody
reginald
dread
spill
crystals
olivier
modeled
bluff
equilibrium
separating
notices
ordnance
extinction
onset
cosmic
attachment
sammy
expose
privy
anchored
##bil
abbott
admits
bending
baritone
emmanuel
policeman
vaughan
winged
climax
dresses
denny
polytechnic
mohamed
burmese
authentic
nikki
genetics
grandparents
homestead
gaza
postponed
metacritic
una
##sby
##bat
unstable
dissertation
##rial
##cian
curls
obscure
uncovered
bronx
praying
disappearing
##hoe
prehistoric
coke
turret
mutations
nonprofit
pits
monaco
##ي
##usion
prominently
dispatched
podium
##mir
uci
##uation
133
fortifications
birthplace
kendall
##lby
##oll
preacher
rack
goodman
##rman
persistent
##ott
countless
jaime
recorder
lexington
persecution
jumps
renewal
wagons
##11
crushing
##holder
decorations
##lake
abundance
wrath
laundry
£1
garde
##rp
jeanne
beetles
peasant
##sl
splitting
caste
sergei
##rer
##ema
scripts
##ively
rub
satellites
##vor
inscribed
verlag
scrapped
gale
packages
chick
potato
slogan
kathleen
arabs
##culture
counterparts
reminiscent
choral
##tead
rand
retains
bushes
dane
accomplish
courtesy
closes
##oth
slaughter
hague
krakow
lawson
tailed
elias
ginger
##ttes
canopy
betrayal
rebuilding
turf
##hof
frowning
allegiance
brigades
kicks
rebuild
polls
alias
nationalism
td
rowan
audition
bowie
fortunately
recognizes
harp
dillon
horrified
##oro
renault
##tics
ropes
##α
presumed
rewarded
infrared
wiping
accelerated
illustration
##rid
presses
practitioners
badminton
##iard
detained
##tera
recognizing
relates
misery
##sies
##tly
reproduction
piercing
potatoes
thornton
esther
manners
hbo
##aan
ours
bullshit
ernie
perennial
sensitivity
illuminated
rupert
##jin
##iss
##ear
rfc
nassau
##dock
staggered
socialism
##haven
appointments
nonsense
prestige
sharma
haul
##tical
solidarity
gps
##ook
##rata
igor
pedestrian
##uit
baxter
tenants
wires
medication
unlimited
guiding
impacts
diabetes
##rama
sasha
pas
clive
extraction
131
continually
constraints
##bilities
sonata
hunted
sixteenth
chu
planting
quote
mayer
pretended
abs
spat
##hua
ceramic
##cci
curtains
pigs
pitching
##dad
latvian
sore
dayton
##sted
##qi
patrols
slice
playground
##nted
shone
stool
apparatus
inadequate
mates
treason
##ija
desires
##liga
##croft
somalia
laurent
mir
leonardo
oracle
grape
obliged
chevrolet
thirteenth
stunning
enthusiastic
##ede
accounted
concludes
currents
basil
##kovic
drought
##rica
mai
##aire
shove
posting
##shed
pilgrimage
humorous
packing
fry
pencil
wines
smells
144
marilyn
aching
newest
clung
bon
neighbours
sanctioned
##pie
mug
##stock
drowning
##mma
hydraulic
##vil
hiring
reminder
lilly
investigators
##ncies
sour
##eous
compulsory
packet
##rion
##graphic
##elle
cannes
##inate
depressed
##rit
heroic
importantly
theresa
##tled
conway
saturn
marginal
rae
##xia
corresponds
royce
pact
jasper
explosives
packaging
aluminium
##ttered
denotes
rhythmic
spans
assignments
hereditary
outlined
originating
sundays
lad
reissued
greeting
beatrice
##dic
pillar
marcos
plots
handbook
alcoholic
judiciary
avant
slides
extract
masculine
blur
##eum
##force
homage
trembled
owens
hymn
trey
omega
signaling
socks
accumulated
reacted
attic
theo
lining
angie
distraction
primera
talbot
##key
1200
ti
creativity
billed
##hey
deacon
eduardo
identifies
proposition
dizzy
gunner
hogan
##yam
##pping
##hol
ja
##chan
jensen
reconstructed
##berger
clearance
darius
##nier
abe
harlem
plea
dei
circled
emotionally
notation
fascist
neville
exceeded
upwards
viable
ducks
##fo
workforce
racer
limiting
shri
##lson
possesses
1600
kerr
moths
devastating
laden
disturbing
locking
##cture
gal
fearing
accreditation
flavor
aide
1870s
mountainous
##baum
melt
##ures
motel
texture
servers
soda
##mb
herd
##nium
erect
puzzled
hum
peggy
examinations
gould
testified
geoff
ren
devised
sacks
##law
denial
posters
grunted
cesar
tutor
ec
gerry
offerings
byrne
falcons
combinations
ct
incoming
pardon
rocking
26th
avengers
flared
mankind
seller
uttar
loch
nadia
stroking
exposing
##hd
fertile
ancestral
instituted
##has
noises
prophecy
taxation
eminent
vivid
pol
##bol
dart
indirect
multimedia
notebook
upside
displaying
adrenaline
referenced
geometric
##iving
progression
##ddy
blunt
announce
##far
implementing
##lav
aggression
liaison
cooler
cares
headache
plantations
gorge
dots
impulse
thickness
ashamed
averaging
kathy
obligation
precursor
137
fowler
symmetry
thee
225
hears
##rai
undergoing
ads
butcher
bowler
##lip
cigarettes
subscription
goodness
##ically
browne
##hos
##tech
kyoto
donor
##erty
damaging
friction
drifting
expeditions
hardened
prostitution
152
fauna
blankets
claw
tossing
snarled
butterflies
recruits
investigative
coated
healed
138
communal
hai
xiii
academics
boone
psychologist
restless
lahore
stephens
mba
brendan
foreigners
printer
##pc
ached
explode
27th
deed
scratched
dared
##pole
cardiac
1780
okinawa
proto
commando
compelled
oddly
electrons
##base
replica
thanksgiving
##rist
sheila
deliberate
stafford
tidal
representations
hercules
ou
##path
##iated
kidnapping
lenses
##tling
deficit
samoa
mouths
consuming
computational
maze
granting
smirk
razor
fixture
ideals
inviting
aiden
nominal
##vs
issuing
julio
pitt
ramsey
docks
##oss
exhaust
##owed
bavarian
draped
anterior
mating
ethiopian
explores
noticing
##nton
discarded
convenience
hoffman
endowment
beasts
cartridge
mormon
paternal
probe
sleeves
interfere
lump
deadline
##rail
jenks
bulldogs
scrap
alternating
justified
reproductive
nam
seize
descending
secretariat
kirby
coupe
grouped
smash
panther
sedan
tapping
##18
lola
cheer
germanic
unfortunate
##eter
unrelated
##fan
subordinate
##sdale
suzanne
advertisement
##ility
horsepower
##lda
cautiously
discourse
luigi
##mans
##fields
noun
prevalent
mao
schneider
everett
surround
governorate
kira
##avia
westward
##take
misty
rails
sustainability
134
unused
##rating
packs
toast
unwilling
regulate
thy
suffrage
nile
awe
assam
definitions
travelers
affordable
##rb
conferred
sells
undefeated
beneficial
torso
basal
repeating
remixes
##pass
bahrain
cables
fang
##itated
excavated
numbering
statutory
##rey
deluxe
##lian
forested
ramirez
derbyshire
zeus
slamming
transfers
astronomer
banana
lottery
berg
histories
bamboo
##uchi
resurrection
posterior
bowls
vaguely
##thi
thou
preserving
tensed
offence
##inas
meyrick
callum
ridden
watt
langdon
tying
lowland
snorted
daring
truman
##hale
##girl
aura
overly
filing
weighing
goa
infections
philanthropist
saunders
eponymous
##owski
latitude
perspectives
reviewing
mets
commandant
radial
##kha
flashlight
reliability
koch
vowels
amazed
ada
elaine
supper
##rth
##encies
predator
debated
soviets
cola
##boards
##nah
compartment
crooked
arbitrary
fourteenth
##ctive
havana
majors
steelers
clips
profitable
ambush
exited
packers
##tile
nude
cracks
fungi
##е
limb
trousers
josie
shelby
tens
frederic
##ος
definite
smoothly
constellation
insult
baton
discs
lingering
##nco
conclusions
lent
staging
becker
grandpa
shaky
##tron
einstein
obstacles
sk
adverse
elle
economically
##moto
mccartney
thor
dismissal
motions
readings
nostrils
treatise
##pace
squeezing
evidently
prolonged
1783
venezuelan
je
marguerite
beirut
takeover
shareholders
##vent
denise
digit
airplay
norse
##bbling
imaginary
pills
hubert
blaze
vacated
eliminating
##ello
vine
mansfield
##tty
retrospective
barrow
borne
clutch
bail
forensic
weaving
##nett
##witz
desktop
citadel
promotions
worrying
dorset
ieee
subdivided
##iating
manned
expeditionary
pickup
synod
chuckle
185
barney
##rz
##ffin
functionality
karachi
litigation
meanings
uc
lick
turbo
anders
##ffed
execute
curl
oppose
ankles
typhoon
##د
##ache
##asia
linguistics
compassion
pressures
grazing
perfection
##iting
immunity
monopoly
muddy
backgrounds
136
namibia
francesca
monitors
attracting
stunt
tuition
##ии
vegetable
##mates
##quent
mgm
jen
complexes
forts
##ond
cellar
bites
seventeenth
royals
flemish
failures
mast
charities
##cular
peruvian
capitals
macmillan
ipswich
outward
frigate
postgraduate
folds
employing
##ouse
concurrently
fiery
##tai
contingent
nightmares
monumental
nicaragua
##kowski
lizard
mal
fielding
gig
reject
##pad
harding
##ipe
coastline
##cin
##nos
beethoven
humphrey
innovations
##tam
##nge
norris
doris
solicitor
huang
obey
141
##lc
niagara
##tton
shelves
aug
bourbon
curry
nightclub
specifications
hilton
##ndo
centennial
dispersed
worm
neglected
briggs
sm
font
kuala
uneasy
plc
##nstein
##bound
##aking
##burgh
awaiting
pronunciation
##bbed
##quest
eh
optimal
zhu
raped
greens
presided
brenda
worries
##life
venetian
marxist
turnout
##lius
refined
braced
sins
grasped
sunderland
nickel
speculated
lowell
cyrillic
communism
fundraising
resembling
colonists
mutant
freddie
usc
##mos
gratitude
##run
mural
##lous
chemist
wi
reminds
28th
steals
tess
pietro
##ingen
promoter
ri
microphone
honoured
rai
sant
##qui
feather
##nson
burlington
kurdish
terrorists
deborah
sickness
##wed
##eet
hazard
irritated
desperation
veil
clarity
##rik
jewels
xv
##gged
##ows
##cup
berkshire
unfair
mysteries
orchid
winced
exhaustion
renovations
stranded
obe
infinity
##nies
adapt
redevelopment
thanked
registry
olga
domingo
noir
tudor
ole
##atus
commenting
behaviors
##ais
crisp
pauline
probable
stirling
wigan
##bian
paralympics
panting
surpassed
##rew
luca
barred
pony
famed
##sters
cassandra
waiter
carolyn
exported
##orted
andres
destructive
deeds
jonah
castles
vacancy
suv
##glass
1788
orchard
yep
famine
belarusian
sprang
##forth
skinny
##mis
administrators
rotterdam
zambia
zhao
boiler
discoveries
##ride
##physics
lucius
disappointing
outreach
spoon
##frame
qualifications
unanimously
enjoys
regency
##iidae
stade
realism
veterinary
rodgers
dump
alain
chestnut
castile
censorship
rumble
gibbs
##itor
communion
reggae
inactivated
logs
loads
##houses
homosexual
##iano
ale
informs
##cas
phrases
plaster
linebacker
ambrose
kaiser
fascinated
850
limerick
recruitment
forge
mastered
##nding
leinster
rooted
threaten
##strom
borneo
##hes
suggestions
scholarships
propeller
documentaries
patronage
coats
constructing
invest
neurons
comet
entirety
shouts
identities
annoying
unchanged
wary
##antly
##ogy
neat
oversight
##kos
phillies
replay
constance
##kka
incarnation
humble
skies
minus
##acy
smithsonian
##chel
guerrilla
jar
cadets
##plate
surplus
audit
##aru
cracking
joanna
louisa
pacing
##lights
intentionally
##iri
diner
nwa
imprint
australians
tong
unprecedented
bunker
naive
specialists
ark
nichols
railing
leaked
pedal
##uka
shrub
longing
roofs
v8
captains
neural
tuned
##ntal
##jet
emission
medina
frantic
codex
definitive
sid
abolition
intensified
stocks
enrique
sustain
genoa
oxide
##written
clues
cha
##gers
tributaries
fragment
venom
##rity
##ente
##sca
muffled
vain
sire
laos
##ingly
##hana
hastily
snapping
surfaced
sentiment
motive
##oft
contests
approximate
mesa
luckily
dinosaur
exchanges
propelled
accord
bourne
relieve
tow
masks
offended
##ues
cynthia
##mmer
rains
bartender
zinc
reviewers
lois
##sai
legged
arrogant
rafe
rosie
comprise
handicap
blockade
inlet
lagoon
copied
drilling
shelley
petals
##inian
mandarin
obsolete
##inated
onward
arguably
productivity
cindy
praising
seldom
busch
discusses
raleigh
shortage
ranged
stanton
encouragement
firstly
conceded
overs
temporal
##uke
cbe
##bos
woo
certainty
pumps
##pton
stalked
##uli
lizzie
periodic
thieves
weaker
##night
gases
shoving
chooses
wc
##chemical
prompting
weights
##kill
robust
flanked
sticky
hu
tuberculosis
##eb
##eal
christchurch
resembled
wallet
reese
inappropriate
pictured
distract
fixing
fiddle
giggled
burger
heirs
hairy
mechanic
torque
apache
obsessed
chiefly
cheng
logging
##tag
extracted
meaningful
numb
##vsky
gloucestershire
reminding
##bay
unite
##lit
breeds
diminished
clown
glove
1860s
##ن
##ug
archibald
focal
freelance
sliced
depiction
##yk
organism
switches
sights
stray
crawling
##ril
lever
leningrad
interpretations
loops
anytime
reel
alicia
delighted
##ech
inhaled
xiv
suitcase
bernie
vega
licenses
northampton
exclusion
induction
monasteries
racecourse
homosexuality
##right
##sfield
##rky
dimitri
michele
alternatives
ions
commentators
genuinely
objected
pork
hospitality
fencing
stephan
warships
peripheral
wit
drunken
wrinkled
quentin
spends
departing
chung
numerical
spokesperson
##zone
johannesburg
caliber
killers
##udge
assumes
neatly
demographic
abigail
bloc
##vel
mounting
##lain
bentley
slightest
xu
recipients
##jk
merlin
##writer
seniors
prisons
blinking
hindwings
flickered
kappa
##hel
80s
strengthening
appealing
brewing
gypsy
mali
lashes
hulk
unpleasant
harassment
bio
treaties
predict
instrumentation
pulp
troupe
boiling
mantle
##ffe
ins
##vn
dividing
handles
verbs
##onal
coconut
senegal
340
thorough
gum
momentarily
##sto
cocaine
panicked
destined
##turing
teatro
denying
weary
captained
mans
##hawks
##code
wakefield
bollywood
thankfully
##16
cyril
##wu
amendments
##bahn
consultation
stud
reflections
kindness
1787
internally
##ovo
tex
mosaic
distribute
paddy
seeming
143
##hic
piers
##15
##mura
##verse
popularly
winger
kang
sentinel
mccoy
##anza
covenant
##bag
verge
fireworks
suppress
thrilled
dominate
##jar
swansea
##60
142
reconciliation
##ndi
stiffened
cue
dorian
##uf
damascus
amor
ida
foremost
##aga
porsche
unseen
dir
##had
##azi
stony
lexi
melodies
##nko
angular
integer
podcast
ants
inherent
jaws
justify
persona
##olved
josephine
##nr
##ressed
customary
flashes
gala
cyrus
glaring
backyard
ariel
physiology
greenland
html
stir
avon
atletico
finch
methodology
ked
##lent
mas
catholicism
townsend
branding
quincy
fits
containers
1777
ashore
aragon
##19
forearm
poisoning
##sd
adopting
conquer
grinding
amnesty
keller
finances
evaluate
forged
lankan
instincts
##uto
guam
bosnian
photographed
workplace
desirable
protector
##dog
allocation
intently
encourages
willy
##sten
bodyguard
electro
brighter
##ν
bihar
##chev
lasts
opener
amphibious
sal
verde
arte
##cope
captivity
vocabulary
yields
##tted
agreeing
desmond
pioneered
##chus
strap
campaigned
railroads
##ович
emblem
##dre
stormed
501
##ulous
marijuana
northumberland
##gn
##nath
bowen
landmarks
beaumont
##qua
danube
##bler
attorneys
th
ge
flyers
critique
villains
cass
mutation
acc
##0s
colombo
mckay
motif
sampling
concluding
syndicate
##rell
neon
stables
ds
warnings
clint
mourning
wilkinson
##tated
merrill
leopard
evenings
exhaled
emil
sonia
ezra
discrete
stove
farrell
fifteenth
prescribed
superhero
##rier
worms
helm
wren
##duction
##hc
expo
##rator
hq
unfamiliar
antony
prevents
acceleration
fiercely
mari
painfully
calculations
cheaper
ign
clifton
irvine
davenport
mozambique
##np
pierced
##evich
wonders
##wig
##cate
##iling
crusade
ware
##uel
enzymes
reasonably
mls
##coe
mater
ambition
bunny
eliot
kernel
##fin
asphalt
headmaster
torah
aden
lush
pins
waived
##care
##yas
joao
substrate
enforce
##grad
##ules
alvarez
selections
epidemic
tempted
##bit
bremen
translates
ensured
waterfront
29th
forrest
manny
malone
kramer
reigning
cookies
simpler
absorption
205
engraved
##ffy
evaluated
1778
haze
146
comforting
crossover
##abe
thorn
##rift
##imo
##pop
suppression
fatigue
cutter
##tr
201
wurttemberg
##orf
enforced
hovering
proprietary
gb
samurai
syllable
ascent
lacey
tick
lars
tractor
merchandise
rep
bouncing
defendants
##yre
huntington
##ground
##oko
standardized
##hor
##hima
assassinated
nu
predecessors
rainy
liar
assurance
lyrical
##uga
secondly
flattened
ios
parameter
undercover
##mity
bordeaux
punish
ridges
markers
exodus
inactive
hesitate
debbie
nyc
pledge
savoy
nagar
offset
organist
##tium
hesse
marin
converting
##iver
diagram
propulsion
pu
validity
reverted
supportive
##dc
ministries
clans
responds
proclamation
##inae
##ø
##rea
ein
pleading
patriot
sf
birch
islanders
strauss
hates
##dh
brandenburg
concession
rd
##ob
1900s
killings
textbook
antiquity
cinematography
wharf
embarrassing
setup
creed
farmland
inequality
centred
signatures
fallon
370
##ingham
##uts
ceylon
gazing
directive
laurie
##tern
globally
##uated
##dent
allah
excavation
threads
##cross
148
frantically
icc
utilize
determines
respiratory
thoughtful
receptions
##dicate
merging
chandra
seine
147
builders
builds
diagnostic
dev
visibility
goddamn
analyses
dhaka
cho
proves
chancel
concurrent
curiously
canadians
pumped
restoring
1850s
turtles
jaguar
sinister
spinal
traction
declan
vows
1784
glowed
capitalism
swirling
install
universidad
##lder
##oat
soloist
##genic
##oor
coincidence
beginnings
nissan
dip
resorts
caucasus
combustion
infectious
##eno
pigeon
serpent
##itating
conclude
masked
salad
jew
##gr
surreal
toni
##wc
harmonica
151
##gins
##etic
##coat
fishermen
intending
bravery
##wave
klaus
titan
wembley
taiwanese
ransom
40th
incorrect
hussein
eyelids
jp
cooke
dramas
utilities
##etta
##print
eisenhower
principally
granada
lana
##rak
openings
concord
##bl
bethany
connie
morality
sega
##mons
##nard
earnings
##kara
##cine
wii
communes
##rel
coma
composing
softened
severed
grapes
##17
nguyen
analyzed
warlord
hubbard
heavenly
behave
slovenian
##hit
##ony
hailed
filmmakers
trance
caldwell
skye
unrest
coward
likelihood
##aging
bern
sci
taliban
honolulu
propose
##wang
1700
browser
imagining
cobra
contributes
dukes
instinctively
conan
violinist
##ores
accessories
gradual
##amp
quotes
sioux
##dating
undertake
intercepted
sparkling
compressed
139
fungus
tombs
haley
imposing
rests
degradation
lincolnshire
retailers
wetlands
tulsa
distributor
dungeon
nun
greenhouse
convey
atlantis
aft
exits
oman
dresser
lyons
##sti
joking
eddy
judgement
omitted
digits
##cts
##game
juniors
##rae
cents
stricken
une
##ngo
wizards
weir
breton
nan
technician
fibers
liking
royalty
##cca
154
persia
terribly
magician
##rable
##unt
vance
cafeteria
booker
camille
warmer
##static
consume
cavern
gaps
compass
contemporaries
foyer
soothing
graveyard
maj
plunged
blush
##wear
cascade
demonstrates
ordinance
##nov
boyle
##lana
rockefeller
shaken
banjo
izzy
##ense
breathless
vines
##32
##eman
alterations
chromosome
dwellings
feudal
mole
153
catalonia
relics
tenant
mandated
##fm
fridge
hats
honesty
patented
raul
heap
cruisers
accusing
enlightenment
infants
wherein
chatham
contractors
zen
affinity
hc
osborne
piston
156
traps
maturity
##rana
lagos
##zal
peering
##nay
attendant
dealers
protocols
subset
prospects
biographical
##cre
artery
##zers
insignia
nuns
endured
##eration
recommend
schwartz
serbs
berger
cromwell
crossroads
##ctor
enduring
clasped
grounded
##bine
marseille
twitched
abel
choke
https
catalyst
moldova
italians
##tist
disastrous
wee
##oured
##nti
wwf
nope
##piration
##asa
expresses
thumbs
167
##nza
coca
1781
cheating
##ption
skipped
sensory
heidelberg
spies
satan
dangers
semifinal
202
bohemia
whitish
confusing
shipbuilding
relies
surgeons
landings
ravi
baku
moor
suffix
alejandro
##yana
litre
upheld
##unk
rajasthan
##rek
coaster
insists
posture
scenarios
etienne
favoured
appoint
transgender
elephants
poked
greenwood
defences
fulfilled
militant
somali
1758
chalk
potent
##ucci
migrants
wink
assistants
nos
restriction
activism
niger
##ario
colon
shaun
##sat
daphne
##erated
swam
congregations
reprise
considerations
magnet
playable
xvi
##р
overthrow
tobias
knob
chavez
coding
##mers
propped
katrina
orient
newcomer
##suke
temperate
##pool
farmhouse
interrogation
##vd
committing
##vert
forthcoming
strawberry
joaquin
macau
ponds
shocking
siberia
##cellular
chant
contributors
##nant
##ologists
sped
absorb
hail
1782
spared
##hore
barbados
karate
opus
originates
saul
##xie
evergreen
leaped
##rock
correlation
exaggerated
weekday
unification
bump
tracing
brig
afb
pathways
utilizing
##ners
mod
mb
disturbance
kneeling
##stad
##guchi
100th
pune
##thy
decreasing
168
manipulation
miriam
academia
ecosystem
occupational
rbi
##lem
rift
##14
rotary
stacked
incorporation
awakening
generators
guerrero
racist
##omy
cyber
derivatives
culminated
allie
annals
panzer
sainte
wikipedia
pops
zu
austro
##vate
algerian
politely
nicholson
mornings
educate
tastes
thrill
dartmouth
##gating
db
##jee
regan
differing
concentrating
choreography
divinity
##media
pledged
alexandre
routing
gregor
madeline
##idal
apocalypse
##hora
gunfire
culminating
elves
fined
liang
lam
programmed
tar
guessing
transparency
gabrielle
##gna
cancellation
flexibility
##lining
accession
shea
stronghold
nets
specializes
##rgan
abused
hasan
sgt
ling
exceeding
##₄
admiration
supermarket
##ark
photographers
specialised
tilt
resonance
hmm
perfume
380
sami
threatens
garland
botany
guarding
boiled
greet
puppy
russo
supplier
wilmington
vibrant
vijay
##bius
paralympic
grumbled
paige
faa
licking
margins
hurricanes
##gong
fest
grenade
ripping
##uz
counseling
weigh
##sian
needles
wiltshire
edison
costly
##not
fulton
tramway
redesigned
staffordshire
cache
gasping
watkins
sleepy
candidacy
##group
monkeys
timeline
throbbing
##bid
##sos
berth
uzbekistan
vanderbilt
bothering
overturned
ballots
gem
##iger
sunglasses
subscribers
hooker
compelling
ang
exceptionally
saloon
stab
##rdi
carla
terrifying
rom
##vision
coil
##oids
satisfying
vendors
31st
mackay
deities
overlooked
ambient
bahamas
felipe
olympia
whirled
botanist
advertised
tugging
##dden
disciples
morales
unionist
rites
foley
morse
motives
creepy
##₀
soo
##sz
bargain
highness
frightening
turnpike
tory
reorganization
##cer
depict
biographer
##walk
unopposed
manifesto
##gles
institut
emile
accidental
kapoor
##dam
kilkenny
cortex
lively
##13
romanesque
jain
shan
cannons
##ood
##ske
petrol
echoing
amalgamated
disappears
cautious
proposes
sanctions
trenton
##ر
flotilla
aus
contempt
tor
canary
cote
theirs
##hun
conceptual
deleted
fascinating
paso
blazing
elf
honourable
hutchinson
##eiro
##outh
##zin
surveyor
tee
amidst
wooded
reissue
intro
##ono
cobb
shelters
newsletter
hanson
brace
encoding
confiscated
dem
caravan
marino
scroll
melodic
cows
imam
##adi
##aneous
northward
searches
biodiversity
cora
310
roaring
##bers
connell
theologian
halo
compose
pathetic
unmarried
dynamo
##oot
az
calculation
toulouse
deserves
humour
nr
forgiveness
tam
undergone
martyr
pamela
myths
whore
counselor
hicks
290
heavens
battleship
electromagnetic
##bbs
stellar
establishments
presley
hopped
##chin
temptation
90s
wills
nas
##yuan
nhs
##nya
seminars
##yev
adaptations
gong
asher
lex
indicator
sikh
tobago
cites
goin
##yte
satirical
##gies
characterised
correspond
bubbles
lure
participates
##vid
eruption
skate
therapeutic
1785
canals
wholesale
defaulted
sac
460
petit
##zzled
virgil
leak
ravens
256
portraying
##yx
ghetto
creators
dams
portray
vicente
##rington
fae
namesake
bounty
##arium
joachim
##ota
##iser
aforementioned
axle
snout
depended
dismantled
reuben
480
##ibly
gallagher
##lau
##pd
earnest
##ieu
##iary
inflicted
objections
##llar
asa
gritted
##athy
jericho
##sea
##was
flick
underside
ceramics
undead
substituted
195
eastward
undoubtedly
wheeled
chimney
##iche
guinness
cb
##ager
siding
##bell
traitor
baptiste
disguised
inauguration
149
tipperary
choreographer
perched
warmed
stationary
eco
##ike
##ntes
bacterial
##aurus
flores
phosphate
##core
attacker
invaders
alvin
intersects
a1
indirectly
immigrated
businessmen
cornelius
valves
narrated
pill
sober
ul
nationale
monastic
applicants
scenery
##jack
161
motifs
constitutes
cpu
##osh
jurisdictions
sd
tuning
irritation
woven
##uddin
fertility
gao
##erie
antagonist
impatient
glacial
hides
boarded
denominations
interception
##jas
cookie
nicola
##tee
algebraic
marquess
bahn
parole
buyers
bait
turbines
paperwork
bestowed
natasha
renee
oceans
purchases
157
vaccine
215
##tock
fixtures
playhouse
integrate
jai
oswald
intellectuals
##cky
booked
nests
mortimer
##isi
obsession
sept
##gler
##sum
440
scrutiny
simultaneous
squinted
##shin
collects
oven
shankar
penned
remarkably
##я
slips
luggage
spectral
1786
collaborations
louie
consolidation
##ailed
##ivating
420
hoover
blackpool
harness
ignition
vest
tails
belmont
mongol
skinner
##nae
visually
mage
derry
##tism
##unce
stevie
transitional
##rdy
redskins
drying
prep
prospective
##21
annoyance
oversee
##loaded
fills
##books
##iki
announces
fda
scowled
respects
prasad
mystic
tucson
##vale
revue
springer
bankrupt
1772
aristotle
salvatore
habsburg
##geny
dal
natal
nut
pod
chewing
darts
moroccan
walkover
rosario
lenin
punjabi
##ße
grossed
scattering
wired
invasive
hui
polynomial
corridors
wakes
gina
portrays
##cratic
arid
retreating
erich
irwin
sniper
##dha
linen
lindsey
maneuver
butch
shutting
socio
bounce
commemorative
postseason
jeremiah
pines
275
mystical
beads
bp
abbas
furnace
bidding
consulted
assaulted
empirical
rubble
enclosure
sob
weakly
cancel
polly
yielded
##emann
curly
prediction
battered
70s
vhs
jacqueline
render
sails
barked
detailing
grayson
riga
sloane
raging
##yah
herbs
bravo
##athlon
alloy
giggle
imminent
suffers
assumptions
waltz
##itate
accomplishments
##ited
bathing
remixed
deception
prefix
##emia
deepest
##tier
##eis
balkan
frogs
##rong
slab
##pate
philosophers
peterborough
grains
imports
dickinson
rwanda
##atics
1774
dirk
lan
tablets
##rove
clone
##rice
caretaker
hostilities
mclean
##gre
regimental
treasures
norms
impose
tsar
tango
diplomacy
variously
complain
192
recognise
arrests
1779
celestial
pulitzer
##dus
bing
libretto
##moor
adele
splash
##rite
expectation
lds
confronts
##izer
spontaneous
harmful
wedge
entrepreneurs
buyer
##ope
bilingual
translate
rugged
conner
circulated
uae
eaton
##gra
##zzle
lingered
lockheed
vishnu
reelection
alonso
##oom
joints
yankee
headline
cooperate
heinz
laureate
invading
##sford
echoes
scandinavian
##dham
hugging
vitamin
salute
micah
hind
trader
##sper
radioactive
##ndra
militants
poisoned
ratified
remark
campeonato
deprived
wander
prop
##dong
outlook
##tani
##rix
##eye
chiang
darcy
##oping
mandolin
spice
statesman
babylon
182
walled
forgetting
afro
##cap
158
giorgio
buffer
##polis
planetary
##gis
overlap
terminals
kinda
centenary
##bir
arising
manipulate
elm
ke
1770
ak
##tad
chrysler
mapped
moose
pomeranian
quad
macarthur
assemblies
shoreline
recalls
stratford
##rted
noticeable
##evic
imp
##rita
##sque
accustomed
supplying
tents
disgusted
vogue
sipped
filters
khz
reno
selecting
luftwaffe
mcmahon
tyne
masterpiece
carriages
collided
dunes
exercised
flare
remembers
muzzle
##mobile
heck
##rson
burgess
lunged
middleton
boycott
bilateral
##sity
hazardous
lumpur
multiplayer
spotlight
jackets
goldman
liege
porcelain
rag
waterford
benz
attracts
hopeful
battling
ottomans
kensington
baked
hymns
cheyenne
lattice
levine
borrow
polymer
clashes
michaels
monitored
commitments
denounced
##25
##von
cavity
##oney
hobby
akin
##holders
futures
intricate
cornish
patty
##oned
illegally
dolphin
##lag
barlow
yellowish
maddie
apologized
luton
plagued
##puram
nana
##rds
sway
fanny
łodz
##rino
psi
suspicions
hanged
##eding
initiate
charlton
##por
nak
competent
235
analytical
annex
wardrobe
reservations
##rma
sect
162
fairfax
hedge
piled
buckingham
uneven
bauer
simplicity
snyder
interpret
accountability
donors
moderately
byrd
continents
##cite
##max
disciple
hr
jamaican
ping
nominees
##uss
mongolian
diver
attackers
eagerly
ideological
pillows
miracles
apartheid
revolver
sulfur
clinics
moran
163
##enko
ile
katy
rhetoric
##icated
chronology
recycling
##hrer
elongated
mughal
pascal
profiles
vibration
databases
domination
##fare
##rant
matthias
digest
rehearsal
polling
weiss
initiation
reeves
clinging
flourished
impress
ngo
##hoff
##ume
buckley
symposium
rhythms
weed
emphasize
transforming
##taking
##gence
##yman
accountant
analyze
flicker
foil
priesthood
voluntarily
decreases
##80
##hya
slater
sv
charting
mcgill
##lde
moreno
##iu
besieged
zur
robes
##phic
admitting
api
deported
turmoil
peyton
earthquakes
##ares
nationalists
beau
clair
brethren
interrupt
welch
curated
galerie
requesting
164
##ested
impending
steward
viper
##vina
complaining
beautifully
brandy
foam
nl
1660
##cake
alessandro
punches
laced
explanations
##lim
attribute
clit
reggie
discomfort
##cards
smoothed
whales
##cene
adler
countered
duffy
disciplinary
widening
recipe
reliance
conducts
goats
gradient
preaching
##shaw
matilda
quasi
striped
meridian
cannabis
cordoba
certificates
##agh
##tering
graffiti
hangs
pilgrims
repeats
##ych
revive
urine
etat
##hawk
fueled
belts
fuzzy
susceptible
##hang
mauritius
salle
sincere
beers
hooks
##cki
arbitration
entrusted
advise
sniffed
seminar
junk
donnell
processors
principality
strapped
celia
mendoza
everton
fortunes
prejudice
starving
reassigned
steamer
##lund
tuck
evenly
foreman
##ffen
dans
375
envisioned
slit
##xy
baseman
liberia
rosemary
##weed
electrified
periodically
potassium
stride
contexts
sperm
slade
mariners
influx
bianca
subcommittee
##rane
spilling
icao
estuary
##nock
delivers
iphone
##ulata
isa
mira
bohemian
dessert
##sbury
welcoming
proudly
slowing
##chs
musee
ascension
russ
##vian
waits
##psy
africans
exploit
##morphic
gov
eccentric
crab
peck
##ull
entrances
formidable
marketplace
groom
bolted
metabolism
patton
robbins
courier
payload
endure
##ifier
andes
refrigerator
##pr
ornate
##uca
ruthless
illegitimate
masonry
strasbourg
bikes
adobe
##³
apples
quintet
willingly
niche
bakery
corpses
energetic
##cliffe
##sser
##ards
177
centimeters
centro
fuscous
cretaceous
rancho
##yde
andrei
telecom
tottenham
oasis
ordination
vulnerability
presiding
corey
cp
penguins
sims
##pis
malawi
piss
##48
correction
##cked
##ffle
##ryn
countdown
detectives
psychiatrist
psychedelic
dinosaurs
blouse
##get
choi
vowed
##oz
randomly
##pol
49ers
scrub
blanche
bruins
dusseldorf
##using
unwanted
##ums
212
dominique
elevations
headlights
om
laguna
##oga
1750
famously
ignorance
shrewsbury
##aine
ajax
breuning
che
confederacy
greco
overhaul
##screen
paz
skirts
disagreement
cruelty
jagged
phoebe
shifter
hovered
viruses
##wes
mandy
##lined
##gc
landlord
squirrel
dashed
##ι
ornamental
gag
wally
grange
literal
spurs
undisclosed
proceeding
yin
##text
billie
orphan
spanned
humidity
indy
weighted
presentations
explosions
lucian
##tary
vaughn
hindus
##anga
##hell
psycho
171
daytona
protects
efficiently
rematch
sly
tandem
##oya
rebranded
impaired
hee
metropolis
peach
godfrey
diaspora
ethnicity
prosperous
gleaming
dar
grossing
playback
##rden
stripe
pistols
##tain
births
labelled
##cating
172
rudy
alba
##onne
aquarium
hostility
##gb
##tase
shudder
sumatra
hardest
lakers
consonant
creeping
demos
homicide
capsule
zeke
liberties
expulsion
pueblo
##comb
trait
transporting
##ddin
##neck
##yna
depart
gregg
mold
ledge
hangar
oldham
playboy
termination
analysts
gmbh
romero
##itic
insist
cradle
filthy
brightness
slash
shootout
deposed
bordering
##truct
isis
microwave
tumbled
sheltered
cathy
werewolves
messy
andersen
convex
clapped
clinched
satire
wasting
edo
vc
rufus
##jak
mont
##etti
poznan
##keeping
restructuring
transverse
##rland
azerbaijani
slovene
gestures
roommate
choking
shear
##quist
vanguard
oblivious
##hiro
disagreed
baptism
##lich
coliseum
##aceae
salvage
societe
cory
locke
relocation
relying
versailles
ahl
swelling
##elo
cheerful
##word
##edes
gin
sarajevo
obstacle
diverted
##nac
messed
thoroughbred
fluttered
utrecht
chewed
acquaintance
assassins
dispatch
mirza
##wart
nike
salzburg
swell
yen
##gee
idle
ligue
samson
##nds
##igh
playful
spawned
##cise
tease
##case
burgundy
##bot
stirring
skeptical
interceptions
marathi
##dies
bedrooms
aroused
pinch
##lik
preferences
tattoos
buster
digitally
projecting
rust
##ital
kitten
priorities
addison
pseudo
##guard
dusk
icons
sermon
##psis
##iba
bt
##lift
##xt
ju
truce
rink
##dah
##wy
defects
psychiatry
offences
calculate
glucose
##iful
##rized
##unda
francaise
##hari
richest
warwickshire
carly
1763
purity
redemption
lending
##cious
muse
bruises
cerebral
aero
carving
##name
preface
terminology
invade
monty
##int
anarchist
blurred
##iled
rossi
treats
guts
shu
foothills
ballads
undertaking
premise
cecilia
affiliates
blasted
conditional
wilder
minors
drone
rudolph
buffy
swallowing
horton
attested
##hop
rutherford
howell
primetime
livery
penal
##bis
minimize
hydro
wrecked
wrought
palazzo
##gling
cans
vernacular
friedman
nobleman
shale
walnut
danielle
##ection
##tley
sears
##kumar
chords
lend
flipping
streamed
por
dracula
gallons
sacrifices
gamble
orphanage
##iman
mckenzie
##gible
boxers
daly
##balls
##ان
208
##ific
##rative
##iq
exploited
slated
##uity
circling
hillary
pinched
goldberg
provost
campaigning
lim
piles
ironically
jong
mohan
successors
usaf
##tem
##ught
autobiographical
haute
preserves
##ending
acquitted
comparisons
203
hydroelectric
gangs
cypriot
torpedoes
rushes
chrome
derive
bumps
instability
fiat
pets
##mbe
silas
dye
reckless
settler
##itation
info
heats
##writing
176
canonical
maltese
fins
mushroom
stacy
aspen
avid
##kur
##loading
vickers
gaston
hillside
statutes
wilde
gail
kung
sabine
comfortably
motorcycles
##rgo
169
pneumonia
fetch
##sonic
axel
faintly
parallels
##oop
mclaren
spouse
compton
interdisciplinary
miner
##eni
181
clamped
##chal
##llah
separates
versa
##mler
scarborough
labrador
##lity
##osing
rutgers
hurdles
como
166
burt
divers
##100
wichita
cade
coincided
##erson
bruised
mla
##pper
vineyard
##ili
##brush
notch
mentioning
jase
hearted
kits
doe
##acle
pomerania
##ady
ronan
seizure
pavel
problematic
##zaki
domenico
##ulin
catering
penelope
dependence
parental
emilio
ministerial
atkinson
##bolic
clarkson
chargers
colby
grill
peeked
arises
summon
##aged
fools
##grapher
faculties
qaeda
##vial
garner
refurbished
##hwa
geelong
disasters
nudged
bs
shareholder
lori
algae
reinstated
rot
##ades
##nous
invites
stainless
183
inclusive
##itude
diocesan
til
##icz
denomination
##xa
benton
floral
registers
##ider
##erman
##kell
absurd
brunei
guangzhou
hitter
retaliation
##uled
##eve
blanc
nh
consistency
contamination
##eres
##rner
dire
palermo
broadcasters
diaries
inspire
vols
brewer
tightening
ky
mixtape
hormone
##tok
stokes
##color
##dly
##ssi
pg
##ometer
##lington
sanitation
##tility
intercontinental
apps
##adt
¹⁄₂
cylinders
economies
favourable
unison
croix
gertrude
odyssey
vanity
dangling
##logists
upgrades
dice
middleweight
practitioner
##ight
206
henrik
parlor
orion
angered
lac
python
blurted
##rri
sensual
intends
swings
angled
##phs
husky
attain
peerage
precinct
textiles
cheltenham
shuffled
dai
confess
tasting
bhutan
##riation
tyrone
segregation
abrupt
ruiz
##rish
smirked
blackwell
confidential
browning
amounted
##put
vase
scarce
fabulous
raided
staple
guyana
unemployed
glider
shay
##tow
carmine
troll
intervene
squash
superstar
##uce
cylindrical
len
roadway
researched
handy
##rium
##jana
meta
lao
declares
##rring
##tadt
##elin
##kova
willem
shrubs
napoleonic
realms
skater
qi
volkswagen
##ł
tad
hara
archaeologist
awkwardly
eerie
##kind
wiley
##heimer
##24
titus
organizers
cfl
crusaders
lama
usb
vent
enraged
thankful
occupants
maximilian
##gaard
possessing
textbooks
##oran
collaborator
quaker
##ulo
avalanche
mono
silky
straits
isaiah
mustang
surged
resolutions
potomac
descend
cl
kilograms
plato
strains
saturdays
##olin
bernstein
##ype
holstein
ponytail
##watch
belize
conversely
heroine
perpetual
##ylus
charcoal
piedmont
glee
negotiating
backdrop
prologue
##jah
##mmy
pasadena
climbs
ramos
sunni
##holm
##tner
##tri
anand
deficiency
hertfordshire
stout
##avi
aperture
orioles
##irs
doncaster
intrigued
bombed
coating
otis
##mat
cocktail
##jit
##eto
amir
arousal
sar
##proof
##act
##ories
dixie
pots
##bow
whereabouts
159
##fted
drains
bullying
cottages
scripture
coherent
fore
poe
appetite
##uration
sampled
##ators
##dp
derrick
rotor
jays
peacock
installment
##rro
advisors
##coming
rodeo
scotch
##mot
##db
##fen
##vant
ensued
rodrigo
dictatorship
martyrs
twenties
##н
towed
incidence
marta
rainforest
sai
scaled
##cles
oceanic
qualifiers
symphonic
mcbride
dislike
generalized
aubrey
colonization
##iation
##lion
##ssing
disliked
lublin
salesman
##ulates
spherical
whatsoever
sweating
avalon
contention
punt
severity
alderman
atari
##dina
##grant
##rop
scarf
seville
vertices
annexation
fairfield
fascination
inspiring
launches
palatinate
regretted
##rca
feral
##iom
elk
nap
olsen
reddy
yong
##leader
##iae
garment
transports
feng
gracie
outrage
viceroy
insides
##esis
breakup
grady
organizer
softer
grimaced
222
murals
galicia
arranging
vectors
##rsten
bas
##sb
##cens
sloan
##eka
bitten
ara
fender
nausea
bumped
kris
banquet
comrades
detector
persisted
##llan
adjustment
endowed
cinemas
##shot
sellers
##uman
peek
epa
kindly
neglect
simpsons
talon
mausoleum
runaway
hangul
lookout
##cic
rewards
coughed
acquainted
chloride
##ald
quicker
accordion
neolithic
##qa
artemis
coefficient
lenny
pandora
tx
##xed
ecstasy
litter
segunda
chairperson
gemma
hiss
rumor
vow
nasal
antioch
compensate
patiently
transformers
##eded
judo
morrow
penis
posthumous
philips
bandits
husbands
denote
flaming
##any
##phones
langley
yorker
1760
walters
##uo
##kle
gubernatorial
fatty
samsung
leroy
outlaw
##nine
unpublished
poole
jakob
##ᵢ
##ₙ
crete
distorted
superiority
##dhi
intercept
crust
mig
claus
crashes
positioning
188
stallion
301
frontal
armistice
##estinal
elton
aj
encompassing
camel
commemorated
malaria
woodward
calf
cigar
penetrate
##oso
willard
##rno
##uche
illustrate
amusing
convergence
noteworthy
##lma
##rva
journeys
realise
manfred
##sable
410
##vocation
hearings
fiance
##posed
educators
provoked
adjusting
##cturing
modular
stockton
paterson
vlad
rejects
electors
selena
maureen
##tres
uber
##rce
swirled
##num
proportions
nanny
pawn
naturalist
parma
apostles
awoke
ethel
wen
##bey
monsoon
overview
##inating
mccain
rendition
risky
adorned
##ih
equestrian
germain
nj
conspicuous
confirming
##yoshi
shivering
##imeter
milestone
rumours
flinched
bounds
smacked
token
##bei
lectured
automobiles
##shore
impacted
##iable
nouns
nero
##leaf
ismail
prostitute
trams
##lace
bridget
sud
stimulus
impressions
reins
revolves
##oud
##gned
giro
honeymoon
##swell
criterion
##sms
##uil
libyan
prefers
##osition
211
preview
sucks
accusation
bursts
metaphor
diffusion
tolerate
faye
betting
cinematographer
liturgical
specials
bitterly
humboldt
##ckle
flux
rattled
##itzer
archaeologists
odor
authorised
marshes
discretion
##ов
alarmed
archaic
inverse
##leton
explorers
##pine
drummond
tsunami
woodlands
##minate
##tland
booklet
insanity
owning
insert
crafted
calculus
##tore
receivers
##bt
stung
##eca
##nched
prevailing
travellers
eyeing
lila
graphs
##borne
178
julien
##won
morale
adaptive
therapist
erica
cw
libertarian
bowman
pitches
vita
##ional
crook
##ads
##entation
caledonia
mutiny
##sible
1840s
automation
##ß
flock
##pia
ironic
pathology
##imus
remarried
##22
joker
withstand
energies
##att
shropshire
hostages
madeleine
tentatively
conflicting
mateo
recipes
euros
ol
mercenaries
nico
##ndon
albuquerque
augmented
mythical
bel
freud
##child
cough
##lica
365
freddy
lillian
genetically
nuremberg
calder
209
bonn
outdoors
paste
suns
urgency
vin
restraint
tyson
##cera
##selle
barrage
bethlehem
kahn
##par
mounts
nippon
barony
happier
ryu
makeshift
sheldon
blushed
castillo
barking
listener
taped
bethel
fluent
headlines
pornography
rum
disclosure
sighing
mace
doubling
gunther
manly
##plex
rt
interventions
physiological
forwards
emerges
##tooth
##gny
compliment
rib
recession
visibly
barge
faults
connector
exquisite
prefect
##rlin
patio
##cured
elevators
brandt
italics
pena
173
wasp
satin
ea
botswana
graceful
respectable
##jima
##rter
##oic
franciscan
generates
##dl
alfredo
disgusting
##olate
##iously
sherwood
warns
cod
promo
cheryl
sino
##ة
##escu
twitch
##zhi
brownish
thom
ortiz
##dron
densely
##beat
carmel
reinforce
##bana
187
anastasia
downhill
vertex
contaminated
remembrance
harmonic
homework
##sol
fiancee
gears
olds
angelica
loft
ramsay
quiz
colliery
sevens
##cape
autism
##hil
walkway
##boats
ruben
abnormal
ounce
khmer
##bbe
zachary
bedside
morphology
punching
##olar
sparrow
convinces
##35
hewitt
queer
remastered
rods
mabel
solemn
notified
lyricist
symmetric
##xide
174
encore
passports
wildcats
##uni
baja
##pac
mildly
##ease
bleed
commodity
mounds
glossy
orchestras
##omo
damian
prelude
ambitions
##vet
awhile
remotely
##aud
asserts
imply
##iques
distinctly
modelling
remedy
##dded
windshield
dani
xiao
##endra
audible
powerplant
1300
invalid
elemental
acquisitions
##hala
immaculate
libby
plata
smuggling
ventilation
denoted
minh
##morphism
430
differed
dion
kelley
lore
mocking
sabbath
spikes
hygiene
drown
runoff
stylized
tally
liberated
aux
interpreter
righteous
aba
siren
reaper
pearce
millie
##cier
##yra
gaius
##iso
captures
##ttering
dorm
claudio
##sic
benches
knighted
blackness
##ored
discount
fumble
oxidation
routed
##ς
novak
perpendicular
spoiled
fracture
splits
##urt
pads
topology
##cats
axes
fortunate
offenders
protestants
esteem
221
broadband
convened
frankly
hound
prototypes
isil
facilitated
keel
##sher
sahara
awaited
bubba
orb
prosecutors
186
hem
520
##xing
relaxing
remnant
romney
sorted
slalom
stefano
ulrich
##active
exemption
folder
pauses
foliage
hitchcock
epithet
204
criticisms
##aca
ballistic
brody
hinduism
chaotic
youths
equals
##pala
pts
thicker
analogous
capitalist
improvised
overseeing
sinatra
ascended
beverage
##tl
straightforward
##kon
curran
##west
bois
325
induce
surveying
emperors
sax
unpopular
##kk
cartoonist
fused
##mble
unto
##yuki
localities
##cko
##ln
darlington
slain
academie
lobbying
sediment
puzzles
##grass
defiance
dickens
manifest
tongues
alumnus
arbor
coincide
184
appalachian
mustafa
examiner
cabaret
traumatic
yves
bracelet
draining
heroin
magnum
baths
odessa
consonants
mitsubishi
##gua
kellan
vaudeville
##fr
joked
null
straps
probation
##ław
ceded
interfaces
##pas
##zawa
blinding
viet
224
rothschild
museo
640
huddersfield
##vr
tactic
##storm
brackets
dazed
incorrectly
##vu
reg
glazed
fearful
manifold
benefited
irony
##sun
stumbling
##rte
willingness
balkans
mei
wraps
##aba
injected
##lea
gu
syed
harmless
##hammer
bray
takeoff
poppy
timor
cardboard
astronaut
purdue
weeping
southbound
cursing
stalls
diagonal
##neer
lamar
bryce
comte
weekdays
harrington
##uba
negatively
##see
lays
grouping
##cken
##henko
affirmed
halle
modernist
##lai
hodges
smelling
aristocratic
baptized
dismiss
justification
oilers
##now
coupling
qin
snack
healer
##qing
gardener
layla
battled
formulated
stephenson
gravitational
##gill
##jun
1768
granny
coordinating
suites
##cd
##ioned
monarchs
##cote
##hips
sep
blended
apr
barrister
deposition
fia
mina
policemen
paranoid
##pressed
churchyard
covert
crumpled
creep
abandoning
tr
transmit
conceal
barr
understands
readiness
spire
##cology
##enia
##erry
610
startling
unlock
vida
bowled
slots
##nat
##islav
spaced
trusting
admire
rig
##ink
slack
##70
mv
207
casualty
##wei
classmates
##odes
##rar
##rked
amherst
furnished
evolve
foundry
menace
mead
##lein
flu
wesleyan
##kled
monterey
webber
##vos
wil
##mith
##на
bartholomew
justices
restrained
##cke
amenities
191
mediated
sewage
trenches
ml
mainz
##thus
1800s
##cula
##inski
caine
bonding
213
converts
spheres
superseded
marianne
crypt
sweaty
ensign
historia
##br
spruce
##post
##ask
forks
thoughtfully
yukon
pamphlet
ames
##uter
karma
##yya
bryn
negotiation
sighs
incapable
##mbre
##ntial
actresses
taft
##mill
luce
prevailed
##amine
1773
motionless
envoy
testify
investing
sculpted
instructors
provence
kali
cullen
horseback
##while
goodwin
##jos
gaa
norte
##ldon
modify
wavelength
abd
214
skinned
sprinter
forecast
scheduling
marries
squared
tentative
##chman
boer
##isch
bolts
swap
fisherman
assyrian
impatiently
guthrie
martins
murdoch
194
tanya
nicely
dolly
lacy
med
##45
syn
decks
fashionable
millionaire
##ust
surfing
##ml
##ision
heaved
tammy
consulate
attendees
routinely
197
fuse
saxophonist
backseat
malaya
##lord
scowl
tau
##ishly
193
sighted
steaming
##rks
303
911
##holes
##hong
ching
##wife
bless
conserved
jurassic
stacey
unix
zion
chunk
rigorous
blaine
198
peabody
slayer
dismay
brewers
nz
##jer
det
##glia
glover
postwar
int
penetration
sylvester
imitation
vertically
airlift
heiress
knoxville
viva
##uin
390
macon
##rim
##fighter
##gonal
janice
##orescence
##wari
marius
belongings
leicestershire
196
blanco
inverted
preseason
sanity
sobbing
##due
##elt
##dled
collingwood
regeneration
flickering
shortest
##mount
##osi
feminism
##lat
sherlock
cabinets
fumbled
northbound
precedent
snaps
##mme
researching
##akes
guillaume
insights
manipulated
vapor
neighbour
sap
gangster
frey
f1
stalking
scarcely
callie
barnett
tendencies
audi
doomed
assessing
slung
panchayat
ambiguous
bartlett
##etto
distributing
violating
wolverhampton
##hetic
swami
histoire
##urus
liable
pounder
groin
hussain
larsen
popping
surprises
##atter
vie
curt
##station
mute
relocate
musicals
authorization
richter
##sef
immortality
tna
bombings
##press
deteriorated
yiddish
##acious
robbed
colchester
cs
pmid
ao
verified
balancing
apostle
swayed
recognizable
oxfordshire
retention
nottinghamshire
contender
judd
invitational
shrimp
uhf
##icient
cleaner
longitudinal
tanker
##mur
acronym
broker
koppen
sundance
suppliers
##gil
4000
clipped
fuels
petite
##anne
landslide
helene
diversion
populous
landowners
auspices
melville
quantitative
##xes
ferries
nicky
##llus
doo
haunting
roche
carver
downed
unavailable
##pathy
approximation
hiroshima
##hue
garfield
valle
comparatively
keyboardist
traveler
##eit
congestion
calculating
subsidiaries
##bate
serb
modernization
fairies
deepened
ville
averages
##lore
inflammatory
tonga
##itch
co₂
squads
##hea
gigantic
serum
enjoyment
retailer
verona
35th
cis
##phobic
magna
technicians
##vati
arithmetic
##sport
levin
##dation
amtrak
chow
sienna
##eyer
backstage
entrepreneurship
##otic
learnt
tao
##udy
worcestershire
formulation
baggage
hesitant
bali
sabotage
##kari
barren
enhancing
murmur
pl
freshly
putnam
syntax
aces
medicines
resentment
bandwidth
##sier
grins
chili
guido
##sei
framing
implying
gareth
lissa
genevieve
pertaining
admissions
geo
thorpe
proliferation
sato
bela
analyzing
parting
##gor
awakened
##isman
huddled
secrecy
##kling
hush
gentry
540
dungeons
##ego
coasts
##utz
sacrificed
##chule
landowner
mutually
prevalence
programmer
adolescent
disrupted
seaside
gee
trusts
vamp
georgie
##nesian
##iol
schedules
sindh
##market
etched
hm
sparse
bey
beaux
scratching
gliding
unidentified
216
collaborating
gems
jesuits
oro
accumulation
shaping
mbe
anal
##xin
231
enthusiasts
newscast
##egan
janata
dewey
parkinson
179
ankara
biennial
towering
dd
inconsistent
950
##chet
thriving
terminate
cabins
furiously
eats
advocating
donkey
marley
muster
phyllis
leiden
##user
grassland
glittering
iucn
loneliness
217
memorandum
armenians
##ddle
popularized
rhodesia
60s
lame
##illon
sans
bikini
header
orbits
##xx
##finger
##ulator
sharif
spines
biotechnology
strolled
naughty
yates
##wire
fremantle
milo
##mour
abducted
removes
##atin
humming
wonderland
##chrome
##ester
hume
pivotal
##rates
armand
grams
believers
elector
rte
apron
bis
scraped
##yria
endorsement
initials
##llation
eps
dotted
hints
buzzing
emigration
nearer
##tom
indicators
##ulu
coarse
neutron
protectorate
##uze
directional
exploits
pains
loire
1830s
proponents
guggenheim
rabbits
ritchie
305
hectare
inputs
hutton
##raz
verify
##ako
boilers
longitude
##lev
skeletal
yer
emilia
citrus
compromised
##gau
pokemon
prescription
paragraph
eduard
cadillac
attire
categorized
kenyan
weddings
charley
##bourg
entertain
monmouth
##lles
nutrients
davey
mesh
incentive
practised
ecosystems
kemp
subdued
overheard
##rya
bodily
maxim
##nius
apprenticeship
ursula
##fight
lodged
rug
silesian
unconstitutional
patel
inspected
coyote
unbeaten
##hak
34th
disruption
convict
parcel
##cl
##nham
collier
implicated
mallory
##iac
##lab
susannah
winkler
##rber
shia
phelps
sediments
graphical
robotic
##sner
adulthood
mart
smoked
##isto
kathryn
clarified
##aran
divides
convictions
oppression
pausing
burying
##mt
federico
mathias
eileen
##tana
kite
hunched
##acies
189
##atz
disadvantage
liza
kinetic
greedy
paradox
yokohama
dowager
trunks
ventured
##gement
gupta
vilnius
olaf
##thest
crimean
hopper
##ej
progressively
arturo
mouthed
arrondissement
##fusion
rubin
simulcast
oceania
##orum
##stra
##rred
busiest
intensely
navigator
cary
##vine
##hini
##bies
fife
rowe
rowland
posing
insurgents
shafts
lawsuits
activate
conor
inward
culturally
garlic
265
##eering
eclectic
##hui
##kee
##nl
furrowed
vargas
meteorological
rendezvous
##aus
culinary
commencement
##dition
quota
##notes
mommy
salaries
overlapping
mule
##iology
##mology
sums
wentworth
##isk
##zione
mainline
subgroup
##illy
hack
plaintiff
verdi
bulb
differentiation
engagements
multinational
supplemented
bertrand
caller
regis
##naire
##sler
##arts
##imated
blossom
propagation
kilometer
viaduct
vineyards
##uate
beckett
optimization
golfer
songwriters
seminal
semitic
thud
volatile
evolving
ridley
##wley
trivial
distributions
scandinavia
jiang
##ject
wrestled
insistence
##dio
emphasizes
napkin
##ods
adjunct
rhyme
##ricted
##eti
hopeless
surrounds
tremble
32nd
smoky
##ntly
oils
medicinal
padded
steer
wilkes
219
255
concessions
hue
uniquely
blinded
landon
yahoo
##lane
hendrix
commemorating
dex
specify
chicks
##ggio
intercity
1400
morley
##torm
highlighting
##oting
pang
oblique
stalled
##liner
flirting
newborn
1769
bishopric
shaved
232
currie
##ush
dharma
spartan
##ooped
favorites
smug
novella
sirens
abusive
creations
espana
##lage
paradigm
semiconductor
sheen
##rdo
##yen
##zak
nrl
renew
##pose
##tur
adjutant
marches
norma
##enity
ineffective
weimar
grunt
##gat
lordship
plotting
expenditure
infringement
lbs
refrain
av
mimi
mistakenly
postmaster
1771
##bara
ras
motorsports
tito
199
subjective
##zza
bully
stew
##kaya
prescott
1a
##raphic
##zam
bids
styling
paranormal
reeve
sneaking
exploding
katz
akbar
migrant
syllables
indefinitely
##ogical
destroys
replaces
applause
##phine
pest
##fide
218
articulated
bertie
##thing
##cars
##ptic
courtroom
crowley
aesthetics
cummings
tehsil
hormones
titanic
dangerously
##ibe
stadion
jaenelle
auguste
ciudad
##chu
mysore
partisans
##sio
lucan
philipp
##aly
debating
henley
interiors
##rano
##tious
homecoming
beyonce
usher
henrietta
prepares
weeds
##oman
ely
plucked
##pire
##dable
luxurious
##aq
artifact
password
pasture
juno
maddy
minsk
##dder
##ologies
##rone
assessments
martian
royalist
1765
examines
##mani
##rge
nino
223
parry
scooped
relativity
##eli
##uting
##cao
congregational
noisy
traverse
##agawa
strikeouts
nickelodeon
obituary
transylvania
binds
depictions
polk
trolley
##yed
##lard
breeders
##under
dryly
hokkaido
1762
strengths
stacks
bonaparte
connectivity
neared
prostitutes
stamped
anaheim
gutierrez
sinai
##zzling
bram
fresno
madhya
##86
proton
##lena
##llum
##phon
reelected
wanda
##anus
##lb
ample
distinguishing
##yler
grasping
sermons
tomato
bland
stimulation
avenues
##eux
spreads
scarlett
fern
pentagon
assert
baird
chesapeake
ir
calmed
distortion
fatalities
##olis
correctional
pricing
##astic
##gina
prom
dammit
ying
collaborate
##chia
welterweight
33rd
pointer
substitution
bonded
umpire
communicating
multitude
paddle
##obe
federally
intimacy
##insky
betray
ssr
##lett
##lean
##lves
##therapy
airbus
##tery
functioned
ud
bearer
biomedical
netflix
##hire
##nca
condom
brink
ik
##nical
macy
##bet
flap
gma
experimented
jelly
lavender
##icles
##ulia
munro
##mian
##tial
rye
##rle
60th
gigs
hottest
rotated
predictions
fuji
bu
##erence
##omi
barangay
##fulness
##sas
clocks
##rwood
##liness
cereal
roe
wight
decker
uttered
babu
onion
xml
forcibly
##df
petra
sarcasm
hartley
peeled
storytelling
##42
##xley
##ysis
##ffa
fibre
kiel
auditor
fig
harald
greenville
##berries
geographically
nell
quartz
##athic
cemeteries
##lr
crossings
nah
holloway
reptiles
chun
sichuan
snowy
660
corrections
##ivo
zheng
ambassadors
blacksmith
fielded
fluids
hardcover
turnover
medications
melvin
academies
##erton
ro
roach
absorbing
spaniards
colton
##founded
outsider
espionage
kelsey
245
edible
##ulf
dora
establishes
##sham
##tries
contracting
##tania
cinematic
costello
nesting
##uron
connolly
duff
##nology
mma
##mata
fergus
sexes
gi
optics
spectator
woodstock
banning
##hee
##fle
differentiate
outfielder
refinery
226
312
gerhard
horde
lair
drastically
##udi
landfall
##cheng
motorsport
odi
##achi
predominant
quay
skins
##ental
edna
harshly
complementary
murdering
##aves
wreckage
##90
ono
outstretched
lennox
munitions
galen
reconcile
470
scalp
bicycles
gillespie
questionable
rosenberg
guillermo
hostel
jarvis
kabul
volvo
opium
yd
##twined
abuses
decca
outpost
##cino
sensible
neutrality
##64
ponce
anchorage
atkins
turrets
inadvertently
disagree
libre
vodka
reassuring
weighs
##yal
glide
jumper
ceilings
repertory
outs
stain
##bial
envy
##ucible
smashing
heightened
policing
hyun
mixes
lai
prima
##ples
celeste
##bina
lucrative
intervened
kc
manually
##rned
stature
staffed
bun
bastards
nairobi
priced
##auer
thatcher
##kia
tripped
comune
##ogan
##pled
brasil
incentives
emanuel
hereford
musica
##kim
benedictine
biennale
##lani
eureka
gardiner
rb
knocks
sha
##ael
##elled
##onate
efficacy
ventura
masonic
sanford
maize
leverage
##feit
capacities
santana
##aur
novelty
vanilla
##cter
##tour
benin
##oir
##rain
neptune
drafting
tallinn
##cable
humiliation
##boarding
schleswig
fabian
bernardo
liturgy
spectacle
sweeney
pont
routledge
##tment
cosmos
ut
hilt
sleek
universally
##eville
##gawa
typed
##dry
favors
allegheny
glaciers
##rly
recalling
aziz
##log
parasite
requiem
auf
##berto
##llin
illumination
##breaker
##issa
festivities
bows
govern
vibe
vp
333
sprawled
larson
pilgrim
bwf
leaping
##rts
##ssel
alexei
greyhound
hoarse
##dler
##oration
seneca
##cule
gaping
##ulously
##pura
cinnamon
##gens
##rricular
craven
fantasies
houghton
engined
reigned
dictator
supervising
##oris
bogota
commentaries
unnatural
fingernails
spirituality
tighten
##tm
canadiens
protesting
intentional
cheers
sparta
##ytic
##iere
##zine
widen
belgarath
controllers
dodd
iaaf
navarre
##ication
defect
squire
steiner
whisky
##mins
560
inevitably
tome
##gold
chew
##uid
##lid
elastic
##aby
streaked
alliances
jailed
regal
##ined
##phy
czechoslovak
narration
absently
##uld
bluegrass
guangdong
quran
criticizing
hose
hari
##liest
##owa
skier
streaks
deploy
##lom
raft
bose
dialed
huff
##eira
haifa
simplest
bursting
endings
ib
sultanate
##titled
franks
whitman
ensures
sven
##ggs
collaborators
forster
organising
ui
banished
napier
injustice
teller
layered
thump
##otti
roc
battleships
evidenced
fugitive
sadie
robotics
##roud
equatorial
geologist
##iza
yielding
##bron
##sr
internationale
mecca
##diment
sbs
skyline
toad
uploaded
reflective
undrafted
lal
leafs
bayern
##dai
lakshmi
shortlisted
##stick
##wicz
camouflage
donate
af
christi
lau
##acio
disclosed
nemesis
1761
assemble
straining
northamptonshire
tal
##asi
bernardino
premature
heidi
42nd
coefficients
galactic
reproduce
buzzed
sensations
zionist
monsieur
myrtle
##eme
archery
strangled
musically
viewpoint
antiquities
bei
trailers
seahawks
cured
pee
preferring
tasmanian
lange
sul
##mail
##working
colder
overland
lucivar
massey
gatherings
haitian
##smith
disapproval
flaws
##cco
##enbach
1766
npr
##icular
boroughs
creole
forums
techno
1755
dent
abdominal
streetcar
##eson
##stream
procurement
gemini
predictable
##tya
acheron
christoph
feeder
fronts
vendor
bernhard
jammu
tumors
slang
##uber
goaltender
twists
curving
manson
vuelta
mer
peanut
confessions
pouch
unpredictable
allowance
theodor
vascular
##factory
bala
authenticity
metabolic
coughing
nanjing
##cea
pembroke
##bard
splendid
36th
ff
hourly
##ahu
elmer
handel
##ivate
awarding
thrusting
dl
experimentation
##hesion
##46
caressed
entertained
steak
##rangle
biologist
orphans
baroness
oyster
stepfather
##dridge
mirage
reefs
speeding
##31
barons
1764
227
inhabit
preached
repealed
##tral
honoring
boogie
captives
administer
johanna
##imate
gel
suspiciously
1767
sobs
##dington
backbone
hayward
garry
##folding
##nesia
maxi
##oof
##ppe
ellison
galileo
##stand
crimea
frenzy
amour
bumper
matrices
natalia
baking
garth
palestinians
##grove
smack
conveyed
ensembles
gardening
##manship
##rup
##stituting
1640
harvesting
topography
jing
shifters
dormitory
##carriage
##lston
ist
skulls
##stadt
dolores
jewellery
sarawak
##wai
##zier
fences
christy
confinement
tumbling
credibility
fir
stench
##bria
##plication
##nged
##sam
virtues
##belt
marjorie
pba
##eem
##made
celebrates
schooner
agitated
barley
fulfilling
anthropologist
##pro
restrict
novi
regulating
##nent
padres
##rani
##hesive
loyola
tabitha
milky
olson
proprietor
crambidae
guarantees
intercollegiate
ljubljana
hilda
##sko
ignorant
hooded
##lts
sardinia
##lidae
##vation
frontman
privileged
witchcraft
##gp
jammed
laude
poking
##than
bracket
amazement
yunnan
##erus
maharaja
linnaeus
264
commissioning
milano
peacefully
##logies
akira
rani
regulator
##36
grasses
##rance
luzon
crows
compiler
gretchen
seaman
edouard
tab
buccaneers
ellington
hamlets
whig
socialists
##anto
directorial
easton
mythological
##kr
##vary
rhineland
semantic
taut
dune
inventions
succeeds
##iter
replication
branched
##pired
jul
prosecuted
kangaroo
penetrated
##avian
middlesbrough
doses
bleak
madam
predatory
relentless
##vili
reluctance
##vir
hailey
crore
silvery
1759
monstrous
swimmers
transmissions
hawthorn
informing
##eral
toilets
caracas
crouch
kb
##sett
295
cartel
hadley
##aling
alexia
yvonne
##biology
cinderella
eton
superb
blizzard
stabbing
industrialist
maximus
##gm
##orus
groves
maud
clade
oversized
comedic
##bella
rosen
nomadic
fulham
montane
beverages
galaxies
redundant
swarm
##rot
##folia
##llis
buckinghamshire
fen
bearings
bahadur
##rom
gilles
phased
dynamite
faber
benoit
vip
##ount
##wd
booking
fractured
tailored
anya
spices
westwood
cairns
auditions
inflammation
steamed
##rocity
##acion
##urne
skyla
thereof
watford
torment
archdeacon
transforms
lulu
demeanor
fucked
serge
##sor
mckenna
minas
entertainer
##icide
caress
originate
residue
##sty
1740
##ilised
##org
beech
##wana
subsidies
##ghton
emptied
gladstone
ru
firefighters
voodoo
##rcle
het
nightingale
tamara
edmond
ingredient
weaknesses
silhouette
285
compatibility
withdrawing
hampson
##mona
anguish
giggling
##mber
bookstore
##jiang
southernmost
tilting
##vance
bai
economical
rf
briefcase
dreadful
hinted
projections
shattering
totaling
##rogate
analogue
indicted
periodical
fullback
##dman
haynes
##tenberg
##ffs
##ishment
1745
thirst
stumble
penang
vigorous
##ddling
##kor
##lium
octave
##ove
##enstein
##inen
##ones
siberian
##uti
cbn
repeal
swaying
##vington
khalid
tanaka
unicorn
otago
plastered
lobe
riddle
##rella
perch
##ishing
croydon
filtered
graeme
tripoli
##ossa
crocodile
##chers
sufi
mined
##tung
inferno
lsu
##phi
swelled
utilizes
£2
cale
periodicals
styx
hike
informally
coop
lund
##tidae
ala
hen
qui
transformations
disposed
sheath
chickens
##cade
fitzroy
sas
silesia
unacceptable
odisha
1650
sabrina
pe
spokane
ratios
athena
massage
shen
dilemma
##drum
##riz
##hul
corona
doubtful
niall
##pha
##bino
fines
cite
acknowledging
bangor
ballard
bathurst
##resh
huron
mustered
alzheimer
garments
kinase
tyre
warship
##cp
flashback
pulmonary
braun
cheat
kamal
cyclists
constructions
grenades
ndp
traveller
excuses
stomped
signalling
trimmed
futsal
mosques
relevance
##wine
wta
##23
##vah
##lter
hoc
##riding
optimistic
##´s
deco
sim
interacting
rejecting
moniker
waterways
##ieri
##oku
mayors
gdansk
outnumbered
pearls
##ended
##hampton
fairs
totals
dominating
262
notions
stairway
compiling
pursed
commodities
grease
yeast
##jong
carthage
griffiths
residual
amc
contraction
laird
sapphire
##marine
##ivated
amalgamation
dissolve
inclination
lyle
packaged
altitudes
suez
canons
graded
lurched
narrowing
boasts
guise
wed
enrico
##ovsky
rower
scarred
bree
cub
iberian
protagonists
bargaining
proposing
trainers
voyages
vans
fishes
##aea
##ivist
##verance
encryption
artworks
kazan
sabre
cleopatra
hepburn
rotting
supremacy
mecklenburg
##brate
burrows
hazards
outgoing
flair
organizes
##ctions
scorpion
##usions
boo
234
chevalier
dunedin
slapping
##34
ineligible
pensions
##38
##omic
manufactures
emails
bismarck
238
weakening
blackish
ding
mcgee
quo
##rling
northernmost
xx
manpower
greed
sampson
clicking
##ange
##horpe
##inations
##roving
torre
##eptive
##moral
symbolism
38th
asshole
meritorious
outfits
splashed
biographies
sprung
astros
##tale
302
737
filly
raoul
nw
tokugawa
linden
clubhouse
##apa
tracts
romano
##pio
putin
tags
##note
chained
dickson
gunshot
moe
gunn
rashid
##tails
zipper
##bas
##nea
contrasted
##ply
##udes
plum
pharaoh
##pile
aw
comedies
ingrid
sandwiches
subdivisions
1100
mariana
nokia
kamen
hz
delaney
veto
herring
##words
possessive
outlines
##roup
siemens
stairwell
rc
gallantry
messiah
palais
yells
233
zeppelin
##dm
bolivar
##cede
smackdown
mckinley
##mora
##yt
muted
geologic
finely
unitary
avatar
hamas
maynard
rees
bog
contrasting
##rut
liv
chico
disposition
pixel
##erate
becca
dmitry
yeshiva
narratives
##lva
##ulton
mercenary
sharpe
tempered
navigate
stealth
amassed
keynes
##lini
untouched
##rrie
havoc
lithium
##fighting
abyss
graf
southward
wolverine
balloons
implements
ngos
transitions
##icum
ambushed
concacaf
dormant
economists
##dim
costing
csi
rana
universite
boulders
verity
##llon
collin
mellon
misses
cypress
fluorescent
lifeless
spence
##ulla
crewe
shepard
pak
revelations
##م
jolly
gibbons
paw
##dro
##quel
freeing
##test
shack
fries
palatine
##51
##hiko
accompaniment
cruising
recycled
##aver
erwin
sorting
synthesizers
dyke
realities
sg
strides
enslaved
wetland
##ghan
competence
gunpowder
grassy
maroon
reactors
objection
##oms
carlson
gearbox
macintosh
radios
shelton
##sho
clergyman
prakash
254
mongols
trophies
oricon
228
stimuli
twenty20
cantonese
cortes
mirrored
##saurus
bhp
cristina
melancholy
##lating
enjoyable
nuevo
##wny
downfall
schumacher
##ind
banging
lausanne
rumbled
paramilitary
reflex
ax
amplitude
migratory
##gall
##ups
midi
barnard
lastly
sherry
##hp
##nall
keystone
##kra
carleton
slippery
##53
coloring
foe
socket
otter
##rgos
mats
##tose
consultants
bafta
bison
topping
##km
490
primal
abandonment
transplant
atoll
hideous
mort
pained
reproduced
tae
howling
##turn
unlawful
billionaire
hotter
poised
lansing
##chang
dinamo
retro
messing
nfc
domesday
##mina
blitz
timed
##athing
##kley
ascending
gesturing
##izations
signaled
tis
chinatown
mermaid
savanna
jameson
##aint
catalina
##pet
##hers
cochrane
cy
chatting
##kus
alerted
computation
mused
noelle
majestic
mohawk
campo
octagonal
##sant
##hend
241
aspiring
##mart
comprehend
iona
paralyzed
shimmering
swindon
rhone
##eley
reputed
configurations
pitchfork
agitation
francais
gillian
lipstick
##ilo
outsiders
pontifical
resisting
bitterness
sewer
rockies
##edd
##ucher
misleading
1756
exiting
galloway
##nging
risked
##heart
246
commemoration
schultz
##rka
integrating
##rsa
poses
shrieked
##weiler
guineas
gladys
jerking
owls
goldsmith
nightly
penetrating
##unced
lia
##33
ignited
betsy
##aring
##thorpe
follower
vigorously
##rave
coded
kiran
knit
zoology
tbilisi
##28
##bered
repository
govt
deciduous
dino
growling
##bba
enhancement
unleashed
chanting
pussy
biochemistry
##eric
kettle
repression
toxicity
nrhp
##arth
##kko
##bush
ernesto
commended
outspoken
242
mca
parchment
sms
kristen
##aton
bisexual
raked
glamour
navajo
a2
conditioned
showcased
##hma
spacious
youthful
##esa
usl
appliances
junta
brest
layne
conglomerate
enchanted
chao
loosened
picasso
circulating
inspect
montevideo
##centric
##kti
piazza
spurred
##aith
bari
freedoms
poultry
stamford
lieu
##ect
indigo
sarcastic
bahia
stump
attach
dvds
frankenstein
lille
approx
scriptures
pollen
##script
nmi
overseen
##ivism
tides
proponent
newmarket
inherit
milling
##erland
centralized
##rou
distributors
credentials
drawers
abbreviation
##lco
##xon
downing
uncomfortably
ripe
##oes
erase
franchises
##ever
populace
##bery
##khar
decomposition
pleas
##tet
daryl
sabah
##stle
##wide
fearless
genie
lesions
annette
##ogist
oboe
appendix
nair
dripped
petitioned
maclean
mosquito
parrot
rpg
hampered
1648
operatic
reservoirs
##tham
irrelevant
jolt
summarized
##fp
medallion
##taff
##−
clawed
harlow
narrower
goddard
marcia
bodied
fremont
suarez
altering
tempest
mussolini
porn
##isms
sweetly
oversees
walkers
solitude
grimly
shrines
hk
ich
supervisors
hostess
dietrich
legitimacy
brushes
expressive
##yp
dissipated
##rse
localized
systemic
##nikov
gettysburg
##js
##uaries
dialogues
muttering
251
housekeeper
sicilian
discouraged
##frey
beamed
kaladin
halftime
kidnap
##amo
##llet
1754
synonymous
depleted
instituto
insulin
reprised
##opsis
clashed
##ctric
interrupting
radcliffe
insisting
medici
1715
ejected
playfully
turbulent
##47
starvation
##rini
shipment
rebellious
petersen
verification
merits
##rified
cakes
##charged
1757
milford
shortages
spying
fidelity
##aker
emitted
storylines
harvested
seismic
##iform
cheung
kilda
theoretically
barbie
lynx
##rgy
##tius
goblin
mata
poisonous
##nburg
reactive
residues
obedience
##евич
conjecture
##rac
401
hating
sixties
kicker
moaning
motown
##bha
emancipation
neoclassical
##hering
consoles
ebert
professorship
##tures
sustaining
assaults
obeyed
affluent
incurred
tornadoes
##eber
##zow
emphasizing
highlanders
cheated
helmets
##ctus
internship
terence
bony
executions
legislators
berries
peninsular
tinged
##aco
1689
amplifier
corvette
ribbons
lavish
pennant
##lander
worthless
##chfield
##forms
mariano
pyrenees
expenditures
##icides
chesterfield
mandir
tailor
39th
sergey
nestled
willed
aristocracy
devotees
goodnight
raaf
rumored
weaponry
remy
appropriations
harcourt
burr
riaa
##lence
limitation
unnoticed
guo
soaking
swamps
##tica
collapsing
tatiana
descriptive
brigham
psalm
##chment
maddox
##lization
patti
caliph
##aja
akron
injuring
serra
##ganj
basins
##sari
astonished
launcher
##church
hilary
wilkins
sewing
##sf
stinging
##fia
##ncia
underwood
startup
##ition
compilations
vibrations
embankment
jurist
##nity
bard
juventus
groundwater
kern
palaces
helium
boca
cramped
marissa
soto
##worm
jae
princely
##ggy
faso
bazaar
warmly
##voking
229
pairing
##lite
##grate
##nets
wien
freaked
ulysses
rebirth
##alia
##rent
mummy
guzman
jimenez
stilled
##nitz
trajectory
tha
woken
archival
professions
##pts
##pta
hilly
shadowy
shrink
##bolt
norwood
glued
migrate
stereotypes
devoid
##pheus
625
evacuate
horrors
infancy
gotham
knowles
optic
downloaded
sachs
kingsley
parramatta
darryl
mor
##onale
shady
commence
confesses
kan
##meter
##placed
marlborough
roundabout
regents
frigates
io
##imating
gothenburg
revoked
carvings
clockwise
convertible
intruder
##sche
banged
##ogo
vicky
bourgeois
##mony
dupont
footing
##gum
pd
##real
buckle
yun
penthouse
sane
720
serviced
stakeholders
neumann
bb
##eers
comb
##gam
catchment
pinning
rallies
typing
##elles
forefront
freiburg
sweetie
giacomo
widowed
goodwill
worshipped
aspirations
midday
##vat
fishery
##trick
bournemouth
turk
243
hearth
ethanol
guadalajara
murmurs
sl
##uge
afforded
scripted
##hta
wah
##jn
coroner
translucent
252
memorials
puck
progresses
clumsy
##race
315
candace
recounted
##27
##slin
##uve
filtering
##mac
howl
strata
heron
leveled
##ays
dubious
##oja
##т
##wheel
citations
exhibiting
##laya
##mics
##pods
turkic
##lberg
injunction
##ennial
##mit
antibodies
##44
organise
##rigues
cardiovascular
cushion
inverness
##zquez
dia
cocoa
sibling
##tman
##roid
expanse
feasible
tunisian
algiers
##relli
rus
bloomberg
dso
westphalia
bro
tacoma
281
downloads
##ours
konrad
duran
##hdi
continuum
jett
compares
legislator
secession
##nable
##gues
##zuka
translating
reacher
##gley
##ła
aleppo
##agi
tc
orchards
trapping
linguist
versatile
drumming
postage
calhoun
superiors
##mx
barefoot
leary
##cis
ignacio
alfa
kaplan
##rogen
bratislava
mori
##vot
disturb
haas
313
cartridges
gilmore
radiated
salford
tunic
hades
##ulsive
archeological
delilah
magistrates
auditioned
brewster
charters
empowerment
blogs
cappella
dynasties
iroquois
whipping
##krishna
raceway
truths
myra
weaken
judah
mcgregor
##horse
mic
refueling
37th
burnley
bosses
markus
premio
query
##gga
dunbar
##economic
darkest
lyndon
sealing
commendation
reappeared
##mun
addicted
ezio
slaughtered
satisfactory
shuffle
##eves
##thic
##uj
fortification
warrington
##otto
resurrected
fargo
mane
##utable
##lei
##space
foreword
ox
##aris
##vern
abrams
hua
##mento
sakura
##alo
uv
sentimental
##skaya
midfield
##eses
sturdy
scrolls
macleod
##kyu
entropy
##lance
mitochondrial
cicero
excelled
thinner
convoys
perceive
##oslav
##urable
systematically
grind
burkina
287
##tagram
ops
##aman
guantanamo
##cloth
##tite
forcefully
wavy
##jou
pointless
##linger
##tze
layton
portico
superficial
clerical
outlaws
##hism
burials
muir
##inn
creditors
hauling
rattle
##leg
calais
monde
archers
reclaimed
dwell
wexford
hellenic
falsely
remorse
##tek
dough
furnishings
##uttered
gabon
neurological
novice
##igraphy
contemplated
pulpit
nightstand
saratoga
##istan
documenting
pulsing
taluk
##firmed
busted
marital
##rien
disagreements
wasps
##yes
hodge
mcdonnell
mimic
fran
pendant
dhabi
musa
##nington
congratulations
argent
darrell
concussion
losers
regrets
thessaloniki
reversal
donaldson
hardwood
thence
achilles
ritter
##eran
demonic
jurgen
prophets
goethe
eki
classmate
buff
##cking
yank
irrational
##inging
perished
seductive
qur
sourced
##crat
##typic
mustard
ravine
barre
horizontally
characterization
phylogenetic
boise
##dit
##runner
##tower
brutally
intercourse
seduce
##bbing
fay
ferris
ogden
amar
nik
unarmed
##inator
evaluating
kyrgyzstan
sweetness
##lford
##oki
mccormick
meiji
notoriety
stimulate
disrupt
figuring
instructional
mcgrath
##zoo
groundbreaking
##lto
flinch
khorasan
agrarian
bengals
mixer
radiating
##sov
ingram
pitchers
nad
tariff
##cript
tata
##codes
##emi
##ungen
appellate
lehigh
##bled
##giri
brawl
duct
texans
##ciation
##ropolis
skipper
speculative
vomit
doctrines
stresses
253
davy
graders
whitehead
jozef
timely
cumulative
haryana
paints
appropriately
boon
cactus
##ales
##pid
dow
legions
##pit
perceptions
1730
picturesque
##yse
periphery
rune
wr
##aha
celtics
sentencing
whoa
##erin
confirms
variance
425
moines
mathews
spade
rave
m1
fronted
fx
blending
alleging
reared
##gl
237
##paper
grassroots
eroded
##free
##physical
directs
ordeal
##sław
accelerate
hacker
rooftop
##inia
lev
buys
cebu
devote
##lce
specialising
##ulsion
choreographed
repetition
warehouses
##ryl
paisley
tuscany
analogy
sorcerer
hash
huts
shards
descends
exclude
nix
chaplin
gaga
ito
vane
##drich
causeway
misconduct
limo
orchestrated
glands
jana
##kot
u2
##mple
##sons
branching
contrasts
scoop
longed
##virus
chattanooga
##75
syrup
cornerstone
##tized
##mind
##iaceae
careless
precedence
frescoes
##uet
chilled
consult
modelled
snatch
peat
##thermal
caucasian
humane
relaxation
spins
temperance
##lbert
occupations
lambda
hybrids
moons
mp3
##oese
247
rolf
societal
yerevan
ness
##ssler
befriended
mechanized
nominate
trough
boasted
cues
seater
##hom
bends
##tangle
conductors
emptiness
##lmer
eurasian
adriatic
tian
##cie
anxiously
lark
propellers
chichester
jock
ev
2a
##holding
credible
recounts
tori
loyalist
abduction
##hoot
##redo
nepali
##mite
ventral
tempting
##ango
##crats
steered
##wice
javelin
dipping
laborers
prentice
looming
titanium
##ː
badges
emir
tensor
##ntation
egyptians
rash
denies
hawthorne
lombard
showers
wehrmacht
dietary
trojan
##reus
welles
executing
horseshoe
lifeboat
##lak
elsa
infirmary
nearing
roberta
boyer
mutter
trillion
joanne
##fine
##oked
sinks
vortex
uruguayan
clasp
sirius
##block
accelerator
prohibit
sunken
byu
chronological
diplomats
ochreous
510
symmetrical
1644
maia
##tology
salts
reigns
atrocities
##ия
hess
bared
issn
##vyn
cater
saturated
##cycle
##isse
sable
voyager
dyer
yusuf
##inge
fountains
wolff
##39
##nni
engraving
rollins
atheist
ominous
##ault
herr
chariot
martina
strung
##fell
##farlane
horrific
sahib
gazes
saetan
erased
ptolemy
##olic
flushing
lauderdale
analytic
##ices
530
navarro
beak
gorilla
herrera
broom
guadalupe
raiding
sykes
311
bsc
deliveries
1720
invasions
carmichael
tajikistan
thematic
ecumenical
sentiments
onstage
##rians
##brand
##sume
catastrophic
flanks
molten
##arns
waller
aimee
terminating
##icing
alternately
##oche
nehru
printers
outraged
##eving
empires
template
banners
repetitive
za
##oise
vegetarian
##tell
guiana
opt
cavendish
lucknow
synthesized
##hani
##mada
finalized
##ctable
fictitious
mayoral
unreliable
##enham
embracing
peppers
rbis
##chio
##neo
inhibition
slashed
togo
orderly
embroidered
safari
salty
236
barron
benito
totaled
##dak
pubs
simulated
caden
devin
tolkien
momma
welding
sesame
##ept
gottingen
hardness
630
shaman
temeraire
620
adequately
pediatric
##kit
ck
assertion
radicals
composure
cadence
seafood
beaufort
lazarus
mani
warily
cunning
kurdistan
249
cantata
##kir
ares
##41
##clusive
nape
townland
geared
insulted
flutter
boating
violate
draper
dumping
malmo
##hh
##romatic
firearm
alta
bono
obscured
##clave
exceeds
panorama
unbelievable
##train
preschool
##essed
disconnected
installing
rescuing
secretaries
accessibility
##castle
##drive
##ifice
##film
bouts
slug
waterway
mindanao
##buro
##ratic
halves
##ل
calming
liter
maternity
adorable
bragg
electrification
mcc
##dote
roxy
schizophrenia
##body
munoz
kaye
whaling
239
mil
tingling
tolerant
##ago
unconventional
volcanoes
##finder
deportivo
##llie
robson
kaufman
neuroscience
wai
deportation
masovian
scraping
converse
##bh
hacking
bulge
##oun
administratively
yao
580
amp
mammoth
booster
claremont
hooper
nomenclature
pursuits
mclaughlin
melinda
##sul
catfish
barclay
substrates
taxa
zee
originals
kimberly
packets
padma
##ality
borrowing
ostensibly
solvent
##bri
##genesis
##mist
lukas
shreveport
veracruz
##ь
##lou
##wives
cheney
tt
anatolia
hobbs
##zyn
cyclic
radiant
alistair
greenish
siena
dat
independents
##bation
conform
pieter
hyper
applicant
bradshaw
spores
telangana
vinci
inexpensive
nuclei
322
jang
nme
soho
spd
##ign
cradled
receptionist
pow
##43
##rika
fascism
##ifer
experimenting
##ading
##iec
##region
345
jocelyn
maris
stair
nocturnal
toro
constabulary
elgin
##kker
msc
##giving
##schen
##rase
doherty
doping
sarcastically
batter
maneuvers
##cano
##apple
##gai
##git
intrinsic
##nst
##stor
1753
showtime
cafes
gasps
lviv
ushered
##thed
fours
restart
astonishment
transmitting
flyer
shrugs
##sau
intriguing
cones
dictated
mushrooms
medial
##kovsky
##elman
escorting
gaped
##26
godfather
##door
##sell
djs
recaptured
timetable
vila
1710
3a
aerodrome
mortals
scientology
##orne
angelina
mag
convection
unpaid
insertion
intermittent
lego
##nated
endeavor
kota
pereira
##lz
304
bwv
glamorgan
insults
agatha
fey
##cend
fleetwood
mahogany
protruding
steamship
zeta
##arty
mcguire
suspense
##sphere
advising
urges
##wala
hurriedly
meteor
gilded
inline
arroyo
stalker
##oge
excitedly
revered
##cure
earle
introductory
##break
##ilde
mutants
puff
pulses
reinforcement
##haling
curses
lizards
stalk
correlated
##fixed
fallout
macquarie
##unas
bearded
denton
heaving
802
##ocation
winery
assign
dortmund
##lkirk
everest
invariant
charismatic
susie
##elling
bled
lesley
telegram
sumner
bk
##ogen
##к
wilcox
needy
colbert
duval
##iferous
##mbled
allotted
attends
imperative
##hita
replacements
hawker
##inda
insurgency
##zee
##eke
casts
##yla
680
ives
transitioned
##pack
##powering
authoritative
baylor
flex
cringed
plaintiffs
woodrow
##skie
drastic
ape
aroma
unfolded
commotion
nt
preoccupied
theta
routines
lasers
privatization
wand
domino
ek
clenching
nsa
strategically
showered
bile
handkerchief
pere
storing
christophe
insulting
316
nakamura
romani
asiatic
magdalena
palma
cruises
stripping
405
konstantin
soaring
##berman
colloquially
forerunner
havilland
incarcerated
parasites
sincerity
##utus
disks
plank
saigon
##ining
corbin
homo
ornaments
powerhouse
##tlement
chong
fastened
feasibility
idf
morphological
usable
##nish
##zuki
aqueduct
jaguars
keepers
##flies
aleksandr
faust
assigns
ewing
bacterium
hurled
tricky
hungarians
integers
wallis
321
yamaha
##isha
hushed
oblivion
aviator
evangelist
friars
##eller
monograph
ode
##nary
airplanes
labourers
charms
##nee
1661
hagen
tnt
rudder
fiesta
transcript
dorothea
ska
inhibitor
maccabi
retorted
raining
encompassed
clauses
menacing
1642
lineman
##gist
vamps
##ape
##dick
gloom
##rera
dealings
easing
seekers
##nut
##pment
helens
unmanned
##anu
##isson
basics
##amy
##ckman
adjustments
1688
brutality
horne
##zell
sui
##55
##mable
aggregator
##thal
rhino
##drick
##vira
counters
zoom
##01
##rting
mn
montenegrin
packard
##unciation
##♭
##kki
reclaim
scholastic
thugs
pulsed
##icia
syriac
quan
saddam
banda
kobe
blaming
buddies
dissent
##lusion
##usia
corbett
jaya
delle
erratic
lexie
##hesis
435
amiga
hermes
##pressing
##leen
chapels
gospels
jamal
##uating
compute
revolving
warp
##sso
##thes
armory
##eras
##gol
antrim
loki
##kow
##asian
##good
##zano
braid
handwriting
subdistrict
funky
pantheon
##iculate
concurrency
estimation
improper
juliana
##his
newcomers
johnstone
staten
communicated
##oco
##alle
sausage
stormy
##stered
##tters
superfamily
##grade
acidic
collateral
tabloid
##oped
##rza
bladder
austen
##ellant
mcgraw
##hay
hannibal
mein
aquino
lucifer
wo
badger
boar
cher
christensen
greenberg
interruption
##kken
jem
244
mocked
bottoms
cambridgeshire
##lide
sprawling
##bbly
eastwood
ghent
synth
##buck
advisers
##bah
nominally
hapoel
qu
daggers
estranged
fabricated
towels
vinnie
wcw
misunderstanding
anglia
nothin
unmistakable
##dust
##lova
chilly
marquette
truss
##edge
##erine
reece
##lty
##chemist
##connected
272
308
41st
bash
raion
waterfalls
##ump
##main
labyrinth
queue
theorist
##istle
bharatiya
flexed
soundtracks
rooney
leftist
patrolling
wharton
plainly
alleviate
eastman
schuster
topographic
engages
immensely
unbearable
fairchild
1620
dona
lurking
parisian
oliveira
ia
indictment
hahn
bangladeshi
##aster
vivo
##uming
##ential
antonia
expects
indoors
kildare
harlan
##logue
##ogenic
##sities
forgiven
##wat
childish
tavi
##mide
##orra
plausible
grimm
successively
scooted
##bola
##dget
##rith
spartans
emery
flatly
azure
epilogue
##wark
flourish
##iny
##tracted
##overs
##oshi
bestseller
distressed
receipt
spitting
hermit
topological
##cot
drilled
subunit
francs
##layer
eel
##fk
##itas
octopus
footprint
petitions
ufo
##say
##foil
interfering
leaking
palo
##metry
thistle
valiant
##pic
narayan
mcpherson
##fast
gonzales
##ym
##enne
dustin
novgorod
solos
##zman
doin
##raph
##patient
##meyer
soluble
ashland
cuffs
carole
pendleton
whistling
vassal
##river
deviation
revisited
constituents
rallied
rotate
loomed
##eil
##nting
amateurs
augsburg
auschwitz
crowns
skeletons
##cona
bonnet
257
dummy
globalization
simeon
sleeper
mandal
differentiated
##crow
##mare
milne
bundled
exasperated
talmud
owes
segregated
##feng
##uary
dentist
piracy
props
##rang
devlin
##torium
malicious
paws
##laid
dependency
##ergy
##fers
##enna
258
pistons
rourke
jed
grammatical
tres
maha
wig
512
ghostly
jayne
##achal
##creen
##ilis
##lins
##rence
designate
##with
arrogance
cambodian
clones
showdown
throttle
twain
##ception
lobes
metz
nagoya
335
braking
##furt
385
roaming
##minster
amin
crippled
##37
##llary
indifferent
hoffmann
idols
intimidating
1751
261
influenza
memo
onions
1748
bandage
consciously
##landa
##rage
clandestine
observes
swiped
tangle
##ener
##jected
##trum
##bill
##lta
hugs
congresses
josiah
spirited
##dek
humanist
managerial
filmmaking
inmate
rhymes
debuting
grimsby
ur
##laze
duplicate
vigor
##tf
republished
bolshevik
refurbishment
antibiotics
martini
methane
newscasts
royale
horizons
levant
iain
visas
##ischen
paler
##around
manifestation
snuck
alf
chop
futile
pedestal
rehab
##kat
bmg
kerman
res
fairbanks
jarrett
abstraction
saharan
##zek
1746
procedural
clearer
kincaid
sash
luciano
##ffey
crunch
helmut
##vara
revolutionaries
##tute
creamy
leach
##mmon
1747
permitting
nes
plight
wendell
##lese
contra
ts
clancy
ipa
mach
staples
autopsy
disturbances
nueva
karin
pontiac
##uding
proxy
venerable
haunt
leto
bergman
expands
##helm
wal
##pipe
canning
celine
cords
obesity
##enary
intrusion
planner
##phate
reasoned
sequencing
307
harrow
##chon
##dora
marred
mcintyre
repay
tarzan
darting
248
harrisburg
margarita
repulsed
##hur
##lding
belinda
hamburger
novo
compliant
runways
bingham
registrar
skyscraper
ic
cuthbert
improvisation
livelihood
##corp
##elial
admiring
##dened
sporadic
believer
casablanca
popcorn
##29
asha
shovel
##bek
##dice
coiled
tangible
##dez
casper
elsie
resin
tenderness
rectory
##ivision
avail
sonar
##mori
boutique
##dier
guerre
bathed
upbringing
vaulted
sandals
blessings
##naut
##utnant
1680
306
foxes
pia
corrosion
hesitantly
confederates
crystalline
footprints
shapiro
tirana
valentin
drones
45th
microscope
shipments
texted
inquisition
wry
guernsey
unauthorized
resigning
760
ripple
schubert
stu
reassure
felony
##ardo
brittle
koreans
##havan
##ives
dun
implicit
tyres
##aldi
##lth
magnolia
##ehan
##puri
##poulos
aggressively
fei
gr
familiarity
##poo
indicative
##trust
fundamentally
jimmie
overrun
395
anchors
moans
##opus
britannia
armagh
##ggle
purposely
seizing
##vao
bewildered
mundane
avoidance
cosmopolitan
geometridae
quartermaster
caf
415
chatter
engulfed
gleam
purge
##icate
juliette
jurisprudence
guerra
revisions
##bn
casimir
brew
##jm
1749
clapton
cloudy
conde
hermitage
278
simulations
torches
vincenzo
matteo
##rill
hidalgo
booming
westbound
accomplishment
tentacles
unaffected
##sius
annabelle
flopped
sloping
##litz
dreamer
interceptor
vu
##loh
consecration
copying
messaging
breaker
climates
hospitalized
1752
torino
afternoons
winfield
witnessing
##teacher
breakers
choirs
sawmill
coldly
##ege
sipping
haste
uninhabited
conical
bibliography
pamphlets
severn
edict
##oca
deux
illnesses
grips
##pl
rehearsals
sis
thinkers
tame
##keepers
1690
acacia
reformer
##osed
##rys
shuffling
##iring
##shima
eastbound
ionic
rhea
flees
littered
##oum
rocker
vomiting
groaning
champ
overwhelmingly
civilizations
paces
sloop
adoptive
##tish
skaters
##vres
aiding
mango
##joy
nikola
shriek
##ignon
pharmaceuticals
##mg
tuna
calvert
gustavo
stocked
yearbook
##urai
##mana
computed
subsp
riff
hanoi
kelvin
hamid
moors
pastures
summons
jihad
nectar
##ctors
bayou
untitled
pleasing
vastly
republics
intellect
##η
##ulio
##tou
crumbling
stylistic
sb
##ی
consolation
frequented
h₂o
walden
widows
##iens
404
##ignment
chunks
improves
288
grit
recited
##dev
snarl
sociological
##arte
##gul
inquired
##held
bruise
clube
consultancy
homogeneous
hornets
multiplication
pasta
prick
savior
##grin
##kou
##phile
yoon
##gara
grimes
vanishing
cheering
reacting
bn
distillery
##quisite
##vity
coe
dockyard
massif
##jord
escorts
voss
##valent
byte
chopped
hawke
illusions
workings
floats
##koto
##vac
kv
annapolis
madden
##onus
alvaro
noctuidae
##cum
##scopic
avenge
steamboat
forte
illustrates
erika
##trip
570
dew
nationalities
bran
manifested
thirsty
diversified
muscled
reborn
##standing
arson
##lessness
##dran
##logram
##boys
##kushima
##vious
willoughby
##phobia
286
alsace
dashboard
yuki
##chai
granville
myspace
publicized
tricked
##gang
adjective
##ater
relic
reorganisation
enthusiastically
indications
saxe
##lassified
consolidate
iec
padua
helplessly
ramps
renaming
regulars
pedestrians
accents
convicts
inaccurate
lowers
mana
##pati
barrie
bjp
outta
someplace
berwick
flanking
invoked
marrow
sparsely
excerpts
clothed
rei
##ginal
wept
##straße
##vish
alexa
excel
##ptive
membranes
aquitaine
creeks
cutler
sheppard
implementations
ns
##dur
fragrance
budge
concordia
magnesium
marcelo
##antes
gladly
vibrating
##rral
##ggles
montrose
##omba
lew
seamus
1630
cocky
##ament
##uen
bjorn
##rrick
fielder
fluttering
##lase
methyl
kimberley
mcdowell
reductions
barbed
##jic
##tonic
aeronautical
condensed
distracting
##promising
huffed
##cala
##sle
claudius
invincible
missy
pious
balthazar
ci
##lang
butte
combo
orson
##dication
myriad
1707
silenced
##fed
##rh
coco
netball
yourselves
##oza
clarify
heller
peg
durban
etudes
offender
roast
blackmail
curvature
##woods
vile
309
illicit
suriname
##linson
overture
1685
bubbling
gymnast
tucking
##mming
##ouin
maldives
##bala
gurney
##dda
##eased
##oides
backside
pinto
jars
racehorse
tending
##rdial
baronetcy
wiener
duly
##rke
barbarian
cupping
flawed
##thesis
bertha
pleistocene
puddle
swearing
##nob
##tically
fleeting
prostate
amulet
educating
##mined
##iti
##tler
75th
jens
respondents
analytics
cavaliers
papacy
raju
##iente
##ulum
##tip
funnel
271
disneyland
##lley
sociologist
##iam
2500
faulkner
louvre
menon
##dson
276
##ower
afterlife
mannheim
peptide
referees
comedians
meaningless
##anger
##laise
fabrics
hurley
renal
sleeps
##bour
##icle
breakout
kristin
roadside
animator
clover
disdain
unsafe
redesign
##urity
firth
barnsley
portage
reset
narrows
268
commandos
expansive
speechless
tubular
##lux
essendon
eyelashes
smashwords
##yad
##bang
##claim
craved
sprinted
chet
somme
astor
wrocław
orton
266
bane
##erving
##uing
mischief
##amps
##sund
scaling
terre
##xious
impairment
offenses
undermine
moi
soy
contiguous
arcadia
inuit
seam
##tops
macbeth
rebelled
##icative
##iot
590
elaborated
frs
uniformed
##dberg
259
powerless
priscilla
stimulated
980
qc
arboretum
frustrating
trieste
bullock
##nified
enriched
glistening
intern
##adia
locus
nouvelle
ollie
ike
lash
starboard
ee
tapestry
headlined
hove
rigged
##vite
pollock
##yme
thrive
clustered
cas
roi
gleamed
olympiad
##lino
pressured
regimes
##hosis
##lick
ripley
##ophone
kickoff
gallon
rockwell
##arable
crusader
glue
revolutions
scrambling
1714
grover
##jure
englishman
aztec
263
contemplating
coven
ipad
preach
triumphant
tufts
##esian
rotational
##phus
328
falkland
##brates
strewn
clarissa
rejoin
environmentally
glint
banded
drenched
moat
albanians
johor
rr
maestro
malley
nouveau
shaded
taxonomy
v6
adhere
bunk
airfields
##ritan
1741
encompass
remington
tran
##erative
amelie
mazda
friar
morals
passions
##zai
breadth
vis
##hae
argus
burnham
caressing
insider
rudd
##imov
##mini
##rso
italianate
murderous
textual
wainwright
armada
bam
weave
timer
##taken
##nh
fra
##crest
ardent
salazar
taps
tunis
##ntino
allegro
gland
philanthropic
##chester
implication
##optera
esq
judas
noticeably
wynn
##dara
inched
indexed
crises
villiers
bandit
royalties
patterned
cupboard
interspersed
accessory
isla
kendrick
entourage
stitches
##esthesia
headwaters
##ior
interlude
distraught
draught
1727
##basket
biased
sy
transient
triad
subgenus
adapting
kidd
shortstop
##umatic
dimly
spiked
mcleod
reprint
nellie
pretoria
windmill
##cek
singled
##mps
273
reunite
##orous
747
bankers
outlying
##omp
##ports
##tream
apologies
cosmetics
patsy
##deh
##ocks
##yson
bender
nantes
serene
##nad
lucha
mmm
323
##cius
##gli
cmll
coinage
nestor
juarez
##rook
smeared
sprayed
twitching
sterile
irina
embodied
juveniles
enveloped
miscellaneous
cancers
dq
gulped
luisa
crested
swat
donegal
ref
##anov
##acker
hearst
mercantile
##lika
doorbell
ua
vicki
##alla
##som
bilbao
psychologists
stryker
sw
horsemen
turkmenistan
wits
##national
anson
mathew
screenings
##umb
rihanna
##agne
##nessy
aisles
##iani
##osphere
hines
kenton
saskatoon
tasha
truncated
##champ
##itan
mildred
advises
fredrik
interpreting
inhibitors
##athi
spectroscopy
##hab
##kong
karim
panda
##oia
##nail
##vc
conqueror
kgb
leukemia
##dity
arrivals
cheered
pisa
phosphorus
shielded
##riated
mammal
unitarian
urgently
chopin
sanitary
##mission
spicy
drugged
hinges
##tort
tipping
trier
impoverished
westchester
##caster
267
epoch
nonstop
##gman
##khov
aromatic
centrally
cerro
##tively
##vio
billions
modulation
sedimentary
283
facilitating
outrageous
goldstein
##eak
##kt
ld
maitland
penultimate
pollard
##dance
fleets
spaceship
vertebrae
##nig
alcoholism
als
recital
##bham
##ference
##omics
m2
##bm
trois
##tropical
##в
commemorates
##meric
marge
##raction
1643
670
cosmetic
ravaged
##ige
catastrophe
eng
##shida
albrecht
arterial
bellamy
decor
harmon
##rde
bulbs
synchronized
vito
easiest
shetland
shielding
wnba
##glers
##ssar
##riam
brianna
cumbria
##aceous
##rard
cores
thayer
##nsk
brood
hilltop
luminous
carts
keynote
larkin
logos
##cta
##ا
##mund
##quay
lilith
tinted
277
wrestle
mobilization
##uses
sequential
siam
bloomfield
takahashi
274
##ieving
presenters
ringo
blazed
witty
##oven
##ignant
devastation
haydn
harmed
newt
therese
##peed
gershwin
molina
rabbis
sudanese
001
innate
restarted
##sack
##fus
slices
wb
##shah
enroll
hypothetical
hysterical
1743
fabio
indefinite
warped
##hg
exchanging
525
unsuitable
##sboro
gallo
1603
bret
cobalt
homemade
##hunter
mx
operatives
##dhar
terraces
durable
latch
pens
whorls
##ctuated
##eaux
billing
ligament
succumbed
##gly
regulators
spawn
##brick
##stead
filmfare
rochelle
##nzo
1725
circumstance
saber
supplements
##nsky
##tson
crowe
wellesley
carrot
##9th
##movable
primate
drury
sincerely
topical
##mad
##rao
callahan
kyiv
smarter
tits
undo
##yeh
announcements
anthologies
barrio
nebula
##islaus
##shaft
##tyn
bodyguards
2021
assassinate
barns
emmett
scully
##mah
##yd
##eland
##tino
##itarian
demoted
gorman
lashed
prized
adventist
writ
##gui
alla
invertebrates
##ausen
1641
amman
1742
align
healy
redistribution
##gf
##rize
insulation
##drop
adherents
hezbollah
vitro
ferns
yanking
269
php
registering
uppsala
cheerleading
confines
mischievous
tully
##ross
49th
docked
roam
stipulated
pumpkin
##bry
prompt
##ezer
blindly
shuddering
craftsmen
frail
scented
katharine
scramble
shaggy
sponge
helix
zaragoza
279
##52
43rd
backlash
fontaine
seizures
posse
cowan
nonfiction
telenovela
wwii
hammered
undone
##gpur
encircled
irs
##ivation
artefacts
oneself
searing
smallpox
##belle
##osaurus
shandong
breached
upland
blushing
rankin
infinitely
psyche
tolerated
docking
evicted
##col
unmarked
##lving
gnome
lettering
litres
musique
##oint
benevolent
##jal
blackened
##anna
mccall
racers
tingle
##ocene
##orestation
introductions
radically
292
##hiff
##باد
1610
1739
munchen
plead
##nka
condo
scissors
##sight
##tens
apprehension
##cey
##yin
hallmark
watering
formulas
sequels
##llas
aggravated
bae
commencing
##building
enfield
prohibits
marne
vedic
civilized
euclidean
jagger
beforehand
blasts
dumont
##arney
##nem
740
conversions
hierarchical
rios
simulator
##dya
##lellan
hedges
oleg
thrusts
shadowed
darby
maximize
1744
gregorian
##nded
##routed
sham
unspecified
##hog
emory
factual
##smo
##tp
fooled
##rger
ortega
wellness
marlon
##oton
##urance
casket
keating
ley
enclave
##ayan
char
influencing
jia
##chenko
412
ammonia
erebidae
incompatible
violins
cornered
##arat
grooves
astronauts
columbian
rampant
fabrication
kyushu
mahmud
vanish
##dern
mesopotamia
##lete
ict
##rgen
caspian
kenji
pitted
##vered
999
grimace
roanoke
tchaikovsky
twinned
##analysis
##awan
xinjiang
arias
clemson
kazakh
sizable
1662
##khand
##vard
plunge
tatum
vittorio
##nden
cholera
##dana
##oper
bracing
indifference
projectile
superliga
##chee
realises
upgrading
299
porte
retribution
##vies
nk
stil
##resses
ama
bureaucracy
blackberry
bosch
testosterone
collapses
greer
##pathic
ioc
fifties
malls
##erved
bao
baskets
adolescents
siegfried
##osity
##tosis
mantra
detecting
existent
fledgling
##cchi
dissatisfied
gan
telecommunication
mingled
sobbed
6000
controversies
outdated
taxis
##raus
fright
slams
##lham
##fect
##tten
detectors
fetal
tanned
##uw
fray
goth
olympian
skipping
mandates
scratches
sheng
unspoken
hyundai
tracey
hotspur
restrictive
##buch
americana
mundo
##bari
burroughs
diva
vulcan
##6th
distinctions
thumping
##ngen
mikey
sheds
fide
rescues
springsteen
vested
valuation
##ece
##ely
pinnacle
rake
sylvie
##edo
almond
quivering
##irus
alteration
faltered
##wad
51st
hydra
ticked
##kato
recommends
##dicated
antigua
arjun
stagecoach
wilfred
trickle
pronouns
##pon
aryan
nighttime
##anian
gall
pea
stitch
##hei
leung
milos
##dini
eritrea
nexus
starved
snowfall
kant
parasitic
cot
discus
hana
strikers
appleton
kitchens
##erina
##partisan
##itha
##vius
disclose
metis
##channel
1701
tesla
##vera
fitch
1735
blooded
##tila
decimal
##tang
##bai
cyclones
eun
bottled
peas
pensacola
basha
bolivian
crabs
boil
lanterns
partridge
roofed
1645
necks
##phila
opined
patting
##kla
##lland
chuckles
volta
whereupon
##nche
devout
euroleague
suicidal
##dee
inherently
involuntary
knitting
nasser
##hide
puppets
colourful
courageous
southend
stills
miraculous
hodgson
richer
rochdale
ethernet
greta
uniting
prism
umm
##haya
##itical
##utation
deterioration
pointe
prowess
##ropriation
lids
scranton
billings
subcontinent
##koff
##scope
brute
kellogg
psalms
degraded
##vez
stanisław
##ructured
ferreira
pun
astonishing
gunnar
##yat
arya
prc
gottfried
##tight
excursion
##ographer
dina
##quil
##nare
huffington
illustrious
wilbur
gundam
verandah
##zard
naacp
##odle
constructive
fjord
kade
##naud
generosity
thrilling
baseline
cayman
frankish
plastics
accommodations
zoological
##fting
cedric
qb
motorized
##dome
##otted
squealed
tackled
canucks
budgets
situ
asthma
dail
gabled
grasslands
whimpered
writhing
judgments
##65
minnie
pv
##carbon
bananas
grille
domes
monique
odin
maguire
markham
tierney
##estra
##chua
libel
poke
speedy
atrium
laval
notwithstanding
##edly
fai
kala
##sur
robb
##sma
listings
luz
supplementary
tianjin
##acing
enzo
jd
ric
scanner
croats
transcribed
##49
arden
cv
##hair
##raphy
##lver
##uy
357
seventies
staggering
alam
horticultural
hs
regression
timbers
blasting
##ounded
montagu
manipulating
##cit
catalytic
1550
troopers
##meo
condemnation
fitzpatrick
##oire
##roved
inexperienced
1670
castes
##lative
outing
314
dubois
flicking
quarrel
ste
learners
1625
iq
whistled
##class
282
classify
tariffs
temperament
355
folly
liszt
##yles
immersed
jordanian
ceasefire
apparel
extras
maru
fished
##bio
harta
stockport
assortment
craftsman
paralysis
transmitters
##cola
blindness
##wk
fatally
proficiency
solemnly
##orno
repairing
amore
groceries
ultraviolet
##chase
schoolhouse
##tua
resurgence
nailed
##otype
##×
ruse
saliva
diagrams
##tructing
albans
rann
thirties
1b
antennas
hilarious
cougars
paddington
stats
##eger
breakaway
ipod
reza
authorship
prohibiting
scoffed
##etz
##ttle
conscription
defected
trondheim
##fires
ivanov
keenan
##adan
##ciful
##fb
##slow
locating
##ials
##tford
cadiz
basalt
blankly
interned
rags
rattling
##tick
carpathian
reassured
sync
bum
guildford
iss
staunch
##onga
astronomers
sera
sofie
emergencies
susquehanna
##heard
duc
mastery
vh1
williamsburg
bayer
buckled
craving
##khan
##rdes
bloomington
##write
alton
barbecue
##bians
justine
##hri
##ndt
delightful
smartphone
newtown
photon
retrieval
peugeot
hissing
##monium
##orough
flavors
lighted
relaunched
tainted
##games
##lysis
anarchy
microscopic
hopping
adept
evade
evie
##beau
inhibit
sinn
adjustable
hurst
intuition
wilton
cisco
44th
lawful
lowlands
stockings
thierry
##dalen
##hila
##nai
fates
prank
tb
maison
lobbied
provocative
1724
4a
utopia
##qual
carbonate
gujarati
purcell
##rford
curtiss
##mei
overgrown
arenas
mediation
swallows
##rnik
respectful
turnbull
##hedron
##hope
alyssa
ozone
##ʻi
ami
gestapo
johansson
snooker
canteen
cuff
declines
empathy
stigma
##ags
##iner
##raine
taxpayers
gui
volga
##wright
##copic
lifespan
overcame
tattooed
enactment
giggles
##ador
##camp
barrington
bribe
obligatory
orbiting
peng
##enas
elusive
sucker
##vating
cong
hardship
empowered
anticipating
estrada
cryptic
greasy
detainees
planck
sudbury
plaid
dod
marriott
kayla
##ears
##vb
##zd
mortally
##hein
cognition
radha
319
liechtenstein
meade
richly
argyle
harpsichord
liberalism
trumpets
lauded
tyrant
salsa
tiled
lear
promoters
reused
slicing
trident
##chuk
##gami
##lka
cantor
checkpoint
##points
gaul
leger
mammalian
##tov
##aar
##schaft
doha
frenchman
nirvana
##vino
delgado
headlining
##eron
##iography
jug
tko
1649
naga
intersections
##jia
benfica
nawab
##suka
ashford
gulp
##deck
##vill
##rug
brentford
frazier
pleasures
dunne
potsdam
shenzhen
dentistry
##tec
flanagan
##dorff
##hear
chorale
dinah
prem
quezon
##rogated
relinquished
sutra
terri
##pani
flaps
##rissa
poly
##rnet
homme
aback
##eki
linger
womb
##kson
##lewood
doorstep
orthodoxy
threaded
westfield
##rval
dioceses
fridays
subsided
##gata
loyalists
##biotic
##ettes
letterman
lunatic
prelate
tenderly
invariably
souza
thug
winslow
##otide
furlongs
gogh
jeopardy
##runa
pegasus
##umble
humiliated
standalone
tagged
##roller
freshmen
klan
##bright
attaining
initiating
transatlantic
logged
viz
##uance
1723
combatants
intervening
stephane
chieftain
despised
grazed
317
cdc
galveston
godzilla
macro
simulate
##planes
parades
##esses
960
##ductive
##unes
equator
overdose
##cans
##hosh
##lifting
joshi
epstein
sonora
treacherous
aquatics
manchu
responsive
##sation
supervisory
##christ
##llins
##ibar
##balance
##uso
kimball
karlsruhe
mab
##emy
ignores
phonetic
reuters
spaghetti
820
almighty
danzig
rumbling
tombstone
designations
lured
outset
##felt
supermarkets
##wt
grupo
kei
kraft
susanna
##blood
comprehension
genealogy
##aghan
##verted
redding
##ythe
1722
bowing
##pore
##roi
lest
sharpened
fulbright
valkyrie
sikhs
##unds
swans
bouquet
merritt
##tage
##venting
commuted
redhead
clerks
leasing
cesare
dea
hazy
##vances
fledged
greenfield
servicemen
##gical
armando
blackout
dt
sagged
downloadable
intra
potion
pods
##4th
##mism
xp
attendants
gambia
stale
##ntine
plump
asteroids
rediscovered
buds
flea
hive
##neas
1737
classifications
debuts
##eles
olympus
scala
##eurs
##gno
##mute
hummed
sigismund
visuals
wiggled
await
pilasters
clench
sulfate
##ances
bellevue
enigma
trainee
snort
##sw
clouded
denim
##rank
##rder
churning
hartman
lodges
riches
sima
##missible
accountable
socrates
regulates
mueller
##cr
1702
avoids
solids
himalayas
nutrient
pup
##jevic
squat
fades
nec
##lates
##pina
##rona
##ου
privateer
tequila
##gative
##mpton
apt
hornet
immortals
##dou
asturias
cleansing
dario
##rries
##anta
etymology
servicing
zhejiang
##venor
##nx
horned
erasmus
rayon
relocating
£10
##bags
escalated
promenade
stubble
2010s
artisans
axial
liquids
mora
sho
yoo
##tsky
bundles
oldies
##nally
notification
bastion
##ths
sparkle
##lved
1728
leash
pathogen
highs
##hmi
immature
880
gonzaga
ignatius
mansions
monterrey
sweets
bryson
##loe
polled
regatta
brightest
pei
rosy
squid
hatfield
payroll
addict
meath
cornerback
heaviest
lodging
##mage
capcom
rippled
##sily
barnet
mayhem
ymca
snuggled
rousseau
##cute
blanchard
284
fragmented
leighton
chromosomes
risking
##md
##strel
##utter
corinne
coyotes
cynical
hiroshi
yeomanry
##ractive
ebook
grading
mandela
plume
agustin
magdalene
##rkin
bea
femme
trafford
##coll
##lun
##tance
52nd
fourier
upton
##mental
camilla
gust
iihf
islamabad
longevity
##kala
feldman
netting
##rization
endeavour
foraging
mfa
orr
##open
greyish
contradiction
graz
##ruff
handicapped
marlene
tweed
oaxaca
spp
campos
miocene
pri
configured
cooks
pluto
cozy
pornographic
##entes
70th
fairness
glided
jonny
lynne
rounding
sired
##emon
##nist
remade
uncover
##mack
complied
lei
newsweek
##jured
##parts
##enting
##pg
293
finer
guerrillas
athenian
deng
disused
stepmother
accuse
gingerly
seduction
521
confronting
##walker
##going
gora
nostalgia
sabres
virginity
wrenched
##minated
syndication
wielding
eyre
##56
##gnon
##igny
behaved
taxpayer
sweeps
##growth
childless
gallant
##ywood
amplified
geraldine
scrape
##ffi
babylonian
fresco
##rdan
##kney
##position
1718
restricting
tack
fukuoka
osborn
selector
partnering
##dlow
318
gnu
kia
tak
whitley
gables
##54
##mania
mri
softness
immersion
##bots
##evsky
1713
chilling
insignificant
pcs
##uis
elites
lina
purported
supplemental
teaming
##americana
##dding
##inton
proficient
rouen
##nage
##rret
niccolo
selects
##bread
fluffy
1621
gruff
knotted
mukherjee
polgara
thrash
nicholls
secluded
smoothing
thru
corsica
loaf
whitaker
inquiries
##rrier
##kam
indochina
289
marlins
myles
peking
##tea
extracts
pastry
superhuman
connacht
vogel
##ditional
##het
##udged
##lash
gloss
quarries
refit
teaser
##alic
##gaon
20s
materialized
sling
camped
pickering
tung
tracker
pursuant
##cide
cranes
soc
##cini
##typical
##viere
anhalt
overboard
workout
chores
fares
orphaned
stains
##logie
fenton
surpassing
joyah
triggers
##itte
grandmaster
##lass
##lists
clapping
fraudulent
ledger
nagasaki
##cor
##nosis
##tsa
eucalyptus
tun
##icio
##rney
##tara
dax
heroism
ina
wrexham
onboard
unsigned
##dates
moshe
galley
winnie
droplets
exiles
praises
watered
noodles
##aia
fein
adi
leland
multicultural
stink
bingo
comets
erskine
modernized
canned
constraint
domestically
chemotherapy
featherweight
stifled
##mum
darkly
irresistible
refreshing
hasty
isolate
##oys
kitchener
planners
##wehr
cages
yarn
implant
toulon
elects
childbirth
yue
##lind
##lone
cn
rightful
sportsman
junctions
remodeled
specifies
##rgh
291
##oons
complimented
##urgent
lister
ot
##logic
bequeathed
cheekbones
fontana
gabby
##dial
amadeus
corrugated
maverick
resented
triangles
##hered
##usly
nazareth
tyrol
1675
assent
poorer
sectional
aegean
##cous
296
nylon
ghanaian
##egorical
##weig
cushions
forbid
fusiliers
obstruction
somerville
##scia
dime
earrings
elliptical
leyte
oder
polymers
timmy
atm
midtown
piloted
settles
continual
externally
mayfield
##uh
enrichment
henson
keane
persians
1733
benji
braden
pep
324
##efe
contenders
pepsi
valet
##isches
298
##asse
##earing
goofy
stroll
##amen
authoritarian
occurrences
adversary
ahmedabad
tangent
toppled
dorchester
1672
modernism
marxism
islamist
charlemagne
exponential
racks
unicode
brunette
mbc
pic
skirmish
##bund
##lad
##powered
##yst
hoisted
messina
shatter
##ctum
jedi
vantage
##music
##neil
clemens
mahmoud
corrupted
authentication
lowry
nils
##washed
omnibus
wounding
jillian
##itors
##opped
serialized
narcotics
handheld
##arm
##plicity
intersecting
stimulating
##onis
crate
fellowships
hemingway
casinos
climatic
fordham
copeland
drip
beatty
leaflets
robber
brothel
madeira
##hedral
sphinx
ultrasound
##vana
valor
forbade
leonid
villas
##aldo
duane
marquez
##cytes
disadvantaged
forearms
kawasaki
reacts
consular
lax
uncles
uphold
##hopper
concepcion
dorsey
lass
##izan
arching
passageway
1708
researches
tia
internationals
##graphs
##opers
distinguishes
javanese
divert
##uven
plotted
##listic
##rwin
##erik
##tify
affirmative
signifies
validation
##bson
kari
felicity
georgina
zulu
##eros
##rained
##rath
overcoming
##dot
argyll
##rbin
1734
chiba
ratification
windy
earls
parapet
##marks
hunan
pristine
astrid
punta
##gart
brodie
##kota
##oder
malaga
minerva
rouse
##phonic
bellowed
pagoda
portals
reclamation
##gur
##odies
##⁄₄
parentheses
quoting
allergic
palette
showcases
benefactor
heartland
nonlinear
##tness
bladed
cheerfully
scans
##ety
##hone
1666
girlfriends
pedersen
hiram
sous
##liche
##nator
1683
##nery
##orio
##umen
bobo
primaries
smiley
##cb
unearthed
uniformly
fis
metadata
1635
ind
##oted
recoil
##titles
##tura
##ια
406
hilbert
jamestown
mcmillan
tulane
seychelles
##frid
antics
coli
fated
stucco
##grants
1654
bulky
accolades
arrays
caledonian
carnage
optimism
puebla
##tative
##cave
enforcing
rotherham
seo
dunlop
aeronautics
chimed
incline
zoning
archduke
hellenistic
##oses
##sions
candi
thong
##ople
magnate
rustic
##rsk
projective
slant
##offs
danes
hollis
vocalists
##ammed
congenital
contend
gesellschaft
##ocating
##pressive
douglass
quieter
##cm
##kshi
howled
salim
spontaneously
townsville
buena
southport
##bold
kato
1638
faerie
stiffly
##vus
##rled
297
flawless
realising
taboo
##7th
bytes
straightening
356
jena
##hid
##rmin
cartwright
berber
bertram
soloists
411
noses
417
coping
fission
hardin
inca
##cen
1717
mobilized
vhf
##raf
biscuits
curate
##85
##anial
331
gaunt
neighbourhoods
1540
##abas
blanca
bypassed
sockets
behold
coincidentally
##bane
nara
shave
splinter
terrific
##arion
##erian
commonplace
juris
redwood
waistband
boxed
caitlin
fingerprints
jennie
naturalized
##ired
balfour
craters
jody
bungalow
hugely
quilt
glitter
pigeons
undertaker
bulging
constrained
goo
##sil
##akh
assimilation
reworked
##person
persuasion
##pants
felicia
##cliff
##ulent
1732
explodes
##dun
##inium
##zic
lyman
vulture
hog
overlook
begs
northwards
ow
spoil
##urer
fatima
favorably
accumulate
sargent
sorority
corresponded
dispersal
kochi
toned
##imi
##lita
internacional
newfound
##agger
##lynn
##rigue
booths
peanuts
##eborg
medicare
muriel
nur
##uram
crates
millennia
pajamas
worsened
##breakers
jimi
vanuatu
yawned
##udeau
carousel
##hony
hurdle
##ccus
##mounted
##pod
rv
##eche
airship
ambiguity
compulsion
recapture
##claiming
arthritis
##osomal
1667
asserting
ngc
sniffing
dade
discontent
glendale
ported
##amina
defamation
rammed
##scent
fling
livingstone
##fleet
875
##ppy
apocalyptic
comrade
lcd
##lowe
cessna
eine
persecuted
subsistence
demi
hoop
reliefs
710
coptic
progressing
stemmed
perpetrators
1665
priestess
##nio
dobson
ebony
rooster
itf
tortricidae
##bbon
##jian
cleanup
##jean
##øy
1721
eighties
taxonomic
holiness
##hearted
##spar
antilles
showcasing
stabilized
##nb
gia
mascara
michelangelo
dawned
##uria
##vinsky
extinguished
fitz
grotesque
£100
##fera
##loid
##mous
barges
neue
throbbed
cipher
johnnie
##a1
##mpt
outburst
##swick
spearheaded
administrations
c1
heartbreak
pixels
pleasantly
##enay
lombardy
plush
##nsed
bobbie
##hly
reapers
tremor
xiang
minogue
substantive
hitch
barak
##wyl
kwan
##encia
910
obscene
elegance
indus
surfer
bribery
conserve
##hyllum
##masters
horatio
##fat
apes
rebound
psychotic
##pour
iteration
##mium
##vani
botanic
horribly
antiques
dispose
paxton
##hli
##wg
timeless
1704
disregard
engraver
hounds
##bau
##version
looted
uno
facilitates
groans
masjid
rutland
antibody
disqualification
decatur
footballers
quake
slacks
48th
rein
scribe
stabilize
commits
exemplary
tho
##hort
##chison
pantry
traversed
##hiti
disrepair
identifiable
vibrated
baccalaureate
##nnis
csa
interviewing
##iensis
##raße
greaves
wealthiest
343
classed
jogged
£5
##58
##atal
illuminating
knicks
respecting
##uno
scrubbed
##iji
##dles
kruger
moods
growls
raider
silvia
chefs
kam
vr
cree
percival
##terol
gunter
counterattack
defiant
henan
ze
##rasia
##riety
equivalence
submissions
##fra
##thor
bautista
mechanically
##heater
cornice
herbal
templar
##mering
outputs
ruining
ligand
renumbered
extravagant
mika
blockbuster
eta
insurrection
##ilia
darkening
ferocious
pianos
strife
kinship
##aer
melee
##anor
##iste
##may
##oue
decidedly
weep
##jad
##missive
##ppel
354
puget
unease
##gnant
1629
hammering
kassel
ob
wessex
##lga
bromwich
egan
paranoia
utilization
##atable
##idad
contradictory
provoke
##ols
##ouring
##tangled
knesset
##very
##lette
plumbing
##sden
##¹
greensboro
occult
sniff
338
zev
beaming
gamer
haggard
mahal
##olt
##pins
mendes
utmost
briefing
gunnery
##gut
##pher
##zh
##rok
1679
khalifa
sonya
##boot
principals
urbana
wiring
##liffe
##minating
##rrado
dahl
nyu
skepticism
np
townspeople
ithaca
lobster
somethin
##fur
##arina
##−1
freighter
zimmerman
biceps
contractual
##herton
amend
hurrying
subconscious
##anal
336
meng
clermont
spawning
##eia
##lub
dignitaries
impetus
snacks
spotting
twigs
##bilis
##cz
##ouk
libertadores
nic
skylar
##aina
##firm
gustave
asean
##anum
dieter
legislatures
flirt
bromley
trolls
umar
##bbies
##tyle
blah
parc
bridgeport
crank
negligence
##nction
46th
constantin
molded
bandages
seriousness
00pm
siegel
carpets
compartments
upbeat
statehood
##dner
##edging
marko
730
platt
##hane
paving
##iy
1738
abbess
impatience
limousine
nbl
##talk
441
lucille
mojo
nightfall
robbers
##nais
karel
brisk
calves
replicate
ascribed
telescopes
##olf
intimidated
##reen
ballast
specialization
##sit
aerodynamic
caliphate
rainer
visionary
##arded
epsilon
##aday
##onte
aggregation
auditory
boosted
reunification
kathmandu
loco
robyn
402
acknowledges
appointing
humanoid
newell
redeveloped
restraints
##tained
barbarians
chopper
1609
italiana
##lez
##lho
investigates
wrestlemania
##anies
##bib
690
##falls
creaked
dragoons
gravely
minions
stupidity
volley
##harat
##week
musik
##eries
##uously
fungal
massimo
semantics
malvern
##ahl
##pee
discourage
embryo
imperialism
1910s
profoundly
##ddled
jiangsu
sparkled
stat
##holz
sweatshirt
tobin
##iction
sneered
##cheon
##oit
brit
causal
smyth
##neuve
diffuse
perrin
silvio
##ipes
##recht
detonated
iqbal
selma
##nism
##zumi
roasted
##riders
tay
##ados
##mament
##mut
##rud
840
completes
nipples
cfa
flavour
hirsch
##laus
calderon
sneakers
moravian
##ksha
1622
rq
294
##imeters
bodo
##isance
##pre
##ronia
anatomical
excerpt
##lke
dh
kunst
##tablished
##scoe
biomass
panted
unharmed
gael
housemates
montpellier
##59
coa
rodents
tonic
hickory
singleton
##taro
451
1719
aldo
breaststroke
dempsey
och
rocco
##cuit
merton
dissemination
midsummer
serials
##idi
haji
polynomials
##rdon
gs
enoch
prematurely
shutter
taunton
£3
##grating
##inates
archangel
harassed
##asco
326
archway
dazzling
##ecin
1736
sumo
wat
##kovich
1086
honneur
##ently
##nostic
##ttal
##idon
1605
403
1716
blogger
rents
##gnan
hires
##ikh
##dant
howie
##rons
handler
retracted
shocks
1632
arun
duluth
kepler
trumpeter
##lary
peeking
seasoned
trooper
##mara
laszlo
##iciencies
##rti
heterosexual
##inatory
##ssion
indira
jogging
##inga
##lism
beit
dissatisfaction
malice
##ately
nedra
peeling
##rgeon
47th
stadiums
475
vertigo
##ains
iced
restroom
##plify
##tub
illustrating
pear
##chner
##sibility
inorganic
rappers
receipts
watery
##kura
lucinda
##oulos
reintroduced
##8th
##tched
gracefully
saxons
nutritional
wastewater
rained
favourites
bedrock
fisted
hallways
likeness
upscale
##lateral
1580
blinds
prequel
##pps
##tama
deter
humiliating
restraining
tn
vents
1659
laundering
recess
rosary
tractors
coulter
federer
##ifiers
##plin
persistence
##quitable
geschichte
pendulum
quakers
##beam
bassett
pictorial
buffet
koln
##sitor
drills
reciprocal
shooters
##57
##cton
##tees
converge
pip
dmitri
donnelly
yamamoto
aqua
azores
demographics
hypnotic
spitfire
suspend
wryly
roderick
##rran
sebastien
##asurable
mavericks
##fles
##200
himalayan
prodigy
##iance
transvaal
demonstrators
handcuffs
dodged
mcnamara
sublime
1726
crazed
##efined
##till
ivo
pondered
reconciled
shrill
sava
##duk
bal
cad
heresy
jaipur
goran
##nished
341
lux
shelly
whitehall
##hre
israelis
peacekeeping
##wled
1703
demetrius
ousted
##arians
##zos
beale
anwar
backstroke
raged
shrinking
cremated
##yck
benign
towing
wadi
darmstadt
landfill
parana
soothe
colleen
sidewalks
mayfair
tumble
hepatitis
ferrer
superstructure
##gingly
##urse
##wee
anthropological
translators
##mies
closeness
hooves
##pw
mondays
##roll
##vita
landscaping
##urized
purification
sock
thorns
thwarted
jalan
tiberius
##taka
saline
##rito
confidently
khyber
sculptors
##ij
brahms
hammersmith
inspectors
battista
fivb
fragmentation
hackney
##uls
arresting
exercising
antoinette
bedfordshire
##zily
dyed
##hema
1656
racetrack
variability
##tique
1655
austrians
deteriorating
madman
theorists
aix
lehman
weathered
1731
decreed
eruptions
1729
flaw
quinlan
sorbonne
flutes
nunez
1711
adored
downwards
fable
rasped
1712
moritz
mouthful
renegade
shivers
stunts
dysfunction
restrain
translit
327
pancakes
##avio
##cision
##tray
351
vial
##lden
bain
##maid
##oxide
chihuahua
malacca
vimes
##rba
##rnier
1664
donnie
plaques
##ually
337
bangs
floppy
huntsville
loretta
nikolay
##otte
eater
handgun
ubiquitous
##hett
eras
zodiac
1634
##omorphic
1820s
##zog
cochran
##bula
##lithic
warring
##rada
dalai
excused
blazers
mcconnell
reeling
bot
este
##abi
geese
hoax
taxon
##bla
guitarists
##icon
condemning
hunts
inversion
moffat
taekwondo
##lvis
1624
stammered
##rest
##rzy
sousa
fundraiser
marylebone
navigable
uptown
cabbage
daniela
salman
shitty
whimper
##kian
##utive
programmers
protections
rm
##rmi
##rued
forceful
##enes
fuss
##tao
##wash
brat
oppressive
reykjavik
spartak
ticking
##inkles
##kiewicz
adolph
horst
maui
protege
straighten
cpc
landau
concourse
clements
resultant
##ando
imaginative
joo
reactivated
##rem
##ffled
##uising
consultative
##guide
flop
kaitlyn
mergers
parenting
somber
##vron
supervise
vidhan
##imum
courtship
exemplified
harmonies
medallist
refining
##rrow
##ка
amara
##hum
780
goalscorer
sited
overshadowed
rohan
displeasure
secretive
multiplied
osman
##orth
engravings
padre
##kali
##veda
miniatures
mis
##yala
clap
pali
rook
##cana
1692
57th
antennae
astro
oskar
1628
bulldog
crotch
hackett
yucatan
##sure
amplifiers
brno
ferrara
migrating
##gree
thanking
turing
##eza
mccann
ting
andersson
onslaught
gaines
ganga
incense
standardization
##mation
sentai
scuba
stuffing
turquoise
waivers
alloys
##vitt
regaining
vaults
##clops
##gizing
digger
furry
memorabilia
probing
##iad
payton
rec
deutschland
filippo
opaque
seamen
zenith
afrikaans
##filtration
disciplined
inspirational
##merie
banco
confuse
grafton
tod
##dgets
championed
simi
anomaly
biplane
##ceptive
electrode
##para
1697
cleavage
crossbow
swirl
informant
##lars
##osta
afi
bonfire
spec
##oux
lakeside
slump
##culus
##lais
##qvist
##rrigan
1016
facades
borg
inwardly
cervical
xl
pointedly
050
stabilization
##odon
chests
1699
hacked
ctv
orthogonal
suzy
##lastic
gaulle
jacobite
rearview
##cam
##erted
ashby
##drik
##igate
##mise
##zbek
affectionately
canine
disperse
latham
##istles
##ivar
spielberg
##orin
##idium
ezekiel
cid
##sg
durga
middletown
##cina
customized
frontiers
harden
##etano
##zzy
1604
bolsheviks
##66
coloration
yoko
##bedo
briefs
slabs
debra
liquidation
plumage
##oin
blossoms
dementia
subsidy
1611
proctor
relational
jerseys
parochial
ter
##ici
esa
peshawar
cavalier
loren
cpi
idiots
shamrock
1646
dutton
malabar
mustache
##endez
##ocytes
referencing
terminates
marche
yarmouth
##sop
acton
mated
seton
subtly
baptised
beige
extremes
jolted
kristina
telecast
##actic
safeguard
waldo
##baldi
##bular
endeavors
sloppy
subterranean
##ensburg
##itung
delicately
pigment
tq
##scu
1626
##ound
collisions
coveted
herds
##personal
##meister
##nberger
chopra
##ricting
abnormalities
defective
galician
lucie
##dilly
alligator
likened
##genase
burundi
clears
complexion
derelict
deafening
diablo
fingered
champaign
dogg
enlist
isotope
labeling
mrna
##erre
brilliance
marvelous
##ayo
1652
crawley
ether
footed
dwellers
deserts
hamish
rubs
warlock
skimmed
##lizer
870
buick
embark
heraldic
irregularities
##ajan
kiara
##kulam
##ieg
antigen
kowalski
##lge
oakley
visitation
##mbit
vt
##suit
1570
murderers
##miento
##rites
chimneys
##sling
condemn
custer
exchequer
havre
##ghi
fluctuations
##rations
dfb
hendricks
vaccines
##tarian
nietzsche
biking
juicy
##duced
brooding
scrolling
selangor
##ragan
352
annum
boomed
seminole
sugarcane
##dna
departmental
dismissing
innsbruck
arteries
ashok
batavia
daze
kun
overtook
##rga
##tlan
beheaded
gaddafi
holm
electronically
faulty
galilee
fractures
kobayashi
##lized
gunmen
magma
aramaic
mala
eastenders
inference
messengers
bf
##qu
407
bathrooms
##vere
1658
flashbacks
ideally
misunderstood
##jali
##weather
mendez
##grounds
505
uncanny
##iii
1709
friendships
##nbc
sacrament
accommodated
reiterated
logistical
pebbles
thumped
##escence
administering
decrees
drafts
##flight
##cased
##tula
futuristic
picket
intimidation
winthrop
##fahan
interfered
339
afar
francoise
morally
uta
cochin
croft
dwarfs
##bruck
##dents
##nami
biker
##hner
##meral
nano
##isen
##ometric
##pres
##ан
brightened
meek
parcels
securely
gunners
##jhl
##zko
agile
hysteria
##lten
##rcus
bukit
champs
chevy
cuckoo
leith
sadler
theologians
welded
##section
1663
jj
plurality
xander
##rooms
##formed
shredded
temps
intimately
pau
tormented
##lok
##stellar
1618
charred
ems
essen
##mmel
alarms
spraying
ascot
blooms
twinkle
##abia
##apes
internment
obsidian
##chaft
snoop
##dav
##ooping
malibu
##tension
quiver
##itia
hays
mcintosh
travers
walsall
##ffie
1623
beverley
schwarz
plunging
structurally
m3
rosenthal
vikram
##tsk
770
ghz
##onda
##tiv
chalmers
groningen
pew
reckon
unicef
##rvis
55th
##gni
1651
sulawesi
avila
cai
metaphysical
screwing
turbulence
##mberg
augusto
samba
56th
baffled
momentary
toxin
##urian
##wani
aachen
condoms
dali
steppe
##3d
##app
##oed
##year
adolescence
dauphin
electrically
inaccessible
microscopy
nikita
##ega
atv
##cel
##enter
##oles
##oteric
##ы
accountants
punishments
wrongly
bribes
adventurous
clinch
flinders
southland
##hem
##kata
gough
##ciency
lads
soared
##ה
undergoes
deformation
outlawed
rubbish
##arus
##mussen
##nidae
##rzburg
arcs
##ingdon
##tituted
1695
wheelbase
wheeling
bombardier
campground
zebra
##lices
##oj
##bain
lullaby
##ecure
donetsk
wylie
grenada
##arding
##ης
squinting
eireann
opposes
##andra
maximal
runes
##broken
##cuting
##iface
##ror
##rosis
additive
britney
adultery
triggering
##drome
detrimental
aarhus
containment
jc
swapped
vichy
##ioms
madly
##oric
##rag
brant
##ckey
##trix
1560
1612
broughton
rustling
##stems
##uder
asbestos
mentoring
##nivorous
finley
leaps
##isan
apical
pry
slits
substitutes
##dict
intuitive
fantasia
insistent
unreasonable
##igen
##vna
domed
hannover
margot
ponder
##zziness
impromptu
jian
lc
rampage
stemming
##eft
andrey
gerais
whichever
amnesia
appropriated
anzac
clicks
modifying
ultimatum
cambrian
maids
verve
yellowstone
##mbs
conservatoire
##scribe
adherence
dinners
spectra
imperfect
mysteriously
sidekick
tatar
tuba
##aks
##ifolia
distrust
##athan
##zle
c2
ronin
zac
##pse
celaena
instrumentalist
scents
skopje
##mbling
comical
compensated
vidal
condor
intersect
jingle
wavelengths
##urrent
mcqueen
##izzly
carp
weasel
422
kanye
militias
postdoctoral
eugen
gunslinger
##ɛ
faux
hospice
##for
appalled
derivation
dwarves
##elis
dilapidated
##folk
astoria
philology
##lwyn
##otho
##saka
inducing
philanthropy
##bf
##itative
geek
markedly
sql
##yce
bessie
indices
rn
##flict
495
frowns
resolving
weightlifting
tugs
cleric
contentious
1653
mania
rms
##miya
##reate
##ruck
##tucket
bien
eels
marek
##ayton
##cence
discreet
unofficially
##ife
leaks
##bber
1705
332
dung
compressor
hillsborough
pandit
shillings
distal
##skin
381
##tat
##you
nosed
##nir
mangrove
undeveloped
##idia
textures
##inho
##500
##rise
ae
irritating
nay
amazingly
bancroft
apologetic
compassionate
kata
symphonies
##lovic
airspace
##lch
930
gifford
precautions
fulfillment
sevilla
vulgar
martinique
##urities
looting
piccolo
tidy
##dermott
quadrant
armchair
incomes
mathematicians
stampede
nilsson
##inking
##scan
foo
quarterfinal
##ostal
shang
shouldered
squirrels
##owe
344
vinegar
##bner
##rchy
##systems
delaying
##trics
ars
dwyer
rhapsody
sponsoring
##gration
bipolar
cinder
starters
##olio
##urst
421
signage
##nty
aground
figurative
mons
acquaintances
duets
erroneously
soyuz
elliptic
recreated
##cultural
##quette
##ssed
##tma
##zcz
moderator
scares
##itaire
##stones
##udence
juniper
sighting
##just
##nsen
britten
calabria
ry
bop
cramer
forsyth
stillness
##л
airmen
gathers
unfit
##umber
##upt
taunting
##rip
seeker
streamlined
##bution
holster
schumann
tread
vox
##gano
##onzo
strive
dil
reforming
covent
newbury
predicting
##orro
decorate
tre
##puted
andover
ie
asahi
dept
dunkirk
gills
##tori
buren
huskies
##stis
##stov
abstracts
bets
loosen
##opa
1682
yearning
##glio
##sir
berman
effortlessly
enamel
napoli
persist
##peration
##uez
attache
elisa
b1
invitations
##kic
accelerating
reindeer
boardwalk
clutches
nelly
polka
starbucks
##kei
adamant
huey
lough
unbroken
adventurer
embroidery
inspecting
stanza
##ducted
naia
taluka
##pone
##roids
chases
deprivation
florian
##jing
##ppet
earthly
##lib
##ssee
colossal
foreigner
vet
freaks
patrice
rosewood
triassic
upstate
##pkins
dominates
ata
chants
ks
vo
##400
##bley
##raya
##rmed
555
agra
infiltrate
##ailing
##ilation
##tzer
##uppe
##werk
binoculars
enthusiast
fujian
squeak
##avs
abolitionist
almeida
boredom
hampstead
marsden
rations
##ands
inflated
334
bonuses
rosalie
patna
##rco
329
detachments
penitentiary
54th
flourishing
woolf
##dion
##etched
papyrus
##lster
##nsor
##toy
bobbed
dismounted
endelle
inhuman
motorola
tbs
wince
wreath
##ticus
hideout
inspections
sanjay
disgrace
infused
pudding
stalks
##urbed
arsenic
leases
##hyl
##rrard
collarbone
##waite
##wil
dowry
##bant
##edance
genealogical
nitrate
salamanca
scandals
thyroid
necessitated
##!
##"
###
##$
##%
##&
##'
##(
##)
##*
##+
##,
##-
##.
##/
##:
##;
##<
##=
##>
##?
##@
##[
##\
##]
##^
##_
##`
##{
##|
##}
##~
##¡
##¢
##£
##¤
##¥
##¦
##§
##¨
##©
##ª
##«
##¬
##®
##±
##´
##µ
##¶
##·
##º
##»
##¼
##¾
##¿
##æ
##ð
##÷
##þ
##đ
##ħ
##ŋ
##œ
##ƒ
##ɐ
##ɑ
##ɒ
##ɔ
##ɕ
##ə
##ɡ
##ɣ
##ɨ
##ɪ
##ɫ
##ɬ
##ɯ
##ɲ
##ɴ
##ɹ
##ɾ
##ʀ
##ʁ
##ʂ
##ʃ
##ʉ
##ʊ
##ʋ
##ʌ
##ʎ
##ʐ
##ʑ
##ʒ
##ʔ
##ʰ
##ʲ
##ʳ
##ʷ
##ʸ
##ʻ
##ʼ
##ʾ
##ʿ
##ˈ
##ˡ
##ˢ
##ˣ
##ˤ
##β
##γ
##δ
##ε
##ζ
##θ
##κ
##λ
##μ
##ξ
##ο
##π
##ρ
##σ
##τ
##υ
##φ
##χ
##ψ
##ω
##б
##г
##д
##ж
##з
##м
##п
##с
##у
##ф
##х
##ц
##ч
##ш
##щ
##ъ
##э
##ю
##ђ
##є
##і
##ј
##љ
##њ
##ћ
##ӏ
##ա
##բ
##գ
##դ
##ե
##թ
##ի
##լ
##կ
##հ
##մ
##յ
##ն
##ո
##պ
##ս
##վ
##տ
##ր
##ւ
##ք
##־
##א
##ב
##ג
##ד
##ו
##ז
##ח
##ט
##י
##ך
##כ
##ל
##ם
##מ
##ן
##נ
##ס
##ע
##ף
##פ
##ץ
##צ
##ק
##ר
##ש
##ת
##،
##ء
##ب
##ت
##ث
##ج
##ح
##خ
##ذ
##ز
##س
##ش
##ص
##ض
##ط
##ظ
##ع
##غ
##ـ
##ف
##ق
##ك
##و
##ى
##ٹ
##پ
##چ
##ک
##گ
##ں
##ھ
##ہ
##ے
##अ
##आ
##उ
##ए
##क
##ख
##ग
##च
##ज
##ट
##ड
##ण
##त
##थ
##द
##ध
##न
##प
##ब
##भ
##म
##य
##र
##ल
##व
##श
##ष
##स
##ह
##ा
##ि
##ी
##ो
##।
##॥
##ং
##অ
##আ
##ই
##উ
##এ
##ও
##ক
##খ
##গ
##চ
##ছ
##জ
##ট
##ড
##ণ
##ত
##থ
##দ
##ধ
##ন
##প
##ব
##ভ
##ম
##য
##র
##ল
##শ
##ষ
##স
##হ
##া
##ি
##ী
##ে
##க
##ச
##ட
##த
##ந
##ன
##ப
##ம
##ய
##ர
##ல
##ள
##வ
##ா
##ி
##ு
##ே
##ை
##ನ
##ರ
##ಾ
##ක
##ය
##ර
##ල
##ව
##ා
##ก
##ง
##ต
##ท
##น
##พ
##ม
##ย
##ร
##ล
##ว
##ส
##อ
##า
##เ
##་
##།
##ག
##ང
##ད
##ན
##པ
##བ
##མ
##འ
##ར
##ལ
##ས
##မ
##ა
##ბ
##გ
##დ
##ე
##ვ
##თ
##ი
##კ
##ლ
##მ
##ნ
##ო
##რ
##ს
##ტ
##უ
##ᄀ
##ᄂ
##ᄃ
##ᄅ
##ᄆ
##ᄇ
##ᄉ
##ᄊ
##ᄋ
##ᄌ
##ᄎ
##ᄏ
##ᄐ
##ᄑ
##ᄒ
##ᅡ
##ᅢ
##ᅥ
##ᅦ
##ᅧ
##ᅩ
##ᅪ
##ᅭ
##ᅮ
##ᅯ
##ᅲ
##ᅳ
##ᅴ
##ᅵ
##ᆨ
##ᆫ
##ᆯ
##ᆷ
##ᆸ
##ᆼ
##ᴬ
##ᴮ
##ᴰ
##ᴵ
##ᴺ
##ᵀ
##ᵃ
##ᵇ
##ᵈ
##ᵉ
##ᵍ
##ᵏ
##ᵐ
##ᵒ
##ᵖ
##ᵗ
##ᵘ
##ᵣ
##ᵤ
##ᵥ
##ᶜ
##ᶠ
##‐
##‑
##‒
##–
##—
##―
##‖
##‘
##’
##‚
##“
##”
##„
##†
##‡
##•
##…
##‰
##′
##″
##›
##‿
##⁄
##⁰
##ⁱ
##⁴
##⁵
##⁶
##⁷
##⁸
##⁹
##⁻
##ⁿ
##₅
##₆
##₇
##₈
##₉
##₊
##₍
##₎
##ₐ
##ₑ
##ₒ
##ₓ
##ₕ
##ₖ
##ₗ
##ₘ
##ₚ
##ₛ
##ₜ
##₤
##₩
##€
##₱
##₹
##ℓ
##№
##ℝ
##™
##⅓
##⅔
##←
##↑
##→
##↓
##↔
##↦
##⇄
##⇌
##⇒
##∂
##∅
##∆
##∇
##∈
##∗
##∘
##√
##∞
##∧
##∨
##∩
##∪
##≈
##≡
##≤
##≥
##⊂
##⊆
##⊕
##⊗
##⋅
##─
##│
##■
##▪
##●
##★
##☆
##☉
##♠
##♣
##♥
##♦
##♯
##⟨
##⟩
##ⱼ
##⺩
##⺼
##⽥
##、
##。
##〈
##〉
##《
##》
##「
##」
##『
##』
##〜
##あ
##い
##う
##え
##お
##か
##き
##く
##け
##こ
##さ
##し
##す
##せ
##そ
##た
##ち
##っ
##つ
##て
##と
##な
##に
##ぬ
##ね
##の
##は
##ひ
##ふ
##へ
##ほ
##ま
##み
##む
##め
##も
##や
##ゆ
##よ
##ら
##り
##る
##れ
##ろ
##を
##ん
##ァ
##ア
##ィ
##イ
##ウ
##ェ
##エ
##オ
##カ
##キ
##ク
##ケ
##コ
##サ
##シ
##ス
##セ
##タ
##チ
##ッ
##ツ
##テ
##ト
##ナ
##ニ
##ノ
##ハ
##ヒ
##フ
##ヘ
##ホ
##マ
##ミ
##ム
##メ
##モ
##ャ
##ュ
##ョ
##ラ
##リ
##ル
##レ
##ロ
##ワ
##ン
##・
##ー
##一
##三
##上
##下
##不
##世
##中
##主
##久
##之
##也
##事
##二
##五
##井
##京
##人
##亻
##仁
##介
##代
##仮
##伊
##会
##佐
##侍
##保
##信
##健
##元
##光
##八
##公
##内
##出
##分
##前
##劉
##力
##加
##勝
##北
##区
##十
##千
##南
##博
##原
##口
##古
##史
##司
##合
##吉
##同
##名
##和
##囗
##四
##国
##國
##土
##地
##坂
##城
##堂
##場
##士
##夏
##外
##大
##天
##太
##夫
##奈
##女
##子
##学
##宀
##宇
##安
##宗
##定
##宣
##宮
##家
##宿
##寺
##將
##小
##尚
##山
##岡
##島
##崎
##川
##州
##巿
##帝
##平
##年
##幸
##广
##弘
##張
##彳
##後
##御
##德
##心
##忄
##志
##忠
##愛
##成
##我
##戦
##戸
##手
##扌
##政
##文
##新
##方
##日
##明
##星
##春
##昭
##智
##曲
##書
##月
##有
##朝
##木
##本
##李
##村
##東
##松
##林
##森
##楊
##樹
##橋
##歌
##止
##正
##武
##比
##氏
##民
##水
##氵
##氷
##永
##江
##沢
##河
##治
##法
##海
##清
##漢
##瀬
##火
##版
##犬
##王
##生
##田
##男
##疒
##発
##白
##的
##皇
##目
##相
##省
##真
##石
##示
##社
##神
##福
##禾
##秀
##秋
##空
##立
##章
##竹
##糹
##美
##義
##耳
##良
##艹
##花
##英
##華
##葉
##藤
##行
##街
##西
##見
##訁
##語
##谷
##貝
##貴
##車
##軍
##辶
##道
##郎
##郡
##部
##都
##里
##野
##金
##鈴
##镇
##長
##門
##間
##阝
##阿
##陳
##陽
##雄
##青
##面
##風
##食
##香
##馬
##高
##龍
##龸
##fi
##fl
##!
##(
##)
##,
##-
##.
##/
##:
##?
##~
|
TensorFlow/Detection/SSD/models/research/slim/scripts | scripts | export_mobilenet | #!/bin/bash
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# This script prepares the various versions of the MobileNet models for use in
# a mobile application. If you don't specify your own trained checkpoint file,
# it will download pretrained checkpoints for ImageNet. You'll also need to
# have a copy of the TensorFlow source code to run some of the commands; by
# default it is looked for in ../tensorflow, but you can set the
# TENSORFLOW_PATH environment variable before calling the script if your
# source is in a different location.
# The main slim/nets/mobilenet_v1.md description has more details about the
# model. The key points are that it comes in four size versions (1.0, 0.75,
# 0.50, and 0.25), which control the number of parameters and so the file size
# of the model, and that the input image size can be 224, 192, 160, or 128
# pixels, which affects the amount of computation needed and the latency.
# Here's an example generating a frozen model from pretrained weights:
#
#   ./export_mobilenet.sh 1.0 224
#
set -e
print_usage () {
echo "Creates a frozen mobilenet model suitable for mobile use"
echo "Usage:"
echo "$0 <mobilenet version> <input size> [checkpoint path]"
}
MOBILENET_VERSION=$1
IMAGE_SIZE=$2
CHECKPOINT=$3
if [[ ${MOBILENET_VERSION} = "1.0" ]]; then
SLIM_NAME=mobilenet_v1
elif [[ ${MOBILENET_VERSION} = "0.75" ]]; then
SLIM_NAME=mobilenet_v1_075
elif [[ ${MOBILENET_VERSION} = "0.50" ]]; then
SLIM_NAME=mobilenet_v1_050
elif [[ ${MOBILENET_VERSION} = "0.25" ]]; then
SLIM_NAME=mobilenet_v1_025
else
echo "Bad mobilenet version, should be one of 1.0, 0.75, 0.50, or 0.25"
print_usage
exit 1
fi
if [[ ${IMAGE_SIZE} -ne "224" ]] && [[ ${IMAGE_SIZE} -ne "192" ]] && [[ ${IMAGE_SIZE} -ne "160" ]] && [[ ${IMAGE_SIZE} -ne "128" ]]; then
echo "Bad input image size, should be one of 224, 192, 160, or 128"
print_usage
exit 1
fi
if [[ -z "${TENSORFLOW_PATH}" ]]; then
TENSORFLOW_PATH=../tensorflow
fi
if [[ ! -d ${TENSORFLOW_PATH} ]]; then
echo "TensorFlow source folder not found. You should download the source and then set"
echo "the TENSORFLOW_PATH environment variable to point to it, like this:"
echo "export TENSORFLOW_PATH=/my/path/to/tensorflow"
print_usage
exit 1
fi
MODEL_FOLDER=/tmp/mobilenet_v1_${MOBILENET_VERSION}_${IMAGE_SIZE}
if [[ -d ${MODEL_FOLDER} ]]; then
echo "Model folder ${MODEL_FOLDER} already exists!"
echo "If you want to overwrite it, then 'rm -rf ${MODEL_FOLDER}' first."
print_usage
exit 1
fi
mkdir ${MODEL_FOLDER}
if [[ ${CHECKPOINT} = "" ]]; then
echo "*******"
echo "Downloading pretrained weights"
echo "*******"
curl "http://download.tensorflow.org/models/mobilenet_v1_${MOBILENET_VERSION}_${IMAGE_SIZE}_2017_06_14.tar.gz" \
-o ${MODEL_FOLDER}/checkpoints.tar.gz
tar xzf ${MODEL_FOLDER}/checkpoints.tar.gz --directory ${MODEL_FOLDER}
CHECKPOINT=${MODEL_FOLDER}/mobilenet_v1_${MOBILENET_VERSION}_${IMAGE_SIZE}.ckpt
fi
echo "*******"
echo "Exporting graph architecture to ${MODEL_FOLDER}/unfrozen_graph.pb"
echo "*******"
bazel run slim:export_inference_graph -- \
--model_name=${SLIM_NAME} --image_size=${IMAGE_SIZE} --logtostderr \
--output_file=${MODEL_FOLDER}/unfrozen_graph.pb --dataset_dir=${MODEL_FOLDER}
cd ${TENSORFLOW_PATH}
echo "*******"
echo "Freezing graph to ${MODEL_FOLDER}/frozen_graph.pb"
echo "*******"
bazel run tensorflow/python/tools:freeze_graph -- \
--input_graph=${MODEL_FOLDER}/unfrozen_graph.pb \
--input_checkpoint=${CHECKPOINT} \
--input_binary=true --output_graph=${MODEL_FOLDER}/frozen_graph.pb \
--output_node_names=MobilenetV1/Predictions/Reshape_1
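# The quantize_weights transform stores the float weights as 8-bit values that
# are expanded back to float at load time, typically shrinking the graph file
# roughly fourfold with little loss of accuracy.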
echo "Quantizing weights to ${MODEL_FOLDER}/quantized_graph.pb"
bazel run tensorflow/tools/graph_transforms:transform_graph -- \
--in_graph=${MODEL_FOLDER}/frozen_graph.pb \
--out_graph=${MODEL_FOLDER}/quantized_graph.pb \
--inputs=input --outputs=MobilenetV1/Predictions/Reshape_1 \
--transforms='fold_constants fold_batch_norms quantize_weights'
echo "*******"
echo "Running label_image using the graph"
echo "*******"
bazel build tensorflow/examples/label_image:label_image
bazel-bin/tensorflow/examples/label_image/label_image \
--input_layer=input --output_layer=MobilenetV1/Predictions/Reshape_1 \
--graph=${MODEL_FOLDER}/quantized_graph.pb --input_mean=-127 --input_std=127 \
--image=tensorflow/examples/label_image/data/grace_hopper.jpg \
--input_width=${IMAGE_SIZE} --input_height=${IMAGE_SIZE} --labels=${MODEL_FOLDER}/labels.txt
echo "*******"
echo "Saved graphs to ${MODEL_FOLDER}/frozen_graph.pb and ${MODEL_FOLDER}/quantized_graph.pb"
echo "*******"
|
CUDA-Optimized/FastSpeech | FastSpeech | .gitignore | .idea
__pycache__
.DS_Store
*.egg-info
.vscode
|
TensorFlow2/Segmentation/UNet_Medical/data_loading | data_loading | data_loader | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Dataset class encapsulates the data loading"""
import multiprocessing
import os
from collections import deque
import numpy as np
import tensorflow as tf
from PIL import Image, ImageSequence
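# A minimal usage sketch (illustrative; assumes the EM challenge TIFF volumes
# 'train-volume.tif', 'train-labels.tif' and 'test-volume.tif' live in ./data):
#
#   dataset = Dataset(data_dir='./data', batch_size=8, fold=0, augment=True)
#   print(dataset.train_size, dataset.eval_size, dataset.test_size)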
class Dataset:
"""Load, separate and prepare the data for training and prediction"""
def __init__(self, data_dir, batch_size, fold, augment=False, gpu_id=0, num_gpus=1, seed=0, amp=False):
if not os.path.exists(data_dir):
raise FileNotFoundError('Cannot find data dir: {}'.format(data_dir))
self._data_dir = data_dir
self._batch_size = batch_size
self._augment = augment
self.precision = tf.float16 if amp else tf.float32
self._seed = seed
images = self._load_multipage_tiff(os.path.join(self._data_dir, 'train-volume.tif'))
masks = self._load_multipage_tiff(os.path.join(self._data_dir, 'train-labels.tif'))
self._test_images = \
self._load_multipage_tiff(os.path.join(self._data_dir, 'test-volume.tif'))
train_indices, val_indices = self._get_val_train_indices(len(images), fold)
self._train_images = images[train_indices]
self._train_masks = masks[train_indices]
self._val_images = images[val_indices]
self._val_masks = masks[val_indices]
self._num_gpus = num_gpus
self._gpu_id = gpu_id
@property
def train_size(self):
return len(self._train_images)
@property
def eval_size(self):
return len(self._val_images)
@property
def test_size(self):
return len(self._test_images)
def _load_multipage_tiff(self, path):
"""Load tiff images containing many images in the channel dimension"""
return np.array([np.array(p) for p in ImageSequence.Iterator(Image.open(path))])
def _get_val_train_indices(self, length, fold, ratio=0.8):
assert 0 < ratio <= 1, "Train/total data ratio must be in range (0.0, 1.0]"
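        # Rotation-based cross-validation (explanatory note): the shuffled
        # index array is rotated by fold * validation_size below, so each fold
        # shifts a different slice (20% of the data for ratio=0.8) into the
        # validation split while reusing a single shuffle order.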
np.random.seed(self._seed)
        indices = np.arange(0, length, 1, dtype=np.int64)  # np.int was removed in NumPy 1.24; use an explicit integer dtype
np.random.shuffle(indices)
if fold is not None:
indices = deque(indices)
indices.rotate(fold * int((1.0 - ratio) * length))
indices = np.array(indices)
train_indices = indices[:int(ratio * len(indices))]
val_indices = indices[int(ratio * len(indices)):]
else:
train_indices = indices
val_indices = []
return train_indices, val_indices
def _normalize_inputs(self, inputs):
"""Normalize inputs"""
inputs = tf.expand_dims(tf.cast(inputs, tf.float32), -1)
# Center around zero
inputs = tf.divide(inputs, 127.5) - 1
# Resize to match output size
inputs = tf.image.resize(inputs, (388, 388))
return tf.image.resize_with_crop_or_pad(inputs, 572, 572)
def _normalize_labels(self, labels):
"""Normalize labels"""
labels = tf.expand_dims(tf.cast(labels, tf.float32), -1)
labels = tf.divide(labels, 255)
# Resize to match output size
labels = tf.image.resize(labels, (388, 388))
labels = tf.image.resize_with_crop_or_pad(labels, 572, 572)
cond = tf.less(labels, 0.5 * tf.ones(tf.shape(input=labels)))
labels = tf.where(cond, tf.zeros(tf.shape(input=labels)), tf.ones(tf.shape(input=labels)))
return tf.one_hot(tf.squeeze(tf.cast(labels, tf.int32)), 2)
@tf.function
def _preproc_samples(self, inputs, labels, augment=True):
"""Preprocess samples and perform random augmentations"""
inputs = self._normalize_inputs(inputs)
labels = self._normalize_labels(labels)
if self._augment and augment:
# Horizontal flip
h_flip = tf.random.uniform([]) > 0.5
inputs = tf.cond(pred=h_flip, true_fn=lambda: tf.image.flip_left_right(inputs), false_fn=lambda: inputs)
labels = tf.cond(pred=h_flip, true_fn=lambda: tf.image.flip_left_right(labels), false_fn=lambda: labels)
# Vertical flip
v_flip = tf.random.uniform([]) > 0.5
inputs = tf.cond(pred=v_flip, true_fn=lambda: tf.image.flip_up_down(inputs), false_fn=lambda: inputs)
labels = tf.cond(pred=v_flip, true_fn=lambda: tf.image.flip_up_down(labels), false_fn=lambda: labels)
# Prepare for batched transforms
inputs = tf.expand_dims(inputs, 0)
labels = tf.expand_dims(labels, 0)
# Random crop and resize
left = tf.random.uniform([]) * 0.3
right = 1 - tf.random.uniform([]) * 0.3
top = tf.random.uniform([]) * 0.3
bottom = 1 - tf.random.uniform([]) * 0.3
inputs = tf.image.crop_and_resize(inputs, [[top, left, bottom, right]], [0], (572, 572))
labels = tf.image.crop_and_resize(labels, [[top, left, bottom, right]], [0], (572, 572))
# Gray value variations
# Adjust brightness and keep values in range
inputs = tf.image.random_brightness(inputs, max_delta=0.2)
inputs = tf.clip_by_value(inputs, clip_value_min=-1, clip_value_max=1)
inputs = tf.squeeze(inputs, 0)
labels = tf.squeeze(labels, 0)
# Bring back labels to network's output size and remove interpolation artifacts
labels = tf.image.resize_with_crop_or_pad(labels, target_width=388, target_height=388)
cond = tf.less(labels, 0.5 * tf.ones(tf.shape(input=labels)))
labels = tf.where(cond, tf.zeros(tf.shape(input=labels)), tf.ones(tf.shape(input=labels)))
return tf.cast(inputs, self.precision), labels
@tf.function
def _preproc_eval_samples(self, inputs, labels):
"""Preprocess samples and perform random augmentations"""
inputs = self._normalize_inputs(inputs)
labels = self._normalize_labels(labels)
# Bring back labels to network's output size and remove interpolation artifacts
labels = tf.image.resize_with_crop_or_pad(labels, target_width=388, target_height=388)
cond = tf.less(labels, 0.5 * tf.ones(tf.shape(input=labels)))
labels = tf.where(cond, tf.zeros(tf.shape(input=labels)), tf.ones(tf.shape(input=labels)))
return tf.cast(inputs, self.precision), labels
@tf.function
def _preproc_test_samples(self, inputs):
inputs = self._normalize_inputs(inputs)
return tf.cast(inputs, self.precision)
def train_fn(self, drop_remainder=False):
"""Input function for training"""
dataset = tf.data.Dataset.from_tensor_slices(
(self._train_images, self._train_masks))
dataset = dataset.shard(self._num_gpus, self._gpu_id)
dataset = dataset.repeat()
dataset = dataset.shuffle(self._batch_size * 3)
dataset = dataset.map(self._preproc_samples,
num_parallel_calls=multiprocessing.cpu_count()//self._num_gpus)
dataset = dataset.batch(self._batch_size, drop_remainder=drop_remainder)
dataset = dataset.prefetch(self._batch_size)
return dataset
def eval_fn(self, count, drop_remainder=False):
"""Input function for validation"""
dataset = tf.data.Dataset.from_tensor_slices(
(self._val_images, self._val_masks))
dataset = dataset.repeat(count=count)
dataset = dataset.map(self._preproc_eval_samples,
num_parallel_calls=multiprocessing.cpu_count())
dataset = dataset.batch(self._batch_size, drop_remainder=drop_remainder)
dataset = dataset.prefetch(self._batch_size)
return dataset
def test_fn(self, count, drop_remainder=False):
"""Input function for testing"""
dataset = tf.data.Dataset.from_tensor_slices(
self._test_images)
dataset = dataset.repeat(count=count)
dataset = dataset.map(self._preproc_test_samples)
dataset = dataset.batch(self._batch_size, drop_remainder=drop_remainder)
dataset = dataset.prefetch(self._batch_size)
return dataset
def synth_fn(self):
"""Synthetic data function for testing"""
inputs = tf.random.truncated_normal((572, 572, 1), dtype=tf.float32, mean=127.5, stddev=1, seed=self._seed,
name='synth_inputs')
masks = tf.random.truncated_normal((388, 388, 2), dtype=tf.float32, mean=0.01, stddev=0.1, seed=self._seed,
name='synth_masks')
dataset = tf.data.Dataset.from_tensors((inputs, masks))
dataset = dataset.cache()
dataset = dataset.repeat()
dataset = dataset.batch(self._batch_size)
dataset = dataset.prefetch(buffer_size=tf.data.experimental.AUTOTUNE)
return dataset
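# Minimal usage sketch (illustrative; the data directory and parameter values
# are assumptions, not part of this file):
#   dataset = Dataset(data_dir='/data/em_segmentation', batch_size=8,
#                     fold=0, augment=True, gpu_id=0, num_gpus=1)
#   for images, labels in dataset.train_fn(drop_remainder=True).take(1):
#       print(images.shape, labels.shape)  # (8, 572, 572, 1), (8, 388, 388, 2)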
|
PyTorch/Classification/GPUNet/triton | triton | dataloader | # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import json
from timm.data import create_dataset, create_loader
import torch
def update_argparser(parser):
parser.add_argument(
"--config", type=str, required=True, help="Network to deploy")
parser.add_argument("--val-path", type=str, help="Path to dataset to be used", required=True)
parser.add_argument("--batch-size", type=int, help="Batch size to use", default=10)
parser.add_argument("--precision", type=str, default="fp32",
choices=["fp32", "fp16"], help="Inference precision")
    parser.add_argument(
        "--is-prunet", type=str, required=True,
        help="Whether the network is a prunet ('True'/'False'); kept as a string because argparse's type=bool treats any non-empty value as True")
def get_dataloader_fn(config, val_path, batch_size, precision, is_prunet):
imagenet_val_path = val_path
dataset = create_dataset( root=imagenet_val_path, name='', split='validation', load_bytes=False, class_map='')
with open(config) as configFile:
modelJSON = json.load(configFile)
configFile.close()
config = modelJSON
assert len(config) > 0
dataLayer = config[0]
assert dataLayer['layer_type'] == 'data'
assert dataLayer['img_resolution'] > 0
imgRes = dataLayer['img_resolution']
crop_pct = 1.0
if is_prunet == "True":
crop_pct = 0.875
data_config = {'input_size': (3, imgRes, imgRes), 'interpolation': 'bicubic', 'mean': (0.485, 0.456, 0.406), 'std': (0.229, 0.224, 0.225), 'crop_pct': crop_pct}
batch_size = int(batch_size)
loader = create_loader(
dataset,
input_size=data_config['input_size'],
batch_size=batch_size,
use_prefetcher=True,
interpolation=data_config['interpolation'],
mean=data_config['mean'],
std=data_config['std'],
num_workers=1,
crop_pct=data_config['crop_pct'],
pin_memory=False,
tf_preprocessing=False)
dtype = precision
if dtype == 'fp16':
dtype = torch.float16
elif dtype == 'fp32':
dtype = torch.float32
else:
raise NotImplementedError
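    # The closure below follows the deployment toolkit's dataloader contract:
    # it yields (ids, x, y_real) triples where ids tags each sample with its
    # batch index, x maps "INPUT__0" to the preprocessed image batch, and
    # y_real maps "OUTPUT__0" to the integer label tiled across the 1000
    # ImageNet classes so it is shape-compatible with the model's logits.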
def _get_dataloader():
for batch_idx, (input, target) in enumerate(loader):
x = {"INPUT__0": input.to(dtype).cpu().numpy()}
y_real = {"OUTPUT__0": np.tile(target.to(dtype).cpu().numpy()[:, np.newaxis], (1, 1000))}
ids = np.tile(batch_idx, target.shape[0])
yield (ids, x, y_real)
return _get_dataloader |
PyTorch/Classification/GPUNet/triton | triton | run_performance_on_triton | #!/usr/bin/env python3
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import logging
import pathlib
# method from PEP-366 to support relative import in executed modules
if __package__ is None:
__package__ = pathlib.Path(__file__).parent.name
from .deployment_toolkit.core import EvaluationMode, MeasurementMode, OfflineMode, PerformanceTool
from .deployment_toolkit.triton_performance_runner import TritonPerformanceRunner
LOGGER = logging.getLogger("run_performance_on_triton")
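# Example invocation (hypothetical model name and paths; assumes a Triton
# server is already running with the model loaded):
#   python triton/run_performance_on_triton.py \
#       --model-name gpunet --result-path /tmp/perf_results \
#       --batch-sizes 1 2 4 --concurrency 1 2 --performance-tool perf_analyzer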
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"--model-name",
type=str,
required=True,
help="Name of the model to test",
)
parser.add_argument(
"--result-path",
type=pathlib.Path,
required=True,
help="Path where results files is stored.",
)
parser.add_argument(
"--server-url",
type=str,
default="http://127.0.0.1:8000",
help="Url to Triton server",
)
parser.add_argument(
"--model-version",
type=str,
        default="1",
        help="Version of the model",
)
parser.add_argument(
"--input-data",
type=str,
default="random",
help="Input data to perform profiling.",
)
parser.add_argument(
"--input-shapes",
action="append",
help="Input data shape in form INPUT_NAME:<full_shape_without_batch_axis>.",
)
parser.add_argument(
"--batch-sizes",
type=int,
default=[1],
help="List of batch sizes to tests.",
nargs="*",
)
parser.add_argument(
"--concurrency",
type=int,
default=[1],
help="List of concurrency modes.",
nargs="*",
)
parser.add_argument(
"--measurement-mode",
choices=[item.value for item in MeasurementMode],
default=MeasurementMode.COUNT_WINDOWS.value,
type=str,
help="Select measurement mode "
"'time_windows' stabilize performance on measurement window. "
"'count_windows' stabilize performance on number of samples.",
)
parser.add_argument(
"--measurement-interval",
help="Time window perf_analyzer will wait to stabilize the measurement",
default=5000,
type=int,
)
parser.add_argument(
"--measurement-request-count",
help="Number of samples on which perf_analyzer will stabilize the measurement",
default=50,
type=int,
)
parser.add_argument(
"--evaluation-mode",
choices=[item.value for item in EvaluationMode],
default=EvaluationMode.OFFLINE.value,
type=str,
help="Select evaluation mode "
"'offline' run offline analysis and use GPU memory to pass tensors. "
"'online' run online analysis and use HTTP protocol.",
)
parser.add_argument(
"--offline-mode",
choices=[item.value for item in OfflineMode],
default=OfflineMode.SYSTEM.value,
type=str,
help="Select offline mode "
"'system' pass tensors through CPU RAM memory. "
"'cuda' pass tensors through GPU RAM memory.",
)
parser.add_argument(
"--output-shared-memory-size",
default=102400,
type=int,
help="Size of memory buffer allocated for output with dynamic shapes in bytes. "
"Has to be equal to maximal size of output tensor.",
)
parser.add_argument(
"--performance-tool",
choices=[item.value for item in PerformanceTool],
default=PerformanceTool.MODEL_ANALYZER.value,
type=str,
help="Select performance tool for measurement mode "
"'model_analyzer' use Model Analyzer "
"'perf_analyzer' use Perf Analyzer",
)
parser.add_argument(
"--model-repository",
default=None,
type=str,
help="Path to model repository. Valid when using Model Analyzer",
)
parser.add_argument(
"--warmup",
help="Enable model warmup before performance test",
action="store_true",
default=False,
)
parser.add_argument(
"--timeout",
help="Timeout for performance analysis",
type=int,
default=None,
required=False,
)
parser.add_argument(
"-v",
"--verbose",
help="Verbose logs",
action="store_true",
default=False,
)
args = parser.parse_args()
log_level = logging.INFO if not args.verbose else logging.DEBUG
log_format = "%(asctime)s %(levelname)s %(name)s %(message)s"
logging.basicConfig(level=log_level, format=log_format)
runner = TritonPerformanceRunner(
server_url=args.server_url,
model_name=args.model_name,
input_data=args.input_data,
input_shapes=args.input_shapes or [],
batch_sizes=args.batch_sizes,
measurement_mode=MeasurementMode(args.measurement_mode),
measurement_interval=args.measurement_interval,
measurement_request_count=args.measurement_request_count,
concurrency=args.concurrency,
evaluation_mode=EvaluationMode(args.evaluation_mode),
offline_mode=OfflineMode(args.offline_mode),
output_shared_memory_size=args.output_shared_memory_size,
performance_tool=PerformanceTool(args.performance_tool),
model_repository=args.model_repository,
result_path=args.result_path,
warmup=args.warmup,
timeout=args.timeout,
verbose=args.verbose,
)
runner.run()
if __name__ == "__main__":
main()
|
TensorFlow2/Detection/Efficientdet/object_detection | object_detection | argmax_matcher | # Copyright 2020 Google Research. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Argmax matcher implementation.
This class takes a similarity matrix and matches columns to rows based on the
maximum value per column. One can specify a matched_threshold to prevent
columns from matching to rows (generally resulting in a negative training
example) and an unmatched_threshold to ignore the match (generally resulting
in neither a positive nor a negative training example).
This matcher is used in Fast(er)-RCNN.
Note: matchers are used in TargetAssigners. There is a create_target_assigner
factory function for popular implementations.
"""
import tensorflow.compat.v1 as tf
from object_detection import matcher
from object_detection import shape_utils
class ArgMaxMatcher(matcher.Matcher):
"""Matcher based on highest value.
This class computes matches from a similarity matrix. Each column is matched
to a single row.
To support object detection target assignment this class enables setting both
matched_threshold (upper threshold) and unmatched_threshold (lower threshold),
defining three categories of similarity which define whether examples are
positive, negative, or ignored:
(1) similarity >= matched_threshold: Highest similarity. Matched/Positive!
(2) matched_threshold > similarity >= unmatched_threshold: Medium similarity.
Depending on negatives_lower_than_unmatched, this is either
Unmatched/Negative OR Ignore.
(3) unmatched_threshold > similarity: Lowest similarity. Depending on flag
negatives_lower_than_unmatched, either Unmatched/Negative OR Ignore.
For ignored matches this class sets the values in the Match object to -2.
"""
def __init__(self,
matched_threshold,
unmatched_threshold=None,
negatives_lower_than_unmatched=True,
force_match_for_each_row=False):
"""Construct ArgMaxMatcher.
Args:
matched_threshold: Threshold for positive matches. Positive if
sim >= matched_threshold, where sim is the maximum value of the
similarity matrix for a given column. Set to None for no threshold.
unmatched_threshold: Threshold for negative matches. Negative if
sim < unmatched_threshold. Defaults to matched_threshold
when set to None.
negatives_lower_than_unmatched: Boolean which defaults to True. If True
then negative matches are the ones below the unmatched_threshold,
whereas ignored matches are in between the matched and unmatched
threshold. If False, then negative matches are in between the matched
and unmatched threshold, and everything lower than unmatched is ignored.
force_match_for_each_row: If True, ensures that each row is matched to
at least one column (which is not guaranteed otherwise if the
matched_threshold is high). Defaults to False. See
argmax_matcher_test.testMatcherForceMatch() for an example.
Raises:
ValueError: if unmatched_threshold is set but matched_threshold is not set
or if unmatched_threshold > matched_threshold.
"""
if (matched_threshold is None) and (unmatched_threshold is not None):
      raise ValueError('Need to also define matched_threshold when '
                       'unmatched_threshold is defined')
self._matched_threshold = matched_threshold
if unmatched_threshold is None:
self._unmatched_threshold = matched_threshold
else:
if unmatched_threshold > matched_threshold:
        raise ValueError('unmatched_threshold needs to be smaller or equal '
                         'to matched_threshold')
self._unmatched_threshold = unmatched_threshold
if not negatives_lower_than_unmatched:
if self._unmatched_threshold == self._matched_threshold:
        raise ValueError('When negatives are in between matched and '
                         'unmatched thresholds, these cannot be of equal '
                         'value. matched: %s, unmatched: %s' %
                         (self._matched_threshold, self._unmatched_threshold))
self._force_match_for_each_row = force_match_for_each_row
self._negatives_lower_than_unmatched = negatives_lower_than_unmatched
def _match(self, similarity_matrix):
"""Tries to match each column of the similarity matrix to a row.
Args:
similarity_matrix: tensor of shape [N, M] representing any similarity
metric.
Returns:
Match object with corresponding matches for each of M columns.
"""
def _match_when_rows_are_empty():
"""Performs matching when the rows of similarity matrix are empty.
When the rows are empty, all detections are false positives. So we return
a tensor of -1's to indicate that the columns do not match to any rows.
Returns:
matches: int32 tensor indicating the row each column matches to.
"""
similarity_matrix_shape = shape_utils.combined_static_and_dynamic_shape(
similarity_matrix)
return -1 * tf.ones([similarity_matrix_shape[1]], dtype=tf.int32)
def _match_when_rows_are_non_empty():
"""Performs matching when the rows of similarity matrix are non empty.
Returns:
matches: int32 tensor indicating the row each column matches to.
"""
# Matches for each column
matches = tf.argmax(similarity_matrix, 0, output_type=tf.int32)
# Deal with matched and unmatched threshold
if self._matched_threshold is not None:
# Get logical indices of ignored and unmatched columns as tf.int64
matched_vals = tf.reduce_max(similarity_matrix, 0)
below_unmatched_threshold = tf.greater(self._unmatched_threshold,
matched_vals)
between_thresholds = tf.logical_and(
tf.greater_equal(matched_vals, self._unmatched_threshold),
tf.greater(self._matched_threshold, matched_vals))
if self._negatives_lower_than_unmatched:
matches = self._set_values_using_indicator(matches,
below_unmatched_threshold,
-1)
matches = self._set_values_using_indicator(matches,
between_thresholds,
-2)
else:
matches = self._set_values_using_indicator(matches,
below_unmatched_threshold,
-2)
matches = self._set_values_using_indicator(matches,
between_thresholds,
-1)
if self._force_match_for_each_row:
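        # Force-match trick: each row marks its best column with a one-hot
        # indicator; columns claimed by at least one row are then overwritten
        # with the claiming row's id, guaranteeing every groundtruth row
        # keeps at least one matched column.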
similarity_matrix_shape = shape_utils.combined_static_and_dynamic_shape(
similarity_matrix)
force_match_column_ids = tf.argmax(similarity_matrix, 1,
output_type=tf.int32)
force_match_column_indicators = tf.one_hot(
force_match_column_ids, depth=similarity_matrix_shape[1])
force_match_row_ids = tf.argmax(force_match_column_indicators, 0,
output_type=tf.int32)
force_match_column_mask = tf.cast(
tf.reduce_max(force_match_column_indicators, 0), tf.bool)
final_matches = tf.where(force_match_column_mask,
force_match_row_ids, matches)
return final_matches
else:
return matches
if similarity_matrix.shape.is_fully_defined():
if similarity_matrix.shape[0] == 0:
return _match_when_rows_are_empty()
else:
return _match_when_rows_are_non_empty()
else:
return tf.cond(
tf.greater(tf.shape(similarity_matrix)[0], 0),
_match_when_rows_are_non_empty, _match_when_rows_are_empty)
def _set_values_using_indicator(self, x, indicator, val):
"""Set the indicated fields of x to val.
Args:
x: tensor.
indicator: boolean with same shape as x.
val: scalar with value to set.
Returns:
modified tensor.
"""
indicator = tf.cast(indicator, x.dtype)
return x * (1 - indicator) + val * indicator
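# Worked example (illustrative; assumes the surrounding object_detection
# package is importable and the default negatives_lower_than_unmatched=True):
#   matcher = ArgMaxMatcher(matched_threshold=0.5, unmatched_threshold=0.3)
#   sim = tf.constant([[0.9, 0.4, 0.1],
#                      [0.2, 0.35, 0.05]])
#   match = matcher.match(sim)
#   # column 0 -> row 0 (0.9 >= 0.5): positive match
#   # column 1 -> -2 (ignored): max 0.4 falls between the two thresholds
#   # column 2 -> -1 (negative): max 0.1 is below unmatched_threshold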
|
PyTorch/SpeechSynthesis/Tacotron2/tacotron2_common | tacotron2_common | utils | # *****************************************************************************
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the NVIDIA CORPORATION nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# *****************************************************************************
import numpy as np
from scipy.io.wavfile import read
import torch
import os
import argparse
import json
class ParseFromConfigFile(argparse.Action):
def __init__(self, option_strings, type, dest, help=None, required=False):
super(ParseFromConfigFile, self).__init__(option_strings=option_strings, type=type, dest=dest, help=help, required=required)
def __call__(self, parser, namespace, values, option_string):
with open(values, 'r') as f:
data = json.load(f)
for group in data.keys():
for k,v in data[group].items():
underscore_k = k.replace('-', '_')
setattr(namespace, underscore_k, v)
def get_mask_from_lengths(lengths):
max_len = torch.max(lengths).item()
ids = torch.arange(0, max_len, device=lengths.device, dtype=lengths.dtype)
mask = (ids < lengths.unsqueeze(1)).byte()
mask = torch.le(mask, 0)
return mask
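# Worked example (illustrative): for lengths = torch.tensor([3, 1]) the mask
# is True exactly at padded positions:
#   [[False, False, False],
#    [False, True,  True ]]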
def load_wav_to_torch(full_path):
sampling_rate, data = read(full_path)
return torch.FloatTensor(data.astype(np.float32)), sampling_rate
def load_filepaths_and_text(dataset_path, filename, split="|"):
with open(filename, encoding='utf-8') as f:
def split_line(root, line):
parts = line.strip().split(split)
if len(parts) > 2:
raise Exception(
"incorrect line format for file: {}".format(filename))
path = os.path.join(root, parts[0])
text = parts[1]
return path,text
filepaths_and_text = [split_line(dataset_path, line) for line in f]
return filepaths_and_text
def to_gpu(x):
x = x.contiguous()
if torch.cuda.is_available():
x = x.cuda(non_blocking=True)
return x
|
PyTorch/Classification/GPUNet/triton/085ms/runner | runner | pipeline_impl | # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
if __name__ == "__main__" and __package__ is None:
__package__ = pathlib.Path(__file__).parent.name
from ...runner.pipeline import Pipeline
pipeline = Pipeline()
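# The stages below mirror the deployment flow: export the checkpoint to the
# requested format, convert it with model-navigator, load it into the Triton
# model store, then profile it offline and online with perf_analyzer.
# Environment variables such as ${SHARED_DIR}, ${CHECKPOINT_DIR} and
# ${MODEL_REPOSITORY_PATH} are expected to be provided by the runner harness.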
pipeline.model_export(
commands=(
r"""
if [[ "${EXPORT_FORMAT}" == "torchscript" ]]; then
export FORMAT_SUFFIX="pt"
else
export FORMAT_SUFFIX="${EXPORT_FORMAT}"
fi
python3 triton/export_model.py \
--input-path triton/model.py \
--input-type pyt \
--output-path ${SHARED_DIR}/exported_model.${FORMAT_SUFFIX} \
--output-type ${EXPORT_FORMAT} \
--ignore-unknown-parameters \
--onnx-opset 13 \
--torch-jit ${TORCH_JIT} \
\
--config /workspace/gpunet/configs/batch1/GV100/0.85ms.json \
--checkpoint ${CHECKPOINT_DIR}/0.85ms.pth.tar \
--precision ${EXPORT_PRECISION} \
\
--dataloader triton/dataloader.py \
--val-path ${DATASETS_DIR}/ \
--is-prunet False \
--batch-size 1
""",
)
)
pipeline.model_conversion(
commands=(
r"""
if [[ "${EXPORT_FORMAT}" == "torchscript" ]]; then
export FORMAT_SUFFIX="pt"
else
export FORMAT_SUFFIX="${EXPORT_FORMAT}"
fi
model-navigator convert \
--model-name ${MODEL_NAME} \
--model-path ${SHARED_DIR}/exported_model.${FORMAT_SUFFIX} \
--output-path ${SHARED_DIR}/converted_model \
--target-formats ${FORMAT} \
--target-precisions ${PRECISION} \
--launch-mode local \
--override-workspace \
--verbose \
\
--onnx-opsets 13 \
--max-batch-size ${MAX_BATCH_SIZE} \
--container-version 21.12 \
--max-workspace-size 10000000000 \
--atol OUTPUT__0=100 \
--rtol OUTPUT__0=100
""",
)
)
pipeline.model_deploy(
commands=(
r"""
model-navigator triton-config-model \
--model-repository ${MODEL_REPOSITORY_PATH} \
--model-name ${MODEL_NAME} \
--model-version 1 \
--model-path ${SHARED_DIR}/converted_model \
--model-format ${FORMAT} \
--model-control-mode explicit \
--load-model \
--load-model-timeout-s 100 \
--verbose \
\
--backend-accelerator ${BACKEND_ACCELERATOR} \
--tensorrt-precision ${PRECISION} \
--tensorrt-capture-cuda-graph \
--tensorrt-max-workspace-size 10000000000 \
--max-batch-size ${MAX_BATCH_SIZE} \
--batching ${MODEL_BATCHING} \
--preferred-batch-sizes ${MAX_BATCH_SIZE} \
--engine-count-per-device gpu=${NUMBER_OF_MODEL_INSTANCES}
""",
)
)
pipeline.triton_performance_offline_tests(
commands=(
r"""
python triton/run_performance_on_triton.py \
--model-repository ${MODEL_REPOSITORY_PATH} \
--model-name ${MODEL_NAME} \
--input-data random \
--batch-sizes 1 2 4 8 16 32 64 \
--concurrency 1 \
--evaluation-mode offline \
--measurement-request-count 10 \
--warmup \
--performance-tool perf_analyzer \
--result-path ${SHARED_DIR}/triton_performance_offline.csv
""",
),
result_path="${SHARED_DIR}/triton_performance_offline.csv",
)
pipeline.triton_performance_online_tests(
commands=(
r"""
python triton/run_performance_on_triton.py \
--model-repository ${MODEL_REPOSITORY_PATH} \
--model-name ${MODEL_NAME} \
--input-data random \
--batch-sizes 1 \
--concurrency 8 16 24 32 40 48 56 64 72 80 88 96 104 112 120 128 136 144 152 160 168 176 184 192 200 208 216 224 232 240 248 256 \
--evaluation-mode online \
--measurement-request-count 500 \
--warmup \
--performance-tool perf_analyzer \
--result-path ${SHARED_DIR}/triton_performance_online.csv
""",
),
result_path="${SHARED_DIR}/triton_performance_online.csv",
) |
TensorFlow/Detection/SSD/models/research/object_detection/core | core | model | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Abstract detection model.
This file defines a generic base class for detection models. Programs that are
designed to work with arbitrary detection models should only depend on this
class. We intend for the functions in this class to follow tensor-in/tensor-out
design, thus all functions have tensors or lists/dictionaries holding tensors as
inputs and outputs.
Abstractly, detection models predict output tensors given input images
which can be passed to a loss function at training time or passed to a
postprocessing function at eval time. The computation graphs at a high level
consequently look as follows:
Training time:
inputs (images tensor) -> preprocess -> predict -> loss -> outputs (loss tensor)
Evaluation time:
inputs (images tensor) -> preprocess -> predict -> postprocess
-> outputs (boxes tensor, scores tensor, classes tensor, num_detections tensor)
DetectionModels must thus implement four functions (1) preprocess, (2) predict,
(3) postprocess and (4) loss. DetectionModels should make no assumptions about
the input size or aspect ratio --- they are responsible for doing any
resize/reshaping necessary (see docstring for the preprocess function).
Output classes are always integers in the range [0, num_classes). Any mapping
of these integers to semantic labels is to be handled outside of this class.
Images are resized in the `preprocess` method. All of `preprocess`, `predict`,
and `postprocess` should be reentrant.
The `preprocess` method runs `image_resizer_fn` that returns resized_images and
`true_image_shapes`. Since `image_resizer_fn` can pad the images with zeros,
true_image_shapes indicate the slices that contain the image without padding.
This is useful for padding images to be a fixed size for batching.
The `postprocess` method uses the true image shapes to clip predictions that lie
outside of images.
By default, DetectionModels produce bounding box detections; however, we also support
a handful of auxiliary annotations associated with each bounding box, namely,
instance masks and keypoints.
"""
from abc import ABCMeta
from abc import abstractmethod
from object_detection.core import standard_fields as fields
class DetectionModel(object, metaclass=ABCMeta):
  """Abstract base class for detection models."""
def __init__(self, num_classes):
"""Constructor.
Args:
num_classes: number of classes. Note that num_classes *does not* include
background categories that might be implicitly predicted in various
implementations.
"""
self._num_classes = num_classes
self._groundtruth_lists = {}
@property
def num_classes(self):
return self._num_classes
def groundtruth_lists(self, field):
"""Access list of groundtruth tensors.
Args:
field: a string key, options are
fields.BoxListFields.{boxes,classes,masks,keypoints} or
fields.InputDataFields.is_annotated.
Returns:
a list of tensors holding groundtruth information (see also
provide_groundtruth function below), with one entry for each image in the
batch.
Raises:
RuntimeError: if the field has not been provided via provide_groundtruth.
"""
if field not in self._groundtruth_lists:
raise RuntimeError('Groundtruth tensor {} has not been provided'.format(
field))
return self._groundtruth_lists[field]
def groundtruth_has_field(self, field):
"""Determines whether the groundtruth includes the given field.
Args:
field: a string key, options are
fields.BoxListFields.{boxes,classes,masks,keypoints} or
fields.InputDataFields.is_annotated.
Returns:
True if the groundtruth includes the given field, False otherwise.
"""
return field in self._groundtruth_lists
@abstractmethod
def preprocess(self, inputs):
"""Input preprocessing.
To be overridden by implementations.
This function is responsible for any scaling/shifting of input values that
is necessary prior to running the detector on an input image.
It is also responsible for any resizing, padding that might be necessary
as images are assumed to arrive in arbitrary sizes. While this function
could conceivably be part of the predict method (below), it is often
convenient to keep these separate --- for example, we may want to preprocess
on one device, place onto a queue, and let another device (e.g., the GPU)
handle prediction.
A few important notes about the preprocess function:
+ We assume that this operation does not have any trainable variables nor
does it affect the groundtruth annotations in any way (thus data
augmentation operations such as random cropping should be performed
externally).
+ There is no assumption that the batch size in this function is the same as
the batch size in the predict function. In fact, we recommend calling the
preprocess function prior to calling any batching operations (which should
happen outside of the model) and thus assuming that batch sizes are equal
to 1 in the preprocess function.
+ There is also no explicit assumption that the output resolutions
must be fixed across inputs --- this is to support "fully convolutional"
settings in which input images can have different shapes/resolutions.
Args:
inputs: a [batch, height_in, width_in, channels] float32 tensor
representing a batch of images with values between 0 and 255.0.
Returns:
preprocessed_inputs: a [batch, height_out, width_out, channels] float32
tensor representing a batch of images.
true_image_shapes: int32 tensor of shape [batch, 3] where each row is
of the form [height, width, channels] indicating the shapes
of true images in the resized images, as resized images can be padded
with zeros.
"""
pass
@abstractmethod
def predict(self, preprocessed_inputs, true_image_shapes):
"""Predict prediction tensors from inputs tensor.
Outputs of this function can be passed to loss or postprocess functions.
Args:
preprocessed_inputs: a [batch, height, width, channels] float32 tensor
representing a batch of images.
true_image_shapes: int32 tensor of shape [batch, 3] where each row is
of the form [height, width, channels] indicating the shapes
of true images in the resized images, as resized images can be padded
with zeros.
Returns:
prediction_dict: a dictionary holding prediction tensors to be
passed to the Loss or Postprocess functions.
"""
pass
@abstractmethod
def postprocess(self, prediction_dict, true_image_shapes, **params):
"""Convert predicted output tensors to final detections.
Outputs adhere to the following conventions:
* Classes are integers in [0, num_classes); background classes are removed
and the first non-background class is mapped to 0. If the model produces
class-agnostic detections, then no output is produced for classes.
* Boxes are to be interpreted as being in [y_min, x_min, y_max, x_max]
format and normalized relative to the image window.
* `num_detections` is provided for settings where detections are padded to a
fixed number of boxes.
* We do not specifically assume any kind of probabilistic interpretation
of the scores --- the only important thing is their relative ordering.
Thus implementations of the postprocess function are free to output
logits, probabilities, calibrated probabilities, or anything else.
Args:
prediction_dict: a dictionary holding prediction tensors.
true_image_shapes: int32 tensor of shape [batch, 3] where each row is
of the form [height, width, channels] indicating the shapes
of true images in the resized images, as resized images can be padded
with zeros.
**params: Additional keyword arguments for specific implementations of
DetectionModel.
Returns:
detections: a dictionary containing the following fields
detection_boxes: [batch, max_detections, 4]
detection_scores: [batch, max_detections]
detection_classes: [batch, max_detections]
(If a model is producing class-agnostic detections, this field may be
missing)
instance_masks: [batch, max_detections, image_height, image_width]
(optional)
keypoints: [batch, max_detections, num_keypoints, 2] (optional)
num_detections: [batch]
"""
pass
@abstractmethod
def loss(self, prediction_dict, true_image_shapes):
"""Compute scalar loss tensors with respect to provided groundtruth.
Calling this function requires that groundtruth tensors have been
provided via the provide_groundtruth function.
Args:
prediction_dict: a dictionary holding predicted tensors
true_image_shapes: int32 tensor of shape [batch, 3] where each row is
of the form [height, width, channels] indicating the shapes
of true images in the resized images, as resized images can be padded
with zeros.
Returns:
a dictionary mapping strings (loss names) to scalar tensors representing
loss values.
"""
pass
def provide_groundtruth(self,
groundtruth_boxes_list,
groundtruth_classes_list,
groundtruth_masks_list=None,
groundtruth_keypoints_list=None,
groundtruth_weights_list=None,
groundtruth_confidences_list=None,
groundtruth_is_crowd_list=None,
is_annotated_list=None):
"""Provide groundtruth tensors.
Args:
groundtruth_boxes_list: a list of 2-D tf.float32 tensors of shape
[num_boxes, 4] containing coordinates of the groundtruth boxes.
Groundtruth boxes are provided in [y_min, x_min, y_max, x_max]
format and assumed to be normalized and clipped
relative to the image window with y_min <= y_max and x_min <= x_max.
groundtruth_classes_list: a list of 2-D tf.float32 one-hot (or k-hot)
tensors of shape [num_boxes, num_classes] containing the class targets
with the 0th index assumed to map to the first non-background class.
groundtruth_masks_list: a list of 3-D tf.float32 tensors of
shape [num_boxes, height_in, width_in] containing instance
masks with values in {0, 1}. If None, no masks are provided.
Mask resolution `height_in`x`width_in` must agree with the resolution
of the input image tensor provided to the `preprocess` function.
groundtruth_keypoints_list: a list of 3-D tf.float32 tensors of
shape [num_boxes, num_keypoints, 2] containing keypoints.
Keypoints are assumed to be provided in normalized coordinates and
missing keypoints should be encoded as NaN.
groundtruth_weights_list: A list of 1-D tf.float32 tensors of shape
[num_boxes] containing weights for groundtruth boxes.
groundtruth_confidences_list: A list of 2-D tf.float32 tensors of shape
[num_boxes, num_classes] containing class confidences for groundtruth
boxes.
groundtruth_is_crowd_list: A list of 1-D tf.bool tensors of shape
[num_boxes] containing is_crowd annotations
is_annotated_list: A list of scalar tf.bool tensors indicating whether
images have been labeled or not.
"""
self._groundtruth_lists[fields.BoxListFields.boxes] = groundtruth_boxes_list
self._groundtruth_lists[
fields.BoxListFields.classes] = groundtruth_classes_list
if groundtruth_weights_list:
self._groundtruth_lists[fields.BoxListFields.
weights] = groundtruth_weights_list
if groundtruth_confidences_list:
self._groundtruth_lists[fields.BoxListFields.
confidences] = groundtruth_confidences_list
if groundtruth_masks_list:
self._groundtruth_lists[
fields.BoxListFields.masks] = groundtruth_masks_list
if groundtruth_keypoints_list:
self._groundtruth_lists[
fields.BoxListFields.keypoints] = groundtruth_keypoints_list
if groundtruth_is_crowd_list:
self._groundtruth_lists[
fields.BoxListFields.is_crowd] = groundtruth_is_crowd_list
if is_annotated_list:
self._groundtruth_lists[
fields.InputDataFields.is_annotated] = is_annotated_list
@abstractmethod
def regularization_losses(self):
"""Returns a list of regularization losses for this model.
Returns a list of regularization losses for this model that the estimator
needs to use during training/optimization.
Returns:
A list of regularization loss tensors.
"""
pass
@abstractmethod
def restore_map(self, fine_tune_checkpoint_type='detection'):
"""Returns a map of variables to load from a foreign checkpoint.
Returns a map of variable names to load from a checkpoint to variables in
the model graph. This enables the model to initialize based on weights from
another task. For example, the feature extractor variables from a
classification model can be used to bootstrap training of an object
detector. When loading from an object detection model, the checkpoint model
should have the same parameters as this detection model with exception of
the num_classes parameter.
Args:
fine_tune_checkpoint_type: whether to restore from a full detection
checkpoint (with compatible variable names) or to restore from a
classification checkpoint for initialization prior to training.
Valid values: `detection`, `classification`. Default 'detection'.
Returns:
A dict mapping variable names (to load from a checkpoint) to variables in
the model graph.
"""
pass
@abstractmethod
def updates(self):
"""Returns a list of update operators for this model.
Returns a list of update operators for this model that must be executed at
each training step. The estimator's train op needs to have a control
dependency on these updates.
Returns:
A list of update operators.
"""
pass
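# Illustrative subclass skeleton (not part of the original file; method bodies
# are placeholders):
#   class ToyDetector(DetectionModel):
#     def preprocess(self, inputs):
#       resized = tf.image.resize_images(inputs, (300, 300))
#       true_shapes = tf.tile([[300, 300, 3]], [tf.shape(inputs)[0], 1])
#       return resized, true_shapes
#     def predict(self, preprocessed_inputs, true_image_shapes): ...
#     def postprocess(self, prediction_dict, true_image_shapes, **params): ...
#     def loss(self, prediction_dict, true_image_shapes): ...
#     def regularization_losses(self): ...
#     def restore_map(self, fine_tune_checkpoint_type='detection'): ...
#     def updates(self): ...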
|
PyTorch/SpeechSynthesis/Tacotron2/platform | platform | DGX1_waveglow_AMP_4NGPU_train | mkdir -p output
python -m multiproc train.py -m WaveGlow -o output/ --amp -lr 1e-4 --epochs 1001 -bs 10 --segment-length 8000 --weight-decay 0 --grad-clip-thresh 65504.0 --cudnn-benchmark --cudnn-enabled --log-file nvlog.json
|
TensorFlow2/Recommendation/DLRM_and_DCNv2/deployment | deployment | evaluate_accuracy | # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# author: Tomasz Grel ([email protected])
import dataloading.feature_spec
import os
import numpy as np
import argparse
import dllogger
from dataloading.dataloader import create_input_pipelines
from nn.evaluator import Evaluator
from utils.logging import IterTimer, init_logging
import deployment.tf.triton_ensemble_wrapper
import deployment.hps.triton_ensemble_wrapper
def log_results(auc, test_loss, latencies, batch_size, compute_latencies=False, warmup_steps=10):
# don't benchmark the first few warmup steps
latencies = latencies[warmup_steps:]
result_data = {
'mean_inference_throughput': batch_size / np.mean(latencies),
'mean_inference_latency': np.mean(latencies)
}
if compute_latencies:
for percentile in [90, 95, 99]:
result_data[f'p{percentile}_inference_latency'] = np.percentile(latencies, percentile)
result_data['auc'] = auc
result_data['test_loss'] = test_loss
dllogger.log(data=result_data, step=tuple())
def parse_args():
parser = argparse.ArgumentParser(description='')
parser.add_argument('--dataset_path', type=str, required=True, help='')
parser.add_argument('--dataset_type', default='tf_raw', type=str, help='')
parser.add_argument('--feature_spec', default='feature_spec.yaml', type=str, help='')
parser.add_argument('--batch_size', type=int, default=32768, help='Batch size')
parser.add_argument('--auc_thresholds', type=int, default=8000, help='')
parser.add_argument('--max_steps', type=int, default=None, help='')
parser.add_argument('--print_freq', type=int, default=10, help='')
    parser.add_argument('--log_path', type=str, default='dlrm_tf_log.json', help='Path to the output JSON log file')
parser.add_argument('--verbose', action='store_true', default=False, help='')
parser.add_argument('--test_on_train', action='store_true', default=False,
help='Run validation on the training set.')
parser.add_argument('--fused_embedding', action='store_true', default=False,
help='Fuse the embedding table together for better GPU utilization.')
parser.add_argument("--model_name", type=str, help="The name of the model used for inference.", required=True)
parser.add_argument("--sparse_input_format", type=str, choices=["tf-savedmodel", "hps"],
required=True, default="tf-savedmodel")
args = parser.parse_args()
return args
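# Example invocation (hypothetical paths and model name; assumes a Triton
# server is serving the deployed ensemble):
#   python -m deployment.evaluate_accuracy \
#       --dataset_path /data/criteo_preprocessed --batch_size 16384 \
#       --model_name dlrm_ensemble --sparse_input_format tf-savedmodel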
def main():
args = parse_args()
init_logging(log_path=args.log_path, params_dict=args.__dict__)
fspec = dataloading.feature_spec.FeatureSpec.from_yaml(os.path.join(args.dataset_path, args.feature_spec))
num_tables = len(fspec.get_categorical_sizes())
table_ids = list(range(num_tables)) # possibly wrong ordering, to be tested
train_pipeline, validation_pipeline = create_input_pipelines(dataset_type=args.dataset_type,
dataset_path=args.dataset_path,
train_batch_size=args.batch_size,
test_batch_size=args.batch_size,
table_ids=table_ids,
feature_spec=args.feature_spec,
rank=0, world_size=1)
if args.test_on_train:
validation_pipeline = train_pipeline
if args.sparse_input_format == 'hps':
wrapper_cls = deployment.hps.triton_ensemble_wrapper.RecsysTritonEnsemble
else:
wrapper_cls = deployment.tf.triton_ensemble_wrapper.RecsysTritonEnsemble
model = wrapper_cls(model_name=args.model_name, num_tables=num_tables, verbose=args.verbose,
categorical_sizes=fspec.get_categorical_sizes(), fused_embedding=args.fused_embedding)
timer = IterTimer(train_batch_size=args.batch_size, test_batch_size=args.batch_size,
optimizer=None, print_freq=args.print_freq, enabled=True)
evaluator = Evaluator(model=model, timer=timer, auc_thresholds=args.auc_thresholds,
max_steps=args.max_steps, cast_dtype=None)
auc, test_loss, latencies = evaluator(validation_pipeline=validation_pipeline)
log_results(auc, test_loss, latencies, batch_size=args.batch_size)
print('DONE')
if __name__ == '__main__':
main() |
TensorFlow/Recommendation/WideAndDeep/scripts | scripts | DGX1_benchmark_training_fp32_8gpu | #!/bin/bash
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -x
set -e
mpiexec --allow-run-as-root --bind-to socket -np 8 \
python -m trainer.task \
--hvd \
--benchmark_warmup_steps 500 \
--benchmark_steps 1000 \
--gpu \
--benchmark
|
Tools/DGLPyTorch/SyntheticGraphGeneration/syngen/cli/commands | commands | __init__ | # Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
TensorFlow/Classification/ConvNets/triton | triton | calculate_metrics | #!/usr/bin/env python3
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""
Using the `calculate_metrics.py` script, you can obtain model accuracy/error metrics with a user-defined `MetricsCalculator` class.
Data provided to the `MetricsCalculator` are obtained from dump files
stored in the directory pointed to by the `--dump-dir` argument.
These files are prepared by the `run_inference_on_fw.py` and `run_inference_on_triton.py` scripts.
Output data is stored in the csv file pointed to by the `--csv` argument.
Example call:
```shell script
python ./triton/calculate_metrics.py \
--dump-dir /results/dump_triton \
--csv /results/accuracy_results.csv \
--metrics metrics.py \
--metric-class-param1 value
```
"""
import argparse
import csv
import logging
import string
from pathlib import Path
# method from PEP-366 to support relative import in executed modules
if __package__ is None:
__package__ = Path(__file__).parent.name
from .deployment_toolkit.args import ArgParserGenerator
from .deployment_toolkit.core import BaseMetricsCalculator, load_from_file
from .deployment_toolkit.dump import JsonDumpReader
LOGGER = logging.getLogger("calculate_metrics")
TOTAL_COLUMN_NAME = "_total_"
def main():
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(description="Run models with given dataloader", allow_abbrev=False)
parser.add_argument("--metrics", help="Path to python module containing metrics calculator", required=True)
parser.add_argument("--csv", help="Path to csv file", required=True)
parser.add_argument("--dump-dir", help="Path to directory with dumped outputs (and labels)", required=True)
args, *_ = parser.parse_known_args()
MetricsCalculator = load_from_file(args.metrics, "metrics", "MetricsCalculator")
ArgParserGenerator(MetricsCalculator).update_argparser(parser)
args = parser.parse_args()
LOGGER.info("args:")
for key, value in vars(args).items():
LOGGER.info(f" {key} = {value}")
MetricsCalculator = load_from_file(args.metrics, "metrics", "MetricsCalculator")
metrics_calculator: BaseMetricsCalculator = ArgParserGenerator(MetricsCalculator).from_args(args)
reader = JsonDumpReader(args.dump_dir)
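    # JsonDumpReader is expected to yield, for each dumped file, dicts keyed
    # by tensor name with numpy-array values (or None when a given kind of
    # dump is absent); the metrics calculator consumes them batch by batch.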
for ids, x, y_true, y_pred in reader.iterate_over(["ids", "inputs", "labels", "outputs"]):
ids = list(ids["ids"]) if ids is not None else None
metrics_calculator.update(ids=ids, x=x, y_pred=y_pred, y_real=y_true)
metrics = metrics_calculator.metrics
metric_names_with_space = [name for name in metrics if any([c in string.whitespace for c in name])]
if metric_names_with_space:
raise ValueError(f"Metric names shall have no spaces; Incorrect names: {', '.join(metric_names_with_space)}")
csv_path = Path(args.csv)
csv_path.parent.mkdir(parents=True, exist_ok=True)
with csv_path.open("w") as csv_file:
writer = csv.DictWriter(csv_file, fieldnames=list(metrics.keys()))
writer.writeheader()
writer.writerow(metrics)
if __name__ == "__main__":
main()
|