relative_path | section | filename | text
---|---|---|---|
TensorFlow/Detection/SSD/examples | examples | SSD320_FP16_4GPU | # Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
CKPT_DIR=${1:-"/results/SSD320_FP16_4GPU"}
PIPELINE_CONFIG_PATH=${2:-"/workdir/models/research/configs"}"/ssd320_full_4gpus.config"
GPUS=4
TENSOR_OPS=0
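# Setting these to 0 disables TensorCore (tensor op) math for FP32 ops in the exports below; mixed precision comes from --amp instead.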
export TF_ENABLE_CUBLAS_TENSOR_OP_MATH_FP32=${TENSOR_OPS}
export TF_ENABLE_CUDNN_TENSOR_OP_MATH_FP32=${TENSOR_OPS}
export TF_ENABLE_CUDNN_RNN_TENSOR_OP_MATH_FP32=${TENSOR_OPS}
time mpirun --allow-run-as-root \
-np $GPUS \
-H localhost:$GPUS \
-bind-to none \
-map-by slot \
-x NCCL_DEBUG=INFO \
-x LD_LIBRARY_PATH \
-x PATH \
-mca pml ob1 \
-mca btl ^openib \
python -u ./object_detection/model_main.py \
--pipeline_config_path=${PIPELINE_CONFIG_PATH} \
--model_dir=${CKPT_DIR} \
--alsologtostderr \
--amp \
"${@:3}"
|
PyTorch/SpeechSynthesis/HiFiGAN/common | common | ema_utils | import amp_C
import torch
def apply_ema_decay(model, ema_model, decay):
if not decay:
return
st = model.state_dict()
add_module = hasattr(model, 'module') and not hasattr(ema_model, 'module')
for k, v in ema_model.state_dict().items():
if add_module and not k.startswith('module.'):
k = 'module.' + k
v.copy_(decay * v + (1 - decay) * st[k])
def init_multi_tensor_ema(model, ema_model):
model_weights = list(model.state_dict().values())
ema_model_weights = list(ema_model.state_dict().values())
ema_overflow_buf = torch.cuda.IntTensor([0])
return model_weights, ema_model_weights, ema_overflow_buf
def apply_multi_tensor_ema(decay, model_weights, ema_weights, overflow_buf):
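# Fused in-place update via a single multi-tensor kernel: ema = decay * ema + (1 - decay) * model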
amp_C.multi_tensor_axpby(
65536, overflow_buf, [ema_weights, model_weights, ema_weights],
decay, 1-decay, -1)
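# A minimal usage sketch (hypothetical training loop; assumes `model` and
# `ema_model` live on the same CUDA device and start from identical weights):
#   weights, ema_weights, overflow_buf = init_multi_tensor_ema(model, ema_model)
#   for batch in loader:
#       ...  # forward, backward, optimizer.step()
#       apply_multi_tensor_ema(0.999, weights, ema_weights, overflow_buf)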
|
PyTorch/Recommendation/DLRM/dlrm/cuda_src | cuda_src | gather_gpu_fused_pytorch_impl | #include <torch/extension.h>
#include <torch/types.h>
#include <stdexcept>
#include "gather_gpu_fused.cu"
// plugin functions instantiated to do only mixed-precision execution
torch::Tensor gatherGPUFusedFwdTorch(torch::Tensor embedding, torch::Tensor indices, torch::Tensor offsets,
bool amp_train) {
auto size = indices.sizes();
auto batch_size = size[0];
auto num_features = size[1];
size = embedding.sizes();
auto embedding_vector_dim = size[1];
auto embedding_table_rows = size[0]; // not actually needed here
// if (embedding.scalar_type() != torch::ScalarType::Float) {
// throw std::invalid_argument("Invalid input type.");
// }
int64_t outputShape[3] = {batch_size, num_features, embedding_vector_dim};
torch::Tensor output;
if (embedding.scalar_type() == torch::ScalarType::Float) {
if (amp_train) {
output = torch::empty(c10::IntArrayRef(outputShape), embedding.options().dtype(torch::ScalarType::Half));
gather_gpu_fused_fwd(embedding.contiguous().data_ptr<float>(),
offsets.contiguous().data_ptr<int64_t>(),
indices.contiguous().data_ptr<int64_t>(),
output.contiguous().data_ptr<at::Half>(),
batch_size);
}
else {
output = torch::empty(c10::IntArrayRef(outputShape), embedding.options().dtype(torch::ScalarType::Float));
gather_gpu_fused_fwd(embedding.contiguous().data_ptr<float>(),
offsets.contiguous().data_ptr<int64_t>(),
indices.contiguous().data_ptr<int64_t>(),
output.contiguous().data_ptr<float>(),
batch_size);
}
}
else {
output = torch::empty(c10::IntArrayRef(outputShape), embedding.options().dtype(torch::ScalarType::Half));
gather_gpu_fused_fwd(embedding.contiguous().data_ptr<at::Half>(),
offsets.contiguous().data_ptr<int64_t>(),
indices.contiguous().data_ptr<int64_t>(),
output.contiguous().data_ptr<at::Half>(),
batch_size);
}
return output;
}
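// Backward pass below returns the embedding-table gradient as a sparse COO tensor (lookup indices + dense gradient rows).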
torch::Tensor gatherGPUFusedBwdTorch(torch::Tensor embedding, torch::Tensor indices,
torch::Tensor offsets, torch::Tensor upstreamGrad) {
if (embedding.scalar_type() != torch::ScalarType::Float) {
throw std::invalid_argument("Invalid input type.");
}
auto size = upstreamGrad.sizes();
auto batch_size = size[0];
auto num_features = size[1];
auto embedding_vector_dim = size[2];
size = indices.sizes();
auto sparse_tensor_indices_dim = size[0] * size[1];
int64_t indices_outputShape[2] = {1, sparse_tensor_indices_dim};
auto sparse_tensor_values_0 = batch_size * num_features;
auto sparse_tensor_values_1 = embedding_vector_dim;
int64_t values_outputShape[2] = {sparse_tensor_values_0, sparse_tensor_values_1};
auto sparse_grad_indices_tensor = torch::empty(c10::IntArrayRef(indices_outputShape), indices.options());
auto sparse_grad_values_tensor = torch::empty(c10::IntArrayRef(values_outputShape),
upstreamGrad.options().dtype(torch::ScalarType::Float));
// this is the shape of output gradient vector
int64_t sparse_tensor_shape[2] = {embedding.sizes()[0], embedding_vector_dim};
if (upstreamGrad.scalar_type() == torch::ScalarType::Half) {
gather_gpu_fused_bwd(upstreamGrad.contiguous().data_ptr<at::Half>(),
indices.contiguous().data_ptr<int64_t>(),
offsets.contiguous().data_ptr<int64_t>(),
sparse_grad_values_tensor.contiguous().data_ptr<float>(),
sparse_grad_indices_tensor.contiguous().data_ptr<int64_t>(),
(int)batch_size, (int)num_features, (int)embedding_vector_dim);
}
else {
gather_gpu_fused_bwd(upstreamGrad.contiguous().data_ptr<float>(),
indices.contiguous().data_ptr<int64_t>(),
offsets.contiguous().data_ptr<int64_t>(),
sparse_grad_values_tensor.contiguous().data_ptr<float>(),
sparse_grad_indices_tensor.contiguous().data_ptr<int64_t>(),
(int)batch_size, (int)num_features, (int)embedding_vector_dim);
}
return torch::_sparse_coo_tensor_with_dims_and_tensors(1, 1, c10::IntArrayRef(sparse_tensor_shape),
sparse_grad_indices_tensor, sparse_grad_values_tensor,
sparse_grad_values_tensor.options().layout(c10::Layout::Sparse));
}
|
PyTorch/SpeechSynthesis/Tacotron2/trtis_cpp/src/trt/plugins/taco2AttentionPlugin | taco2AttentionPlugin | taco2AttentionLayerKernel | /*
* Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the NVIDIA CORPORATION nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "taco2AttentionLayerKernel.h"
#include "taco2Utils.h"
using namespace tts;
namespace nvinfer1
{
namespace plugin
{
/******************************************************************************
* CONSTANTS ******************************************************************
*****************************************************************************/
namespace
{
constexpr const int ENERGY_BLOCK_SIZE = 128;
constexpr const int CONV_BLOCK_SIZE = 128;
constexpr const int QUERY_NUM_COLS = 1024;
constexpr const int QUERY_COL_SIZE = 128;
constexpr const int WARP_SIZE = 32;
static_assert(QUERY_NUM_COLS % QUERY_COL_SIZE == 0, "QUERY_NUM_COLS must be a multiple of QUERY_COL_SIZE");
} // namespace
const float Taco2AttentionLayerKernel::ONE = 1.0f;
const float Taco2AttentionLayerKernel::ZERO = 0.0f;
/******************************************************************************
* CUDA KERNELS ***************************************************************
*****************************************************************************/
template <typename T, int NUM_THREADS>
__device__ inline T warpSum(T const initVal)
{
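// Tree-reduce initVal across NUM_THREADS lanes using register shuffles.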
constexpr const uint32_t mask = 0xffffffff >> (WARP_SIZE - NUM_THREADS);
T val = initVal;
#pragma unroll
for (int d = NUM_THREADS / 2; d > 0; d /= 2)
{
val += __shfl_down_sync(mask, val, d, NUM_THREADS);
}
return val;
}
template <typename T, int BLOCK_SIZE>
__device__ T cooperativeSum(T const initVal, T* const buffer)
{
// first all warps reduce to single value
assert(BLOCK_SIZE % WARP_SIZE == 0);
assert(BLOCK_SIZE <= WARP_SIZE * WARP_SIZE);
T val = warpSum<T, WARP_SIZE>(initVal);
if (BLOCK_SIZE > WARP_SIZE)
{
if (threadIdx.x % WARP_SIZE == 0)
{
buffer[threadIdx.x / WARP_SIZE] = val;
}
__syncthreads();
if (threadIdx.x < (BLOCK_SIZE / WARP_SIZE))
{
val = warpSum<T, BLOCK_SIZE / WARP_SIZE>(buffer[threadIdx.x]);
}
}
return val;
}
__global__ void attentionQueryGemvKernel(const float* const weights, const float* const input, float* const output,
const int inputLength, const int outputLength)
{
__shared__ float shared[QUERY_COL_SIZE];
assert(gridDim.x == outputLength);
assert(inputLength == QUERY_NUM_COLS);
// perform mat vec
float v = 0.0f;
for (int col = threadIdx.x; col < QUERY_NUM_COLS; col += QUERY_COL_SIZE)
{
// load chunk
v += input[col] * weights[blockIdx.x * QUERY_NUM_COLS + col];
}
v = cooperativeSum<float, QUERY_COL_SIZE>(v, shared);
// add bias and write
if (threadIdx.x == 0)
{
output[blockIdx.x] = v;
}
}
__global__ void attentionEnergyKernel(const float* const query, const float* const processedMemory,
const float* const location, const float* const weights, const int inputLength, float* const blockSums)
{
// first every thread must load their 'query' cell
const float q = query[threadIdx.x];
// should be 32x128 = 4k
__shared__ float summation[ENERGY_BLOCK_SIZE];
// iterate over rows to create sums and perform tanh
const int gIdx = blockIdx.x * ENERGY_BLOCK_SIZE + threadIdx.x;
const float v = q + processedMemory[gIdx] + location[gIdx];
float val = tanh(v) * weights[threadIdx.x];
val = cooperativeSum<float, ENERGY_BLOCK_SIZE>(val, summation);
// perform simplistic reduction
if (threadIdx.x == 0)
{
// write summation back to shared memory
blockSums[blockIdx.x] = exp(val);
}
}
__global__ void attentionNormalizeAndSumKernel(
const float* const elemAccumsIn, float* const elems, const int numElems, const float* const blockSums)
{
__shared__ float sums[ENERGY_BLOCK_SIZE];
__shared__ float invSum;
// each block sums up the blockSums on its own
float v = 0;
for (int i = threadIdx.x; i < gridDim.x; i += blockDim.x)
{
v += blockSums[i];
}
v = cooperativeSum<float, ENERGY_BLOCK_SIZE>(v, sums);
if (threadIdx.x == 0)
{
invSum = 1.0f / v;
}
__syncthreads();
// normalize and sum
float* const elemAccumsOut = elems + numElems;
for (int i = threadIdx.x + (blockIdx.x * blockDim.x); i < numElems; i += gridDim.x * blockDim.x)
{
const float val = blockSums[i] * invSum;
elems[i] = val;
elemAccumsOut[i] = val + elemAccumsIn[i];
}
}
__global__ void attentionConvolutionKernel(const float* const convWeights, const float* const attWeights,
float* const output, const int inputLength, const int kernelSize)
{
__shared__ float kernels[32 * 2];
__shared__ float input[(CONV_BLOCK_SIZE + 32) * 2];
__shared__ float sum[CONV_BLOCK_SIZE * 2];
const int halfKernel = (kernelSize - 1) / 2;
const int inputOffset = 32 - halfKernel;
// all threads work to populate the shared memory kernels
if (threadIdx.x < kernelSize)
{
kernels[threadIdx.x + threadIdx.y * 32]
= convWeights[blockIdx.x * (kernelSize * 2) + (threadIdx.x + threadIdx.y * kernelSize)];
}
// set initial input zero for second half
if (threadIdx.x < 32)
{
if (threadIdx.x < halfKernel || threadIdx.x - halfKernel >= inputLength)
{
input[CONV_BLOCK_SIZE + threadIdx.x + threadIdx.y * (CONV_BLOCK_SIZE + 32)] = 0;
}
else
{
input[CONV_BLOCK_SIZE + threadIdx.x + threadIdx.y * (CONV_BLOCK_SIZE + 32)]
= attWeights[threadIdx.x - halfKernel + threadIdx.y * inputLength];
}
}
__syncthreads();
for (int i = 0; i < inputLength; i += CONV_BLOCK_SIZE)
{
// shift second half into first half
if (threadIdx.x < 32)
{
input[threadIdx.x + threadIdx.y * (CONV_BLOCK_SIZE + 32)]
= input[CONV_BLOCK_SIZE + threadIdx.x + threadIdx.y * (CONV_BLOCK_SIZE + 32)];
}
__syncthreads();
// copy in second half
float v = 0;
if (i + threadIdx.x + inputOffset < inputLength)
{
v = attWeights[i + threadIdx.x + inputOffset + threadIdx.y * inputLength];
}
input[32 + threadIdx.x + threadIdx.y * (CONV_BLOCK_SIZE + 32)] = v;
__syncthreads();
// multiply with kernel
float a = 0.0f;
for (int j = 0; j < kernelSize; ++j)
{
const int k = threadIdx.x + j + threadIdx.y * (CONV_BLOCK_SIZE + 32);
a += input[k] * kernels[j + threadIdx.y * 32];
}
sum[threadIdx.x + threadIdx.y * CONV_BLOCK_SIZE] = a;
__syncthreads();
// write to global memory
if (threadIdx.y == 0 && threadIdx.x + i < inputLength)
{
output[(blockIdx.x * inputLength) + i + threadIdx.x]
= sum[threadIdx.x] + sum[threadIdx.x + CONV_BLOCK_SIZE];
}
}
}
/******************************************************************************
* CONSTRUCTORS / DESTRUCTOR **************************************************
*****************************************************************************/
Taco2AttentionLayerKernel::Taco2AttentionLayerKernel(
const std::vector<float>& queryWeightsHost,
const std::vector<float>& convWeightsHost,
const std::vector<float>& locationWeightsHost,
const std::vector<float>& energyWeightsHost,
const int encLength,
const int numQueryDimension,
const int numFilters,
const int convKernelSize,
const int numAttentionDimension) :
mNumEncodingDimension(encLength),
mNumQueryDimension(numQueryDimension),
mNumFilters(numFilters),
mConvKernelSize(convKernelSize),
mNumAttentionDimension(numAttentionDimension),
mQueryWeightsDevice(),
mConvWeightsDevice(),
mLocationWeightsDevice(),
mEnergyWeightsDevice(),
mCublasHandle{}
{
const size_t numExpectedQueryWeights = mNumAttentionDimension * mNumQueryDimension;
const size_t numExpectedConvWeights = mNumFilters * mConvKernelSize * 2;
const size_t numExpectedLocationWeights = mNumAttentionDimension * mNumFilters;
const size_t numExpectedEnergyWeights = mNumAttentionDimension;
if (queryWeightsHost.size() != numExpectedQueryWeights)
{
throw std::runtime_error("Expected " + std::to_string(numExpectedQueryWeights) + " query weights but got "
+ std::to_string(queryWeightsHost.size()) + " instead.");
}
else if (convWeightsHost.size() != numExpectedConvWeights)
{
throw std::runtime_error("Expected " + std::to_string(numExpectedConvWeights) + " convolution weights but got "
+ std::to_string(convWeightsHost.size()) + " instead.");
}
else if (locationWeightsHost.size() != numExpectedLocationWeights)
{
throw std::runtime_error("Expected " + std::to_string(numExpectedLocationWeights) + " location weights but got "
+ std::to_string(locationWeightsHost.size()) + " instead.");
}
else if (energyWeightsHost.size() != numExpectedEnergyWeights)
{
throw std::runtime_error("Expected " + std::to_string(numExpectedEnergyWeights) + " energy weights but got "
+ std::to_string(energyWeightsHost.size()) + " instead.");
}
// copy up weights to GPU
// keep in row major [128x1024]
mQueryWeightsDevice = CudaMemory<float>(queryWeightsHost);
// convolution has [32x2x31] weights (filters x kernel size).
mConvWeightsDevice = CudaMemory<float>(convWeightsHost);
// transpose from column major [32x128] to column major [128x32]
std::vector<float> transLocationWeights(locationWeightsHost.size());
for (int j = 0; j < mNumAttentionDimension; ++j)
{
for (int i = 0; i < mNumFilters; ++i)
{
transLocationWeights[i * mNumAttentionDimension + j] = locationWeightsHost[j * mNumFilters + i];
}
}
mLocationWeightsDevice = CudaMemory<float>(transLocationWeights);
// energy FC is [1x128]
mEnergyWeightsDevice = CudaMemory<float>(energyWeightsHost);
// initialize cublas
if (cublasCreate(&mCublasHandle) != CUBLAS_STATUS_SUCCESS)
{
throw std::runtime_error("Failed to create cublas handle.");
}
}
Taco2AttentionLayerKernel::~Taco2AttentionLayerKernel()
{
cublasDestroy(mCublasHandle);
}
/******************************************************************************
* PUBLIC METHODS *************************************************************
*****************************************************************************/
void Taco2AttentionLayerKernel::execute(const float* const memoryDevice, const float* const processedMemoryDevice,
const float* const weightsDevice, const float* const attentionHiddenDevice, float* const outputContextDevice,
float* const outputWeightsDevice, const int inputLength, float* const workspace, cudaStream_t stream)
{
float* const queryOutput = workspace;
float* const convOutput = queryOutput + mNumAttentionDimension;
float* const elemSum = convOutput + (inputLength * mNumFilters);
float* const energyScratch = elemSum + (inputLength * mNumAttentionDimension);
cublasSetStream(mCublasHandle, stream);
// launch fully connected layer to parse LSTM hidden states -
// multiplying 128x1024 weights with 1024 inputs, to get 128 outputs
{
const dim3 grid(mNumAttentionDimension);
const dim3 block(QUERY_COL_SIZE);
attentionQueryGemvKernel<<<grid, block, 0, stream>>>(
mQueryWeightsDevice.data(),
attentionHiddenDevice,
queryOutput,
mNumQueryDimension,
mNumAttentionDimension);
}
// perform convolution
{
const dim3 grid(mNumFilters);
const dim3 block(CONV_BLOCK_SIZE, 2);
// only works for 2 channels
assert(mConvKernelSize <= CONV_BLOCK_SIZE);
attentionConvolutionKernel<<<grid, block, 0, stream>>>(
mConvWeightsDevice.data(),
weightsDevice,
convOutput,
inputLength,
mConvKernelSize);
}
// location linear layer - 128x128x32
cublasStatus_t err = cublasSgemm(
mCublasHandle,
CUBLAS_OP_N,
CUBLAS_OP_T,
mNumAttentionDimension,
inputLength,
mNumFilters,
&ONE,
mLocationWeightsDevice.data(),
mNumAttentionDimension,
convOutput,
inputLength,
&ZERO,
elemSum,
mNumAttentionDimension);
if (err != CUBLAS_STATUS_SUCCESS)
{
throw std::runtime_error("Location layer failed in cublas.");
}
// perform energy calculation
{
const int numBlocks = inputLength;
if (ENERGY_BLOCK_SIZE != mNumAttentionDimension)
{
throw std::runtime_error("mNumAttentionDimension must be " + std::to_string(ENERGY_BLOCK_SIZE));
}
const dim3 grid(numBlocks);
const dim3 block(ENERGY_BLOCK_SIZE);
attentionEnergyKernel<<<grid, block, 0, stream>>>(
queryOutput,
processedMemoryDevice,
elemSum,
mEnergyWeightsDevice.data(),
inputLength,
energyScratch);
attentionNormalizeAndSumKernel<<<grid, block, 0, stream>>>(
weightsDevice + inputLength, outputWeightsDevice, inputLength, energyScratch);
}
// finally perform mmLayer
err = cublasSgemv(mCublasHandle, CUBLAS_OP_N, mNumEncodingDimension, inputLength, &ONE, memoryDevice,
mNumEncodingDimension, outputWeightsDevice, 1, &ZERO, outputContextDevice, 1);
if (err != CUBLAS_STATUS_SUCCESS)
{
throw std::runtime_error("Matrix multiply layer failed in cublas.");
}
}
} // namespace plugin
} // namespace nvinfer1
|
PaddlePaddle/Classification/RN50v1.5 | RN50v1.5 | profile | # Copyright (c) 2022 NVIDIA Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
from contextlib import contextmanager
from utils.cuda_bind import cuda_profile_start, cuda_profile_stop
from utils.cuda_bind import cuda_nvtx_range_push, cuda_nvtx_range_pop
class Profiler:
def __init__(self):
super().__init__()
self._enable_profile = int(os.environ.get('ENABLE_PROFILE', 0))
self._start_step = int(os.environ.get('PROFILE_START_STEP', 0))
self._stop_step = int(os.environ.get('PROFILE_STOP_STEP', 0))
if self._enable_profile:
log_msg = f"Profiling start at {self._start_step}-th and stop at {self._stop_step}-th iteration"
logging.info(log_msg)
def profile_setup(self, step):
"""
Set up profiling-related state.
Args:
step (int): the index of iteration.
Return:
stop (bool): a signal to indicate whether profiling should stop or not.
"""
if self._enable_profile and step == self._start_step:
cuda_profile_start()
logging.info("Profiling start at %d-th iteration",
self._start_step)
if self._enable_profile and step == self._stop_step:
cuda_profile_stop()
logging.info("Profiling stop at %d-th iteration", self._stop_step)
return True
return False
def profile_tag_push(self, step, msg):
if self._enable_profile and \
step >= self._start_step and \
step < self._stop_step:
tag_msg = f"Iter-{step}-{msg}"
cuda_nvtx_range_push(tag_msg)
def profile_tag_pop(self):
if self._enable_profile:
cuda_nvtx_range_pop()
@contextmanager
def profile_tag(self, step, msg):
self.profile_tag_push(step, msg)
yield
self.profile_tag_pop()
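# A minimal usage sketch (hypothetical training loop; profiling is switched on
# via the ENABLE_PROFILE / PROFILE_START_STEP / PROFILE_STOP_STEP env vars):
#   profiler = Profiler()
#   for step in range(num_steps):
#       with profiler.profile_tag(step, "iter"):
#           train_one_step()  # hypothetical helper
#       if profiler.profile_setup(step):
#           break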
|
TensorFlow/Detection/SSD/models/research/object_detection/core | core | batcher_test | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for object_detection.core.batcher."""
import numpy as np
import tensorflow as tf
from object_detection.core import batcher
slim = tf.contrib.slim
class BatcherTest(tf.test.TestCase):
def test_batch_and_unpad_2d_tensors_of_different_sizes_in_1st_dimension(self):
with self.test_session() as sess:
batch_size = 3
num_batches = 2
examples = tf.Variable(tf.constant(2, dtype=tf.int32))
counter = examples.count_up_to(num_batches * batch_size + 2)
boxes = tf.tile(
tf.reshape(tf.range(4), [1, 4]), tf.stack([counter, tf.constant(1)]))
batch_queue = batcher.BatchQueue(
tensor_dict={'boxes': boxes},
batch_size=batch_size,
batch_queue_capacity=100,
num_batch_queue_threads=1,
prefetch_queue_capacity=100)
batch = batch_queue.dequeue()
for tensor_dict in batch:
for tensor in tensor_dict.values():
self.assertAllEqual([None, 4], tensor.get_shape().as_list())
tf.initialize_all_variables().run()
with slim.queues.QueueRunners(sess):
i = 2
for _ in range(num_batches):
batch_np = sess.run(batch)
for tensor_dict in batch_np:
for tensor in tensor_dict.values():
self.assertAllEqual(tensor, np.tile(np.arange(4), (i, 1)))
i += 1
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(batch)
def test_batch_and_unpad_2d_tensors_of_different_sizes_in_all_dimensions(
self):
with self.test_session() as sess:
batch_size = 3
num_batches = 2
examples = tf.Variable(tf.constant(2, dtype=tf.int32))
counter = examples.count_up_to(num_batches * batch_size + 2)
image = tf.reshape(
tf.range(counter * counter), tf.stack([counter, counter]))
batch_queue = batcher.BatchQueue(
tensor_dict={'image': image},
batch_size=batch_size,
batch_queue_capacity=100,
num_batch_queue_threads=1,
prefetch_queue_capacity=100)
batch = batch_queue.dequeue()
for tensor_dict in batch:
for tensor in tensor_dict.values():
self.assertAllEqual([None, None], tensor.get_shape().as_list())
tf.initialize_all_variables().run()
with slim.queues.QueueRunners(sess):
i = 2
for _ in range(num_batches):
batch_np = sess.run(batch)
for tensor_dict in batch_np:
for tensor in tensor_dict.values():
self.assertAllEqual(tensor, np.arange(i * i).reshape((i, i)))
i += 1
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(batch)
def test_batch_and_unpad_2d_tensors_of_same_size_in_all_dimensions(self):
with self.test_session() as sess:
batch_size = 3
num_batches = 2
examples = tf.Variable(tf.constant(1, dtype=tf.int32))
counter = examples.count_up_to(num_batches * batch_size + 1)
image = tf.reshape(tf.range(1, 13), [4, 3]) * counter
batch_queue = batcher.BatchQueue(
tensor_dict={'image': image},
batch_size=batch_size,
batch_queue_capacity=100,
num_batch_queue_threads=1,
prefetch_queue_capacity=100)
batch = batch_queue.dequeue()
for tensor_dict in batch:
for tensor in tensor_dict.values():
self.assertAllEqual([4, 3], tensor.get_shape().as_list())
tf.initialize_all_variables().run()
with slim.queues.QueueRunners(sess):
i = 1
for _ in range(num_batches):
batch_np = sess.run(batch)
for tensor_dict in batch_np:
for tensor in tensor_dict.values():
self.assertAllEqual(tensor, np.arange(1, 13).reshape((4, 3)) * i)
i += 1
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(batch)
def test_batcher_when_batch_size_is_one(self):
with self.test_session() as sess:
batch_size = 1
num_batches = 2
examples = tf.Variable(tf.constant(2, dtype=tf.int32))
counter = examples.count_up_to(num_batches * batch_size + 2)
image = tf.reshape(
tf.range(counter * counter), tf.stack([counter, counter]))
batch_queue = batcher.BatchQueue(
tensor_dict={'image': image},
batch_size=batch_size,
batch_queue_capacity=100,
num_batch_queue_threads=1,
prefetch_queue_capacity=100)
batch = batch_queue.dequeue()
for tensor_dict in batch:
for tensor in tensor_dict.values():
self.assertAllEqual([None, None], tensor.get_shape().as_list())
tf.initialize_all_variables().run()
with slim.queues.QueueRunners(sess):
i = 2
for _ in range(num_batches):
batch_np = sess.run(batch)
for tensor_dict in batch_np:
for tensor in tensor_dict.values():
self.assertAllEqual(tensor, np.arange(i * i).reshape((i, i)))
i += 1
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(batch)
if __name__ == '__main__':
tf.test.main()
|
TensorFlow2/Classification/ConvNets/model/blocks | blocks | fused_mb_conv_block | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow as tf
from typing import Any, Dict, Optional, Text, Tuple
from model.layers import get_activation
from model.blocks import conv2d_block
__all__ = ['fused_mb_conv_block']
def fused_mb_conv_block(inputs: tf.Tensor,
block: dict,
config: dict,
prefix: Text = None):
"""Mobile Inverted Residual Bottleneck.
Args:
inputs: the Keras input to the block
block: BlockConfig, arguments to create a Block
config: ModelConfig, a set of model parameters
prefix: prefix for naming all layers
Returns:
the output of the block
"""
# use_se = config.mparams.use_se
activation = get_activation(config.mparams.activation)
drop_connect_rate = config.mparams.drop_connect_rate
conv_dropout = config.mparams.conv_dropout
data_format = tf.keras.backend.image_data_format()
# use_depthwise = block['conv_type'] != 'no_depthwise'
prefix = prefix or ''
filters = block['input_filters'] * block['expand_ratio']
x = inputs
if block['expand_ratio'] != 1:
# Expansion phase
x = conv2d_block(x,
filters,
config,
kernel_size=block['kernel_size'],
strides=block['strides'],
activation=activation,
name=prefix + 'expand')
if conv_dropout and conv_dropout > 0 and block['expand_ratio'] > 1:
x = tf.keras.layers.Dropout(conv_dropout)(x) # training unset
# Squeeze and Excitation phase
if block['se_ratio'] is not None:
assert 0 < block['se_ratio'] <= 1
num_reduced_filters = max(1, int(
block['input_filters'] * block['se_ratio']
))
if data_format == 'channels_first':
se_shape = (filters, 1, 1)
else:
se_shape = (1, 1, filters)
se = tf.keras.layers.GlobalAveragePooling2D(name=prefix + 'se_squeeze', data_format=data_format)(x)
se = tf.keras.layers.Reshape(se_shape, name=prefix + 'se_reshape')(se)
se = conv2d_block(se,
num_reduced_filters,
config,
use_bias=True,
use_batch_norm=False,
activation=activation,
name=prefix + 'se_reduce')
se = conv2d_block(se,
filters,
config,
use_bias=True,
use_batch_norm=False,
activation='sigmoid',
name=prefix + 'se_expand')
x = tf.keras.layers.multiply([x, se], name=prefix + 'se_excite')
# Output phase
x = conv2d_block(x,
block['output_filters'],
config,
kernel_size=1 if block['expand_ratio'] != 1 else block['kernel_size'],
strides=1 if block['expand_ratio'] != 1 else block['strides'],
activation=None,
name=prefix + 'project')
# add act if no expansion. check interaction with identity act below.
if block['expand_ratio'] == 1:
x = tf.keras.layers.Activation(activation,
name=prefix + 'project_activation')(x)
# Add identity so that quantization-aware training can insert quantization
# ops correctly.
x = tf.keras.layers.Activation(get_activation('identity'),
name=prefix + 'id')(x)
if (block['id_skip']
and all(s == 1 for s in block['strides'])
and block['input_filters'] == block['output_filters']):
if drop_connect_rate and drop_connect_rate > 0:
# Apply dropconnect
# The only difference between dropout and dropconnect in TF is scaling by
# drop_connect_rate during training. See:
# https://github.com/keras-team/keras/pull/9898#issuecomment-380577612
x = tf.keras.layers.Dropout(drop_connect_rate,
noise_shape=(None, 1, 1, 1),
name=prefix + 'drop')(x)
x = tf.keras.layers.add([x, inputs], name=prefix + 'add')
return x |
PyTorch/Detection/Efficientdet/effdet/csrc/nms/cpu | cpu | nms_cpu | // Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "cpu/vision.h"
template <typename scalar_t>
at::Tensor nms_cpu_kernel(const at::Tensor& dets,
const at::Tensor& scores,
const float threshold) {
AT_ASSERTM(!dets.is_cuda(), "dets must be a CPU tensor");
AT_ASSERTM(!scores.is_cuda(), "scores must be a CPU tensor");
AT_ASSERTM(dets.scalar_type() == scores.scalar_type(), "dets should have the same type as scores");
if (dets.numel() == 0) {
return at::empty({0}, dets.options().dtype(at::kLong).device(at::kCPU));
}
auto x1_t = dets.select(1, 0).contiguous();
auto y1_t = dets.select(1, 1).contiguous();
auto x2_t = dets.select(1, 2).contiguous();
auto y2_t = dets.select(1, 3).contiguous();
at::Tensor areas_t = (x2_t - x1_t + 1) * (y2_t - y1_t + 1);
auto order_t = std::get<1>(scores.sort(0, /* descending=*/true));
auto ndets = dets.size(0);
at::Tensor suppressed_t = at::zeros({ndets}, dets.options().dtype(at::kByte).device(at::kCPU));
auto suppressed = suppressed_t.data_ptr<uint8_t>();
auto order = order_t.data_ptr<int64_t>();
auto x1 = x1_t.data_ptr<scalar_t>();
auto y1 = y1_t.data_ptr<scalar_t>();
auto x2 = x2_t.data_ptr<scalar_t>();
auto y2 = y2_t.data_ptr<scalar_t>();
auto areas = areas_t.data_ptr<scalar_t>();
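// Greedy NMS: visit boxes in descending score order; suppress every later box whose IoU with the kept box reaches the threshold.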
for (int64_t _i = 0; _i < ndets; _i++) {
auto i = order[_i];
if (suppressed[i] == 1)
continue;
auto ix1 = x1[i];
auto iy1 = y1[i];
auto ix2 = x2[i];
auto iy2 = y2[i];
auto iarea = areas[i];
for (int64_t _j = _i + 1; _j < ndets; _j++) {
auto j = order[_j];
if (suppressed[j] == 1)
continue;
auto xx1 = std::max(ix1, x1[j]);
auto yy1 = std::max(iy1, y1[j]);
auto xx2 = std::min(ix2, x2[j]);
auto yy2 = std::min(iy2, y2[j]);
auto w = std::max(static_cast<scalar_t>(0), xx2 - xx1 + 1);
auto h = std::max(static_cast<scalar_t>(0), yy2 - yy1 + 1);
auto inter = w * h;
auto ovr = inter / (iarea + areas[j] - inter);
if (ovr >= threshold)
suppressed[j] = 1;
}
}
return at::nonzero(suppressed_t == 0).squeeze(1);
}
at::Tensor nms_cpu(const at::Tensor& dets,
const at::Tensor& scores,
const float threshold) {
at::Tensor result;
AT_DISPATCH_FLOATING_TYPES(dets.scalar_type(), "nms", [&] {
result = nms_cpu_kernel<scalar_t>(dets, scores, threshold);
});
return result;
}
|
TensorFlow2/LanguageModeling/BERT/official/utils/flags | flags | guidelines | # Using flags in official models
1. **All common flags must be incorporated in the models.**
Common flags (e.g. batch_size, model_dir) are provided by various flag definition functions,
and channeled through `official.utils.flags.core`. For instance, to define common supervised
learning parameters, one could use the following code:
```python
from absl import app as absl_app
from absl import flags
from official.utils.flags import core as flags_core
def define_flags():
flags_core.define_base()
flags.adopt_key_flags(flags_core)
def main(_):
flags_obj = flags.FLAGS
print(flags_obj)
if __name__ == "__main__"
absl_app.run(main)
```
2. **Validate flag values.**
See the [Validators](#validators) section for implementation details.
Validators in the official model repo should not access the file system, such as verifying
that files exist, due to the strict ordering requirements.
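A minimal sketch of such a validator, using `flags.register_validator` from absl
(the `batch_size` flag here is illustrative, not taken from the repo):
```python
from absl import flags

flags.DEFINE_integer("batch_size", 32, "Batch size for training.")

# Checks only the flag value; it does not touch the file system.
flags.register_validator(
    "batch_size",
    lambda value: value > 0,
    message="--batch_size must be a positive integer.")
```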
3. **Flag values should not be mutated.**
Instead of mutating flag values, use getter functions to return the desired values. An example
getter function, `get_tf_dtype`, is shown below:
```python
# Map string to TensorFlow dtype
DTYPE_MAP = {
"fp16": tf.float16,
"fp32": tf.float32,
}
def get_tf_dtype(flags_obj):
if getattr(flags_obj, "fp16_implementation", None) == "graph_rewrite":
# If the graph_rewrite is used, we build the graph with fp32, and let the
# graph rewrite change ops to fp16.
return tf.float32
return DTYPE_MAP[flags_obj.dtype]
def main(_):
flags_obj = flags.FLAGS
# Do not mutate flags_obj
# if flags_obj.fp16_implementation == "graph_rewrite":
# flags_obj.dtype = "float32" # Don't do this
print(get_tf_dtype(flags_obj))
...
``` |
PyTorch/Segmentation/MaskRCNN/pytorch/maskrcnn_benchmark/modeling/rpn | rpn | loss | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
"""
This file contains specific functions for computing losses for the RPN.
"""
import torch
from torch.nn import functional as F
from ..balanced_positive_negative_sampler import BalancedPositiveNegativeSampler
from ..utils import cat
from maskrcnn_benchmark.layers import smooth_l1_loss
from maskrcnn_benchmark.modeling.matcher import Matcher
from maskrcnn_benchmark.structures.boxlist_ops import boxlist_iou
from maskrcnn_benchmark.structures.boxlist_ops import cat_boxlist
class RPNLossComputation(object):
"""
This class computes the RPN loss.
"""
def __init__(self, proposal_matcher, fg_bg_sampler, box_coder):
"""
Arguments:
proposal_matcher (Matcher)
fg_bg_sampler (BalancedPositiveNegativeSampler)
box_coder (BoxCoder)
"""
# self.target_preparator = target_preparator
self.proposal_matcher = proposal_matcher
self.fg_bg_sampler = fg_bg_sampler
self.box_coder = box_coder
def match_targets_to_anchors(self, anchor, target):
match_quality_matrix = boxlist_iou(target, anchor)
matched_idxs = self.proposal_matcher(match_quality_matrix)
# RPN doesn't need any fields from target
# for creating the labels, so clear them all
target = target.copy_with_fields([])
# get the targets corresponding GT for each anchor
# NB: need to clamp the indices because we can have a single
# GT in the image, and matched_idxs can be -2, which goes
# out of bounds
matched_targets = target[matched_idxs.clamp(min=0)]
matched_targets.add_field("matched_idxs", matched_idxs)
return matched_targets
def prepare_targets(self, anchors, targets):
labels = []
regression_targets = []
for anchors_per_image, targets_per_image in zip(anchors, targets):
matched_targets = self.match_targets_to_anchors(
anchors_per_image, targets_per_image
)
matched_idxs = matched_targets.get_field("matched_idxs")
labels_per_image = matched_idxs >= 0
labels_per_image = labels_per_image.to(dtype=torch.float32)
# discard anchors that go out of the boundaries of the image
labels_per_image[~anchors_per_image.get_field("visibility")] = -1
# discard indices that are between thresholds
inds_to_discard = matched_idxs == Matcher.BETWEEN_THRESHOLDS
labels_per_image[inds_to_discard] = -1
# compute regression targets
regression_targets_per_image = self.box_coder.encode(
matched_targets.bbox, anchors_per_image.bbox
)
labels.append(labels_per_image)
regression_targets.append(regression_targets_per_image)
return labels, regression_targets
def __call__(self, anchors, objectness, box_regression, targets):
"""
Arguments:
anchors (list[BoxList])
objectness (list[Tensor])
box_regression (list[Tensor])
targets (list[BoxList])
Returns:
objectness_loss (Tensor)
box_loss (Tensor)
"""
anchors = [cat_boxlist(anchors_per_image) for anchors_per_image in anchors]
labels, regression_targets = self.prepare_targets(anchors, targets)
sampled_pos_inds, sampled_neg_inds = self.fg_bg_sampler(labels)
sampled_pos_inds = torch.nonzero(torch.cat(sampled_pos_inds, dim=0)).squeeze(1)
sampled_neg_inds = torch.nonzero(torch.cat(sampled_neg_inds, dim=0)).squeeze(1)
sampled_inds = torch.cat([sampled_pos_inds, sampled_neg_inds], dim=0)
objectness_flattened = []
box_regression_flattened = []
# for each feature level, permute the outputs to make them be in the
# same format as the labels. Note that the labels are computed for
# all feature levels concatenated, so we keep the same representation
# for the objectness and the box_regression
for objectness_per_level, box_regression_per_level in zip(
objectness, box_regression
):
N, A, H, W = objectness_per_level.shape
objectness_per_level = objectness_per_level.permute(0, 2, 3, 1).reshape(
N, -1
)
box_regression_per_level = box_regression_per_level.view(N, -1, 4, H, W)
box_regression_per_level = box_regression_per_level.permute(0, 3, 4, 1, 2)
box_regression_per_level = box_regression_per_level.reshape(N, -1, 4)
objectness_flattened.append(objectness_per_level)
box_regression_flattened.append(box_regression_per_level)
# concatenate on the first dimension (representing the feature levels), to
# take into account the way the labels were generated (with all feature maps
# being concatenated as well)
objectness = cat(objectness_flattened, dim=1).reshape(-1)
box_regression = cat(box_regression_flattened, dim=1).reshape(-1, 4)
labels = torch.cat(labels, dim=0)
regression_targets = torch.cat(regression_targets, dim=0)
box_loss = smooth_l1_loss(
box_regression[sampled_pos_inds],
regression_targets[sampled_pos_inds],
beta=1.0 / 9,
size_average=False,
) / (sampled_inds.numel())
objectness_loss = F.binary_cross_entropy_with_logits(
objectness[sampled_inds], labels[sampled_inds]
)
return objectness_loss, box_loss
def make_rpn_loss_evaluator(cfg, box_coder):
matcher = Matcher(
cfg.MODEL.RPN.FG_IOU_THRESHOLD,
cfg.MODEL.RPN.BG_IOU_THRESHOLD,
allow_low_quality_matches=True,
)
fg_bg_sampler = BalancedPositiveNegativeSampler(
cfg.MODEL.RPN.BATCH_SIZE_PER_IMAGE, cfg.MODEL.RPN.POSITIVE_FRACTION
)
loss_evaluator = RPNLossComputation(matcher, fg_bg_sampler, box_coder)
return loss_evaluator
|
PyTorch/SpeechRecognition/Jasper/jasper | jasper | config | import copy
import inspect
import typing
from ast import literal_eval
from contextlib import suppress
from numbers import Number
import yaml
from .model import JasperDecoderForCTC, JasperBlock, JasperEncoder
from common.audio import GainPerturbation, ShiftPerturbation, SpeedPerturbation
from common.dataset import AudioDataset
from common.features import CutoutAugment, FilterbankFeatures, SpecAugment
from common.helpers import print_once
def default_args(klass):
sig = inspect.signature(klass.__init__)
return {k: v.default for k,v in sig.parameters.items() if k != 'self'}
def load(fpath):
if fpath.endswith('.toml'):
raise ValueError('.toml config format has been changed to .yaml')
cfg = yaml.safe_load(open(fpath, 'r'))
# Reload to deep copy shallow copies, which were made with yaml anchors
yaml.Dumper.ignore_aliases = lambda *args: True
cfg = yaml.dump(cfg)
cfg = yaml.safe_load(cfg)
return cfg
def validate_and_fill(klass, user_conf, ignore_unk=[], optional=[]):
conf = default_args(klass)
for k,v in user_conf.items():
assert k in conf or k in ignore_unk, f'Unknown parameter {k} for {klass}'
conf[k] = v
# Keep only mandatory or optional-nonempty
conf = {k:v for k,v in conf.items()
if k not in optional or v is not inspect.Parameter.empty}
# Validate
for k,v in conf.items():
assert v is not inspect.Parameter.empty, \
f'Value for {k} not specified for {klass}'
return conf
def input(conf_yaml, split='train'):
conf = copy.deepcopy(conf_yaml[f'input_{split}'])
conf_dataset = conf.pop('audio_dataset')
conf_features = conf.pop('filterbank_features')
# Validate known inner classes
inner_classes = [
(conf_dataset, 'speed_perturbation', SpeedPerturbation),
(conf_dataset, 'gain_perturbation', GainPerturbation),
(conf_dataset, 'shift_perturbation', ShiftPerturbation),
(conf_features, 'spec_augment', SpecAugment),
(conf_features, 'cutout_augment', CutoutAugment),
]
for conf_tgt, key, klass in inner_classes:
if key in conf_tgt:
conf_tgt[key] = validate_and_fill(klass, conf_tgt[key])
for k in conf:
raise ValueError(f'Unknown key {k}')
# Validate outer classes
conf_dataset = validate_and_fill(
AudioDataset, conf_dataset,
optional=['data_dir', 'labels', 'manifest_fpaths'])
conf_features = validate_and_fill(
FilterbankFeatures, conf_features)
# Check params shared between classes
shared = ['sample_rate', 'max_duration', 'pad_to_max_duration']
for sh in shared:
assert conf_dataset[sh] == conf_features[sh], (
f'{sh} should match in Dataset and FeatureProcessor: '
f'{conf_dataset[sh]}, {conf_features[sh]}')
return conf_dataset, conf_features
def encoder(conf):
"""Validate config for JasperEncoder and subsequent JasperBlocks"""
# Validate, but don't overwrite with defaults
for blk in conf['jasper']['encoder']['blocks']:
validate_and_fill(JasperBlock, blk, optional=['infilters'],
ignore_unk=['residual_dense'])
return validate_and_fill(JasperEncoder, conf['jasper']['encoder'])
def decoder(conf, n_classes):
decoder_kw = {'n_classes': n_classes, **conf['jasper']['decoder']}
return validate_and_fill(JasperDecoderForCTC, decoder_kw)
def apply_config_overrides(conf, args):
if args.override_config is None:
return
for override_key_val in args.override_config:
key, val = override_key_val.split('=')
with suppress(TypeError, ValueError):
val = literal_eval(val)
apply_nested_config_override(conf, key, val)
def apply_nested_config_override(conf, key_str, val):
fields = key_str.split('.')
for f in fields[:-1]:
conf = conf[f]
f = fields[-1]
assert (f not in conf
or type(val) is type(conf[f])
or (isinstance(val, Number) and isinstance(conf[f], Number)))
conf[f] = val
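# Hypothetical example (the intermediate keys must already exist in the YAML):
#   --override_config input_train.audio_dataset.max_duration=16.7
# sets conf['input_train']['audio_dataset']['max_duration'] = 16.7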
|
PyTorch/SpeechRecognition/wav2vec2/scripts | scripts | pretrain_base_benchmark | #!/usr/bin/env bash
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -a
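# set -a auto-exports every variable assigned below, so the settings reach scripts/pretrain_base.sh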
: ${NUM_WARMUP_EPOCHS:=2} # Number of warmup epochs
: ${NUM_EPOCHS:=5} # Number of epochs for collecting perf measurements
: ${TRAIN_SUBSET:="train-full-960"}
: ${FP16:=false}
: ${BF16:=false}
: ${NUM_GPUS:=8}
: ${MAX_TOKENS:=1400000}
: ${NUM_CONCAT_BATCHES:=8}
: ${UPDATE_FREQ:=1}
if [ "$FP16" = true ]; then PREC=fp16; elif [ "$BF16" = true ]; then PREC=bf16; else PREC=fp32; fi
: ${OUTPUT_DIR:="results/pretrain_base_benchmark_${NUM_GPUS}x${MAX_TOKENS}x${NUM_CONCAT_BATCHES}x${UPDATE_FREQ}_${PREC}"}
NO_SAVE=true
EPOCHS_THIS_JOB=$(($NUM_EPOCHS + $NUM_WARMUP_EPOCHS))
ARGS+=" --benchmark_epochs_num $NUM_EPOCHS"
bash scripts/pretrain_base.sh
|
PyTorch/SpeechSynthesis/HiFiGAN/scripts | scripts | download_models | #!/usr/bin/env bash
set -e
MODEL_NAMES="$@"
[ -z "$MODEL_NAMES" ] && { echo "Usage: $0 [fastpitch|waveglow|hifigan|hifigan-finetuned-fastpitch]"; exit 1; }
function download_ngc_model() {
# bind the positional arguments so the function does not rely on caller globals
local MODEL_DIR="$1" MODEL_ZIP="$2" MODEL="$3" MODEL_URL="$4"
mkdir -p "$MODEL_DIR"
if [ ! -f "${MODEL_DIR}/${MODEL_ZIP}" ]; then
echo "Downloading ${MODEL_ZIP} ..."
wget --content-disposition -O ${MODEL_DIR}/${MODEL_ZIP} ${MODEL_URL} \
|| { echo "ERROR: Failed to download ${MODEL_ZIP} from NGC"; exit 1; }
fi
if [ ! -f "${MODEL_DIR}/${MODEL}" ]; then
echo "Extracting ${MODEL} ..."
unzip -qo ${MODEL_DIR}/${MODEL_ZIP} -d ${MODEL_DIR} \
|| { echo "ERROR: Failed to extract ${MODEL_ZIP}"; exit 1; }
echo "OK"
else
echo "${MODEL} already downloaded."
fi
}
for MODEL_NAME in $MODEL_NAMES
do
case $MODEL_NAME in
"fastpitch")
MODEL_DIR="pretrained_models/fastpitch"
MODEL_ZIP="fastpitch_pyt_fp32_ckpt_v1_1_21.05.0.zip"
MODEL="nvidia_fastpitch_210824.pt"
MODEL_URL="https://api.ngc.nvidia.com/v2/models/nvidia/fastpitch_pyt_fp32_ckpt_v1_1/versions/21.05.0/zip"
;;
"hifigan")
MODEL_DIR="pretrained_models/hifigan"
MODEL_ZIP="hifigan__pyt_ckpt_ds-ljs22khz_21.08.0_amp.zip"
MODEL="hifigan_gen_checkpoint_6500.pt"
MODEL_URL="https://api.ngc.nvidia.com/v2/models/nvidia/dle/hifigan__pyt_ckpt_ds-ljs22khz/versions/21.08.0_amp/zip"
;;
"hifigan-finetuned-fastpitch")
MODEL_DIR="pretrained_models/hifigan"
MODEL_ZIP="hifigan__pyt_ckpt_mode-finetune_ds-ljs22khz_21.08.0_amp.zip"
MODEL="hifigan_gen_checkpoint_10000_ft.pt"
MODEL_URL="https://api.ngc.nvidia.com/v2/models/nvidia/dle/hifigan__pyt_ckpt_mode-finetune_ds-ljs22khz/versions/21.08.0_amp/zip"
;;
"waveglow")
MODEL_DIR="pretrained_models/waveglow"
MODEL_ZIP="waveglow_ckpt_amp_256_20.01.0.zip"
MODEL="nvidia_waveglow256pyt_fp16.pt"
MODEL_URL="https://api.ngc.nvidia.com/v2/models/nvidia/waveglow_ckpt_amp_256/versions/20.01.0/zip"
;;
*)
echo "Unrecognized model: ${MODEL_NAME}"
exit 2
;;
esac
download_ngc_model "$MODEL_DIR" "$MODEL_ZIP" "$MODEL" "$MODEL_URL"
done
|
PyTorch/SpeechSynthesis/FastPitch/common/text/unidecoder | unidecoder | __init__ | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import warnings
from .homoglyphs import homoglyphs
from .replacements import replacements
_replacements = {uni: asc for uni, asc in replacements}
_homoglyphs = {g: asc for asc, glyphs in homoglyphs.items() for g in glyphs}
def unidecoder(s, homoglyphs=False):
"""Transliterate unicode
Args:
s (str): unicode string
homoglyphs (bool): prioritize translating to homoglyphs
"""
warned = False # Once per utterance
ret = ''
for u in s:
if ord(u) < 127:
a = u
elif homoglyphs:
a = _homoglyphs.get(u, _replacements.get(u, None))
else:
a = _replacements.get(u, _homoglyphs.get(u, None))
if a is None:
if not warned:
warnings.warn(f'Unexpected character {u}: '
'please revise your text cleaning rules.',
stacklevel=10**6)
warned = True
else:
ret += a
return ret
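# Example (exact output depends on the replacement/homoglyph tables):
#   unidecoder('Crème brûlée') -> 'Creme brulee'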
|
PyTorch/Detection/Efficientdet/utils | utils | scheduler | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict, Any
import math
import numpy as np
import torch
class Scheduler:
""" Parameter Scheduler Base Class
A scheduler base class that can be used to schedule any optimizer parameter groups.
Unlike the builtin PyTorch schedulers, this is intended to be consistently called
* At the END of each epoch, before incrementing the epoch count, to calculate next epoch's value
* At the END of each optimizer update, after incrementing the update count, to calculate next update's value
The schedulers built on this should try to remain as stateless as possible (for simplicity).
This family of schedulers is attempting to avoid the confusion of the meaning of 'last_epoch'
and -1 values for special behaviour. All epoch and update counts must be tracked in the training
code and explicitly passed in to the schedulers on the corresponding step or step_update call.
Based on ideas from:
* https://github.com/pytorch/fairseq/tree/master/fairseq/optim/lr_scheduler
* https://github.com/allenai/allennlp/tree/master/allennlp/training/learning_rate_schedulers
"""
def __init__(self,
optimizer: torch.optim.Optimizer,
param_group_field: str,
noise_range_t=None,
noise_type='normal',
noise_pct=0.67,
noise_std=1.0,
noise_seed=None,
initialize: bool = True) -> None:
self.optimizer = optimizer
self.param_group_field = param_group_field
self._initial_param_group_field = f"initial_{param_group_field}"
if initialize:
for i, group in enumerate(self.optimizer.param_groups):
if param_group_field not in group:
raise KeyError(f"{param_group_field} missing from param_groups[{i}]")
group.setdefault(self._initial_param_group_field, group[param_group_field])
else:
for i, group in enumerate(self.optimizer.param_groups):
if self._initial_param_group_field not in group:
raise KeyError(f"{self._initial_param_group_field} missing from param_groups[{i}]")
self.base_values = [group[self._initial_param_group_field] for group in self.optimizer.param_groups]
self.metric = None # any point to having this for all?
self.noise_range_t = noise_range_t
self.noise_pct = noise_pct
self.noise_type = noise_type
self.noise_std = noise_std
self.noise_seed = noise_seed if noise_seed is not None else 42
self.update_groups(self.base_values)
def state_dict(self) -> Dict[str, Any]:
return {key: value for key, value in self.__dict__.items() if key != 'optimizer'}
def load_state_dict(self, state_dict: Dict[str, Any]) -> None:
self.__dict__.update(state_dict)
def get_epoch_values(self, epoch: int):
return None
def get_update_values(self, num_updates: int):
return None
def step(self, epoch: int, metric: float = None) -> None:
self.metric = metric
values = self.get_epoch_values(epoch)
if values is not None:
values = self._add_noise(values, epoch)
self.update_groups(values)
def step_update(self, num_updates: int, metric: float = None):
self.metric = metric
values = self.get_update_values(num_updates)
if values is not None:
values = self._add_noise(values, num_updates)
self.update_groups(values)
def update_groups(self, values):
if not isinstance(values, (list, tuple)):
values = [values] * len(self.optimizer.param_groups)
for param_group, value in zip(self.optimizer.param_groups, values):
param_group[self.param_group_field] = value
def _add_noise(self, lrs, t):
if self.noise_range_t is not None:
if isinstance(self.noise_range_t, (list, tuple)):
apply_noise = self.noise_range_t[0] <= t < self.noise_range_t[1]
else:
apply_noise = t >= self.noise_range_t
if apply_noise:
g = torch.Generator()
g.manual_seed(self.noise_seed + t)
if self.noise_type == 'normal':
while True:
# resample if noise out of percent limit, brute force but shouldn't spin much
noise = torch.randn(1, generator=g).item()
if abs(noise) < self.noise_pct:
break
else:
noise = 2 * (torch.rand(1, generator=g).item() - 0.5) * self.noise_pct
lrs = [v + v * noise for v in lrs]
return lrs
class CosineLRScheduler(Scheduler):
"""
Cosine decay with restarts.
This is described in the paper https://arxiv.org/abs/1608.03983.
Inspiration from
https://github.com/allenai/allennlp/blob/master/allennlp/training/learning_rate_schedulers/cosine.py
"""
def __init__(self,
optimizer: torch.optim.Optimizer,
t_initial: int,
t_mul: float = 1.,
lr_min: float = 0.,
decay_rate: float = 1.,
warmup_t=0,
warmup_lr_init=0,
warmup_prefix=False,
cycle_limit=0,
t_in_epochs=True,
noise_range_t=None,
noise_pct=0.67,
noise_std=1.0,
noise_seed=42,
initialize=True) -> None:
super().__init__(
optimizer, param_group_field="lr",
noise_range_t=noise_range_t, noise_pct=noise_pct, noise_std=noise_std, noise_seed=noise_seed,
initialize=initialize)
assert t_initial > 0
assert lr_min >= 0
if t_initial == 1 and t_mul == 1 and decay_rate == 1:
print("Cosine annealing scheduler will have no effect on the learning "
"rate since t_initial = t_mul = eta_mul = 1.")
self.t_initial = t_initial
self.t_mul = t_mul
self.lr_min = lr_min
self.decay_rate = decay_rate
self.cycle_limit = cycle_limit
self.warmup_t = warmup_t
self.warmup_lr_init = warmup_lr_init
self.warmup_prefix = warmup_prefix
self.t_in_epochs = t_in_epochs
if self.warmup_t:
self.warmup_steps = [(v - warmup_lr_init) / self.warmup_t for v in self.base_values]
super().update_groups(self.warmup_lr_init)
else:
self.warmup_steps = [1 for _ in self.base_values]
def _get_lr(self, t):
if t < self.warmup_t:
lrs = [self.warmup_lr_init + t * s for s in self.warmup_steps]
else:
if self.warmup_prefix:
t = t - self.warmup_t
if self.t_mul != 1:
i = math.floor(math.log(1 - t / self.t_initial * (1 - self.t_mul), self.t_mul))
t_i = self.t_mul ** i * self.t_initial
t_curr = t - (1 - self.t_mul ** i) / (1 - self.t_mul) * self.t_initial
else:
i = t // self.t_initial
t_i = self.t_initial
t_curr = t - (self.t_initial * i)
gamma = self.decay_rate ** i
lr_min = self.lr_min * gamma
lr_max_values = [v * gamma for v in self.base_values]
if self.cycle_limit == 0 or (self.cycle_limit > 0 and i < self.cycle_limit):
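# Cosine annealing within the current cycle: lr = lr_min + 0.5 * (lr_max - lr_min) * (1 + cos(pi * t_curr / t_i))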
lrs = [
lr_min + 0.5 * (lr_max - lr_min) * (1 + math.cos(math.pi * t_curr / t_i)) for lr_max in lr_max_values
]
else:
lrs = [self.lr_min for _ in self.base_values]
return lrs
def get_epoch_values(self, epoch: int):
if self.t_in_epochs:
return self._get_lr(epoch)
else:
return None
def get_update_values(self, num_updates: int):
if not self.t_in_epochs:
return self._get_lr(num_updates)
else:
return None
def get_cycle_length(self, cycles=0):
if not cycles:
cycles = self.cycle_limit
cycles = max(1, cycles)
if self.t_mul == 1.0:
return self.t_initial * cycles
else:
return int(math.floor(-self.t_initial * (self.t_mul ** cycles - 1) / (1 - self.t_mul)))
def create_scheduler(args, optimizer):
num_epochs = args.epochs
if getattr(args, 'lr_noise', None) is not None:
lr_noise = getattr(args, 'lr_noise')
if isinstance(lr_noise, (list, tuple)):
noise_range = [n * num_epochs for n in lr_noise]
if len(noise_range) == 1:
noise_range = noise_range[0]
else:
noise_range = lr_noise * num_epochs
else:
noise_range = None
lr_scheduler = None
if args.sched == 'cosine':
lr_scheduler = CosineLRScheduler(
optimizer,
t_initial=num_epochs,
t_mul=getattr(args, 'lr_cycle_mul', 1.),
lr_min=args.min_lr,
decay_rate=args.decay_rate,
warmup_lr_init=args.warmup_lr,
warmup_t=args.warmup_epochs,
cycle_limit=getattr(args, 'lr_cycle_limit', 1),
t_in_epochs=True,
noise_range_t=noise_range,
noise_pct=getattr(args, 'lr_noise_pct', 0.67),
noise_std=getattr(args, 'lr_noise_std', 1.),
noise_seed=getattr(args, 'seed', 42),
)
num_epochs = lr_scheduler.get_cycle_length() + args.cooldown_epochs
else:
raise ValueError("Invalid scheduler: {}".format(args.sched))
return lr_scheduler, num_epochs |
TensorFlow2/Recommendation/DLRM_and_DCNv2/deployment/deployment_toolkit/triton_performance_runner/perf_analyzer | perf_analyzer | exceptions | # Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class PerfAnalyzerException(Exception):
def __init__(self, message: str):
self._message = message
def __str__(self):
"""
Get the exception string representation.
Returns
-------
str
The message associated with this exception, or None if no message.
"""
return self._message
@property
def message(self):
"""
Get the exception message.
Returns
-------
str
The message associated with this exception, or None if no message.
"""
return self._message
|
TensorFlow2/Detection/Efficientdet/dataset | dataset | tfrecord_util | # Copyright 2020 Google Research. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""TFRecord related utilities."""
from six.moves import range
import tensorflow.compat.v1 as tf
def int64_feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def int64_list_feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=value))
def bytes_feature(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
def bytes_list_feature(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))
def float_list_feature(value):
return tf.train.Feature(float_list=tf.train.FloatList(value=value))
def read_examples_list(path):
"""Read list of training or validation examples.
The file is assumed to contain a single example per line where the first
token in the line is an identifier that allows us to find the image and
annotation xml for that example.
For example, the line:
xyz 3
would allow us to find files xyz.jpg and xyz.xml (the 3 would be ignored).
Args:
path: absolute path to examples list file.
Returns:
list of example identifiers (strings).
"""
with tf.gfile.GFile(path) as fid:
lines = fid.readlines()
return [line.strip().split(' ')[0] for line in lines]
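# Example (hedged, not in the original): for a file containing the two lines
# "xyz 3" and "abc 1", read_examples_list(path) returns ['xyz', 'abc'].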
def recursive_parse_xml_to_dict(xml):
"""Recursively parses XML contents to python dict.
We assume that `object` tags are the only ones that can appear
multiple times at the same level of a tree.
Args:
xml: xml tree obtained by parsing XML file contents using lxml.etree
Returns:
Python dictionary holding XML contents.
"""
if not xml:
return {xml.tag: xml.text}
result = {}
for child in xml:
child_result = recursive_parse_xml_to_dict(child)
if child.tag != 'object':
result[child.tag] = child_result[child.tag]
else:
if child.tag not in result:
result[child.tag] = []
result[child.tag].append(child_result[child.tag])
return {xml.tag: result}
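# Hedged usage sketch (assumes lxml is available; not part of the original):
#
#   from lxml import etree
#   xml = etree.fromstring(
#       '<annotation><filename>img.jpg</filename>'
#       '<object><name>cat</name></object>'
#       '<object><name>dog</name></object></annotation>')
#   d = recursive_parse_xml_to_dict(xml)['annotation']
#   # d['filename'] == 'img.jpg'; d['object'] is a list with two entries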
def open_sharded_output_tfrecords(exit_stack, base_path, num_shards):
"""Opens all TFRecord shards for writing and adds them to an exit stack.
Args:
exit_stack: A contextlib2.ExitStack used to automatically close the TFRecords
opened in this function.
base_path: The base path for all shards
num_shards: The number of shards
Returns:
The list of opened TFRecords. Position k in the list corresponds to shard k.
"""
tf_record_output_filenames = [
'{}-{:05d}-of-{:05d}'.format(base_path, idx, num_shards)
for idx in range(num_shards)
]
tfrecords = [
exit_stack.enter_context(tf.python_io.TFRecordWriter(file_name))
for file_name in tf_record_output_filenames
]
return tfrecords
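# Hedged usage sketch (assumes the contextlib2 package; `examples` is an
# illustrative iterable of tf.train.Example, not part of the original):
#
#   import contextlib2
#   with contextlib2.ExitStack() as stack:
#       writers = open_sharded_output_tfrecords(stack, '/tmp/data.record', 10)
#       for i, example in enumerate(examples):
#           writers[i % 10].write(example.SerializeToString())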
|
PyTorch/Segmentation/MaskRCNN/pytorch/maskrcnn_benchmark/utils | utils | model_zoo | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import os
import sys
try:
from torch.utils.model_zoo import _download_url_to_file
from torch.utils.model_zoo import urlparse
from torch.utils.model_zoo import HASH_REGEX
except:
from torch.hub import _download_url_to_file
from torch.hub import urlparse
from torch.hub import HASH_REGEX
from maskrcnn_benchmark.utils.comm import is_main_process
from maskrcnn_benchmark.utils.comm import synchronize
# very similar to https://github.com/pytorch/pytorch/blob/master/torch/utils/model_zoo.py
# but with a few improvements and modifications
def cache_url(url, model_dir=None, progress=True):
r"""Loads the Torch serialized object at the given URL.
If the object is already present in `model_dir`, it's deserialized and
returned. The filename part of the URL should follow the naming convention
``filename-<sha256>.ext`` where ``<sha256>`` is the first eight or more
digits of the SHA256 hash of the contents of the file. The hash is used to
ensure unique names and to verify the contents of the file.
The default value of `model_dir` is ``$TORCH_HOME/models`` where
``$TORCH_HOME`` defaults to ``~/.torch``. The default directory can be
overridden with the ``$TORCH_MODEL_ZOO`` environment variable.
Args:
url (string): URL of the object to download
model_dir (string, optional): directory in which to save the object
progress (bool, optional): whether or not to display a progress bar to stderr
Example:
>>> cached_file = maskrcnn_benchmark.utils.model_zoo.cache_url('https://s3.amazonaws.com/pytorch/models/resnet18-5c106cde.pth')
"""
if model_dir is None:
torch_home = os.path.expanduser(os.getenv('TORCH_HOME', '~/.torch'))
model_dir = os.getenv('TORCH_MODEL_ZOO', os.path.join(torch_home, 'models'))
if not os.path.exists(model_dir) and is_main_process():
os.makedirs(model_dir)
parts = urlparse(url)
filename = os.path.basename(parts.path)
if filename == "model_final.pkl":
# workaround as pre-trained Caffe2 models from Detectron have all the same filename
# so make the full path the filename by replacing / with _
filename = parts.path.replace("/", "_")
cached_file = os.path.join(model_dir, filename)
if not os.path.exists(cached_file) and is_main_process():
sys.stderr.write('Downloading: "{}" to {}\n'.format(url, cached_file))
hash_prefix = HASH_REGEX.search(filename)
if hash_prefix is not None:
hash_prefix = hash_prefix.group(1)
# workaround: Caffe2 models don't have a hash, but follow the R-50 convention,
# which matches the hash PyTorch uses. So we skip the hash matching
# if the hash_prefix is less than 6 characters
if len(hash_prefix) < 6:
hash_prefix = None
_download_url_to_file(url, cached_file, hash_prefix, progress=progress)
synchronize()
return cached_file
|
PyTorch/Classification/GPUNet/triton/085ms/runner | runner | __main__ | # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import pathlib
from typing import List
if __name__ == "__main__" and __package__ is None:
__package__ = pathlib.Path(__file__).parent.name
from ...runner.config import Config
from ...runner.executor import Executor
from ...runner.finalizer import ExperimentFinalizer
from ...runner.maintainer import DockerMaintainer
from ...runner.preparer import ExperimentPreparer
from ...runner.runner_proxy import RunnerProxy
from .pipeline_impl import pipeline
class ExperimentRunner(RunnerProxy):
"""
Experiment Runner proxy for runner wrapper
"""
maintainer_cls = DockerMaintainer
executor_cls = Executor
preparer_cls = ExperimentPreparer
finalizer_cls = ExperimentFinalizer
def execute(config_path: str, devices: List[str]):
if len(devices) == 0:
devices = ["0"]
config = Config.from_file(config_path)
runner = ExperimentRunner(config=config, pipeline=pipeline, devices=devices)
runner.start()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--config-path", type=str, required=True, help="Path to configuration file with details.")
parser.add_argument(
"--devices", type=str, nargs="*", required=False, help="Path to configuration file with details."
)
args = parser.parse_args()
config_path = args.config_path
devices = args.devices
execute(config_path, devices) |
PyTorch/SpeechSynthesis/Tacotron2 | Tacotron2 | models | # *****************************************************************************
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the NVIDIA CORPORATION nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# *****************************************************************************
import sys
from os.path import abspath, dirname
# enabling modules discovery from global entrypoint
sys.path.append(abspath(dirname(__file__)+'/'))
from tacotron2.model import Tacotron2
from waveglow.model import WaveGlow
import torch
def model_parser(model_name, parser, add_help=False):
if model_name == 'Tacotron2':
from tacotron2.arg_parser import tacotron2_parser
return tacotron2_parser(parser, add_help)
if model_name == 'WaveGlow':
from waveglow.arg_parser import waveglow_parser
return waveglow_parser(parser, add_help)
else:
raise NotImplementedError(model_name)
def batchnorm_to_float(module):
"""Converts batch norm to FP32"""
if isinstance(module, torch.nn.modules.batchnorm._BatchNorm):
module.float()
for child in module.children():
batchnorm_to_float(child)
return module
def init_bn(module):
if isinstance(module, torch.nn.modules.batchnorm._BatchNorm):
if module.affine:
module.weight.data.uniform_()
for child in module.children():
init_bn(child)
def get_model(model_name, model_config, cpu_run,
uniform_initialize_bn_weight=False, forward_is_infer=False,
jittable=False):
""" Code chooses a model based on name"""
model = None
if model_name == 'Tacotron2':
if forward_is_infer:
class Tacotron2__forward_is_infer(Tacotron2):
def forward(self, inputs, input_lengths):
return self.infer(inputs, input_lengths)
model = Tacotron2__forward_is_infer(**model_config)
else:
model = Tacotron2(**model_config)
elif model_name == 'WaveGlow':
model = WaveGlow(**model_config)
if forward_is_infer:
model.forward = model.infer
else:
raise NotImplementedError(model_name)
if uniform_initialize_bn_weight:
init_bn(model)
if not cpu_run:
model = model.cuda()
return model
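# Hedged usage sketch (not part of the original file); `args` is assumed to
# come from an argparse parser extended by model_parser() above:
#
#   model_config = get_model_config('Tacotron2', args)
#   model = get_model('Tacotron2', model_config, cpu_run=False,
#                     forward_is_infer=True)
#   outputs = model(inputs, input_lengths)  # forward dispatches to infer()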
def get_model_config(model_name, args):
""" Code chooses a model based on name"""
if model_name == 'Tacotron2':
model_config = dict(
# optimization
mask_padding=args.mask_padding,
# audio
n_mel_channels=args.n_mel_channels,
# symbols
n_symbols=args.n_symbols,
symbols_embedding_dim=args.symbols_embedding_dim,
# encoder
encoder_kernel_size=args.encoder_kernel_size,
encoder_n_convolutions=args.encoder_n_convolutions,
encoder_embedding_dim=args.encoder_embedding_dim,
# attention
attention_rnn_dim=args.attention_rnn_dim,
attention_dim=args.attention_dim,
# attention location
attention_location_n_filters=args.attention_location_n_filters,
attention_location_kernel_size=args.attention_location_kernel_size,
# decoder
n_frames_per_step=args.n_frames_per_step,
decoder_rnn_dim=args.decoder_rnn_dim,
prenet_dim=args.prenet_dim,
max_decoder_steps=args.max_decoder_steps,
gate_threshold=args.gate_threshold,
p_attention_dropout=args.p_attention_dropout,
p_decoder_dropout=args.p_decoder_dropout,
# postnet
postnet_embedding_dim=args.postnet_embedding_dim,
postnet_kernel_size=args.postnet_kernel_size,
postnet_n_convolutions=args.postnet_n_convolutions,
decoder_no_early_stopping=args.decoder_no_early_stopping
)
return model_config
elif model_name == 'WaveGlow':
model_config = dict(
n_mel_channels=args.n_mel_channels,
n_flows=args.flows,
n_group=args.groups,
n_early_every=args.early_every,
n_early_size=args.early_size,
WN_config=dict(
n_layers=args.wn_layers,
kernel_size=args.wn_kernel_size,
n_channels=args.wn_channels
)
)
return model_config
else:
raise NotImplementedError(model_name)
|
TensorFlow/Detection/SSD/models/research/object_detection/samples/configs | configs | mask_rcnn_resnet101_pets | # Mask R-CNN with Resnet-101 (v1) configured for the Oxford-IIIT Pet Dataset.
# Users should configure the fine_tune_checkpoint field in the train config as
# well as the label_map_path and input_path fields in the train_input_reader and
# eval_input_reader. Search for "PATH_TO_BE_CONFIGURED" to find the fields that
# should be configured.
model {
faster_rcnn {
num_classes: 37
image_resizer {
keep_aspect_ratio_resizer {
min_dimension: 600
max_dimension: 1024
}
}
number_of_stages: 3
feature_extractor {
type: 'faster_rcnn_resnet101'
first_stage_features_stride: 16
}
first_stage_anchor_generator {
grid_anchor_generator {
scales: [0.25, 0.5, 1.0, 2.0]
aspect_ratios: [0.5, 1.0, 2.0]
height_stride: 16
width_stride: 16
}
}
first_stage_box_predictor_conv_hyperparams {
op: CONV
regularizer {
l2_regularizer {
weight: 0.0
}
}
initializer {
truncated_normal_initializer {
stddev: 0.01
}
}
}
first_stage_nms_score_threshold: 0.0
first_stage_nms_iou_threshold: 0.7
first_stage_max_proposals: 300
first_stage_localization_loss_weight: 2.0
first_stage_objectness_loss_weight: 1.0
initial_crop_size: 14
maxpool_kernel_size: 2
maxpool_stride: 2
second_stage_box_predictor {
mask_rcnn_box_predictor {
use_dropout: false
dropout_keep_probability: 1.0
predict_instance_masks: true
conv_hyperparams {
op: CONV
regularizer {
l2_regularizer {
weight: 0.0
}
}
initializer {
truncated_normal_initializer {
stddev: 0.01
}
}
}
fc_hyperparams {
op: FC
regularizer {
l2_regularizer {
weight: 0.0
}
}
initializer {
variance_scaling_initializer {
factor: 1.0
uniform: true
mode: FAN_AVG
}
}
}
}
}
second_stage_post_processing {
batch_non_max_suppression {
score_threshold: 0.0
iou_threshold: 0.6
max_detections_per_class: 100
max_total_detections: 300
}
score_converter: SOFTMAX
}
second_stage_localization_loss_weight: 2.0
second_stage_classification_loss_weight: 1.0
}
}
train_config: {
batch_size: 1
optimizer {
momentum_optimizer: {
learning_rate: {
manual_step_learning_rate {
initial_learning_rate: 0.0007
schedule {
step: 15000
learning_rate: 0.00007
}
schedule {
step: 30000
learning_rate: 0.000007
}
}
}
momentum_optimizer_value: 0.9
}
use_moving_average: false
}
gradient_clipping_by_norm: 10.0
fine_tune_checkpoint: "PATH_TO_BE_CONFIGURED/model.ckpt"
from_detection_checkpoint: true
load_all_detection_checkpoint_vars: true
# Note: The below line limits the training process to 200K steps, which we
# empirically found to be sufficient to train the pets dataset. This
# effectively bypasses the learning rate schedule (the learning rate will
# never decay). Remove the below line to train indefinitely.
num_steps: 200000
data_augmentation_options {
random_horizontal_flip {
}
}
}
train_input_reader: {
tf_record_input_reader {
input_path: "PATH_TO_BE_CONFIGURED/pet_fullbody_with_masks_train.record-?????-of-00010"
}
label_map_path: "PATH_TO_BE_CONFIGURED/pet_label_map.pbtxt"
load_instance_masks: true
}
eval_config: {
metrics_set: "coco_mask_metrics"
num_examples: 1101
}
eval_input_reader: {
tf_record_input_reader {
input_path: "PATH_TO_BE_CONFIGURED/pet_fullbody_with_masks_val.record-?????-of-00010"
}
label_map_path: "PATH_TO_BE_CONFIGURED/pet_label_map.pbtxt"
load_instance_masks: true
shuffle: false
num_readers: 1
}
|
PyTorch/SpeechSynthesis/FastPitch/common/text/unidecoder | unidecoder | replacements | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# MIT License
#
# Copyright (c) Sindre Sorhus <[email protected]> (https://sindresorhus.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Based on:
# https://github.com/sindresorhus/transliterate/blob/main/replacements.js
#
replacements = [
# German umlauts
['ß', 'ss'],
['ẞ', 'Ss'],
['ä', 'ae'],
['Ä', 'Ae'],
['ö', 'oe'],
['Ö', 'Oe'],
['ü', 'ue'],
['Ü', 'Ue'],
# Latin
['À', 'A'],
['Á', 'A'],
['Â', 'A'],
['Ã', 'A'],
['Ä', 'Ae'],
['Å', 'A'],
['Æ', 'AE'],
['Ç', 'C'],
['È', 'E'],
['É', 'E'],
['Ê', 'E'],
['Ë', 'E'],
['Ì', 'I'],
['Í', 'I'],
['Î', 'I'],
['Ï', 'I'],
['Ð', 'D'],
['Ñ', 'N'],
['Ò', 'O'],
['Ó', 'O'],
['Ô', 'O'],
['Õ', 'O'],
['Ö', 'Oe'],
['Ő', 'O'],
['Ø', 'O'],
['Ù', 'U'],
['Ú', 'U'],
['Û', 'U'],
['Ü', 'Ue'],
['Ű', 'U'],
['Ý', 'Y'],
['Þ', 'TH'],
['ß', 'ss'],
['à', 'a'],
['á', 'a'],
['â', 'a'],
['ã', 'a'],
['ä', 'ae'],
['å', 'a'],
['æ', 'ae'],
['ç', 'c'],
['è', 'e'],
['é', 'e'],
['ê', 'e'],
['ë', 'e'],
['ì', 'i'],
['í', 'i'],
['î', 'i'],
['ï', 'i'],
['ð', 'd'],
['ñ', 'n'],
['ò', 'o'],
['ó', 'o'],
['ô', 'o'],
['õ', 'o'],
['ö', 'oe'],
['ő', 'o'],
['ø', 'o'],
['ù', 'u'],
['ú', 'u'],
['û', 'u'],
['ü', 'ue'],
['ű', 'u'],
['ý', 'y'],
['þ', 'th'],
['ÿ', 'y'],
['ẞ', 'SS'],
# Vietnamese
['à', 'a'],
['À', 'A'],
['á', 'a'],
['Á', 'A'],
['â', 'a'],
['Â', 'A'],
['ã', 'a'],
['Ã', 'A'],
['è', 'e'],
['È', 'E'],
['é', 'e'],
['É', 'E'],
['ê', 'e'],
['Ê', 'E'],
['ì', 'i'],
['Ì', 'I'],
['í', 'i'],
['Í', 'I'],
['ò', 'o'],
['Ò', 'O'],
['ó', 'o'],
['Ó', 'O'],
['ô', 'o'],
['Ô', 'O'],
['õ', 'o'],
['Õ', 'O'],
['ù', 'u'],
['Ù', 'U'],
['ú', 'u'],
['Ú', 'U'],
['ý', 'y'],
['Ý', 'Y'],
['ă', 'a'],
['Ă', 'A'],
['Đ', 'D'],
['đ', 'd'],
['ĩ', 'i'],
['Ĩ', 'I'],
['ũ', 'u'],
['Ũ', 'U'],
['ơ', 'o'],
['Ơ', 'O'],
['ư', 'u'],
['Ư', 'U'],
['ạ', 'a'],
['Ạ', 'A'],
['ả', 'a'],
['Ả', 'A'],
['ấ', 'a'],
['Ấ', 'A'],
['ầ', 'a'],
['Ầ', 'A'],
['ẩ', 'a'],
['Ẩ', 'A'],
['ẫ', 'a'],
['Ẫ', 'A'],
['ậ', 'a'],
['Ậ', 'A'],
['ắ', 'a'],
['Ắ', 'A'],
['ằ', 'a'],
['Ằ', 'A'],
['ẳ', 'a'],
['Ẳ', 'A'],
['ẵ', 'a'],
['Ẵ', 'A'],
['ặ', 'a'],
['Ặ', 'A'],
['ẹ', 'e'],
['Ẹ', 'E'],
['ẻ', 'e'],
['Ẻ', 'E'],
['ẽ', 'e'],
['Ẽ', 'E'],
['ế', 'e'],
['Ế', 'E'],
['ề', 'e'],
['Ề', 'E'],
['ể', 'e'],
['Ể', 'E'],
['ễ', 'e'],
['Ễ', 'E'],
['ệ', 'e'],
['Ệ', 'E'],
['ỉ', 'i'],
['Ỉ', 'I'],
['ị', 'i'],
['Ị', 'I'],
['ọ', 'o'],
['Ọ', 'O'],
['ỏ', 'o'],
['Ỏ', 'O'],
['ố', 'o'],
['Ố', 'O'],
['ồ', 'o'],
['Ồ', 'O'],
['ổ', 'o'],
['Ổ', 'O'],
['ỗ', 'o'],
['Ỗ', 'O'],
['ộ', 'o'],
['Ộ', 'O'],
['ớ', 'o'],
['Ớ', 'O'],
['ờ', 'o'],
['Ờ', 'O'],
['ở', 'o'],
['Ở', 'O'],
['ỡ', 'o'],
['Ỡ', 'O'],
['ợ', 'o'],
['Ợ', 'O'],
['ụ', 'u'],
['Ụ', 'U'],
['ủ', 'u'],
['Ủ', 'U'],
['ứ', 'u'],
['Ứ', 'U'],
['ừ', 'u'],
['Ừ', 'U'],
['ử', 'u'],
['Ử', 'U'],
['ữ', 'u'],
['Ữ', 'U'],
['ự', 'u'],
['Ự', 'U'],
['ỳ', 'y'],
['Ỳ', 'Y'],
['ỵ', 'y'],
['Ỵ', 'Y'],
['ỷ', 'y'],
['Ỷ', 'Y'],
['ỹ', 'y'],
['Ỹ', 'Y'],
# Arabic
['ء', 'e'],
['آ', 'a'],
['أ', 'a'],
['ؤ', 'w'],
['إ', 'i'],
['ئ', 'y'],
['ا', 'a'],
['ب', 'b'],
['ة', 't'],
['ت', 't'],
['ث', 'th'],
['ج', 'j'],
['ح', 'h'],
['خ', 'kh'],
['د', 'd'],
['ذ', 'dh'],
['ر', 'r'],
['ز', 'z'],
['س', 's'],
['ش', 'sh'],
['ص', 's'],
['ض', 'd'],
['ط', 't'],
['ظ', 'z'],
['ع', 'e'],
['غ', 'gh'],
['ـ', '_'],
['ف', 'f'],
['ق', 'q'],
['ك', 'k'],
['ل', 'l'],
['م', 'm'],
['ن', 'n'],
['ه', 'h'],
['و', 'w'],
['ى', 'a'],
['ي', 'y'],
['َ', 'a'],
['ُ', 'u'],
['ِ', 'i'],
['٠', '0'],
['١', '1'],
['٢', '2'],
['٣', '3'],
['٤', '4'],
['٥', '5'],
['٦', '6'],
['٧', '7'],
['٨', '8'],
['٩', '9'],
# Persian / Farsi
['چ', 'ch'],
['ک', 'k'],
['گ', 'g'],
['پ', 'p'],
['ژ', 'zh'],
['ی', 'y'],
['۰', '0'],
['۱', '1'],
['۲', '2'],
['۳', '3'],
['۴', '4'],
['۵', '5'],
['۶', '6'],
['۷', '7'],
['۸', '8'],
['۹', '9'],
# Pashto
['ټ', 'p'],
['ځ', 'z'],
['څ', 'c'],
['ډ', 'd'],
['ﺫ', 'd'],
['ﺭ', 'r'],
['ړ', 'r'],
['ﺯ', 'z'],
['ږ', 'g'],
['ښ', 'x'],
['ګ', 'g'],
['ڼ', 'n'],
['ۀ', 'e'],
['ې', 'e'],
['ۍ', 'ai'],
# Urdu
['ٹ', 't'],
['ڈ', 'd'],
['ڑ', 'r'],
['ں', 'n'],
['ہ', 'h'],
['ھ', 'h'],
['ے', 'e'],
# Russian
['А', 'A'],
['а', 'a'],
['Б', 'B'],
['б', 'b'],
['В', 'V'],
['в', 'v'],
['Г', 'G'],
['г', 'g'],
['Д', 'D'],
['д', 'd'],
['ъе', 'ye'],
['Ъе', 'Ye'],
['ъЕ', 'yE'],
['ЪЕ', 'YE'],
['Е', 'E'],
['е', 'e'],
['Ё', 'Yo'],
['ё', 'yo'],
['Ж', 'Zh'],
['ж', 'zh'],
['З', 'Z'],
['з', 'z'],
['И', 'I'],
['и', 'i'],
['ый', 'iy'],
['Ый', 'Iy'],
['ЫЙ', 'IY'],
['ыЙ', 'iY'],
['Й', 'Y'],
['й', 'y'],
['К', 'K'],
['к', 'k'],
['Л', 'L'],
['л', 'l'],
['М', 'M'],
['м', 'm'],
['Н', 'N'],
['н', 'n'],
['О', 'O'],
['о', 'o'],
['П', 'P'],
['п', 'p'],
['Р', 'R'],
['р', 'r'],
['С', 'S'],
['с', 's'],
['Т', 'T'],
['т', 't'],
['У', 'U'],
['у', 'u'],
['Ф', 'F'],
['ф', 'f'],
['Х', 'Kh'],
['х', 'kh'],
['Ц', 'Ts'],
['ц', 'ts'],
['Ч', 'Ch'],
['ч', 'ch'],
['Ш', 'Sh'],
['ш', 'sh'],
['Щ', 'Sch'],
['щ', 'sch'],
['Ъ', ''],
['ъ', ''],
['Ы', 'Y'],
['ы', 'y'],
['Ь', ''],
['ь', ''],
['Э', 'E'],
['э', 'e'],
['Ю', 'Yu'],
['ю', 'yu'],
['Я', 'Ya'],
['я', 'ya'],
# Romanian
['ă', 'a'],
['Ă', 'A'],
['ș', 's'],
['Ș', 'S'],
['ț', 't'],
['Ț', 'T'],
['ţ', 't'],
['Ţ', 'T'],
# Turkish
['ş', 's'],
['Ş', 'S'],
['ç', 'c'],
['Ç', 'C'],
['ğ', 'g'],
['Ğ', 'G'],
['ı', 'i'],
['İ', 'I'],
# Armenian
['ա', 'a'],
['Ա', 'A'],
['բ', 'b'],
['Բ', 'B'],
['գ', 'g'],
['Գ', 'G'],
['դ', 'd'],
['Դ', 'D'],
['ե', 'ye'],
['Ե', 'Ye'],
['զ', 'z'],
['Զ', 'Z'],
['է', 'e'],
['Է', 'E'],
['ը', 'y'],
['Ը', 'Y'],
['թ', 't'],
['Թ', 'T'],
['ժ', 'zh'],
['Ժ', 'Zh'],
['ի', 'i'],
['Ի', 'I'],
['լ', 'l'],
['Լ', 'L'],
['խ', 'kh'],
['Խ', 'Kh'],
['ծ', 'ts'],
['Ծ', 'Ts'],
['կ', 'k'],
['Կ', 'K'],
['հ', 'h'],
['Հ', 'H'],
['ձ', 'dz'],
['Ձ', 'Dz'],
['ղ', 'gh'],
['Ղ', 'Gh'],
['ճ', 'tch'],
['Ճ', 'Tch'],
['մ', 'm'],
['Մ', 'M'],
['յ', 'y'],
['Յ', 'Y'],
['ն', 'n'],
['Ն', 'N'],
['շ', 'sh'],
['Շ', 'Sh'],
['ո', 'vo'],
['Ո', 'Vo'],
['չ', 'ch'],
['Չ', 'Ch'],
['պ', 'p'],
['Պ', 'P'],
['ջ', 'j'],
['Ջ', 'J'],
['ռ', 'r'],
['Ռ', 'R'],
['ս', 's'],
['Ս', 'S'],
['վ', 'v'],
['Վ', 'V'],
['տ', 't'],
['Տ', 'T'],
['ր', 'r'],
['Ր', 'R'],
['ց', 'c'],
['Ց', 'C'],
['ու', 'u'],
['ՈՒ', 'U'],
['Ու', 'U'],
['փ', 'p'],
['Փ', 'P'],
['ք', 'q'],
['Ք', 'Q'],
['օ', 'o'],
['Օ', 'O'],
['ֆ', 'f'],
['Ֆ', 'F'],
['և', 'yev'],
# Georgian
['ა', 'a'],
['ბ', 'b'],
['გ', 'g'],
['დ', 'd'],
['ე', 'e'],
['ვ', 'v'],
['ზ', 'z'],
['თ', 't'],
['ი', 'i'],
['კ', 'k'],
['ლ', 'l'],
['მ', 'm'],
['ნ', 'n'],
['ო', 'o'],
['პ', 'p'],
['ჟ', 'zh'],
['რ', 'r'],
['ს', 's'],
['ტ', 't'],
['უ', 'u'],
['ფ', 'ph'],
['ქ', 'q'],
['ღ', 'gh'],
['ყ', 'k'],
['შ', 'sh'],
['ჩ', 'ch'],
['ც', 'ts'],
['ძ', 'dz'],
['წ', 'ts'],
['ჭ', 'tch'],
['ხ', 'kh'],
['ჯ', 'j'],
['ჰ', 'h'],
# Czech
['č', 'c'],
['ď', 'd'],
['ě', 'e'],
['ň', 'n'],
['ř', 'r'],
['š', 's'],
['ť', 't'],
['ů', 'u'],
['ž', 'z'],
['Č', 'C'],
['Ď', 'D'],
['Ě', 'E'],
['Ň', 'N'],
['Ř', 'R'],
['Š', 'S'],
['Ť', 'T'],
['Ů', 'U'],
['Ž', 'Z'],
# Dhivehi
['ހ', 'h'],
['ށ', 'sh'],
['ނ', 'n'],
['ރ', 'r'],
['ބ', 'b'],
['ޅ', 'lh'],
['ކ', 'k'],
['އ', 'a'],
['ވ', 'v'],
['މ', 'm'],
['ފ', 'f'],
['ދ', 'dh'],
['ތ', 'th'],
['ލ', 'l'],
['ގ', 'g'],
['ޏ', 'gn'],
['ސ', 's'],
['ޑ', 'd'],
['ޒ', 'z'],
['ޓ', 't'],
['ޔ', 'y'],
['ޕ', 'p'],
['ޖ', 'j'],
['ޗ', 'ch'],
['ޘ', 'tt'],
['ޙ', 'hh'],
['ޚ', 'kh'],
['ޛ', 'th'],
['ޜ', 'z'],
['ޝ', 'sh'],
['ޞ', 's'],
['ޟ', 'd'],
['ޠ', 't'],
['ޡ', 'z'],
['ޢ', 'a'],
['ޣ', 'gh'],
['ޤ', 'q'],
['ޥ', 'w'],
['ަ', 'a'],
['ާ', 'aa'],
['ި', 'i'],
['ީ', 'ee'],
['ު', 'u'],
['ޫ', 'oo'],
['ެ', 'e'],
['ޭ', 'ey'],
['ޮ', 'o'],
['ޯ', 'oa'],
['ް', ''],
# Greek
['α', 'a'],
['β', 'v'],
['γ', 'g'],
['δ', 'd'],
['ε', 'e'],
['ζ', 'z'],
['η', 'i'],
['θ', 'th'],
['ι', 'i'],
['κ', 'k'],
['λ', 'l'],
['μ', 'm'],
['ν', 'n'],
['ξ', 'ks'],
['ο', 'o'],
['π', 'p'],
['ρ', 'r'],
['σ', 's'],
['τ', 't'],
['υ', 'y'],
['φ', 'f'],
['χ', 'x'],
['ψ', 'ps'],
['ω', 'o'],
['ά', 'a'],
['έ', 'e'],
['ί', 'i'],
['ό', 'o'],
['ύ', 'y'],
['ή', 'i'],
['ώ', 'o'],
['ς', 's'],
['ϊ', 'i'],
['ΰ', 'y'],
['ϋ', 'y'],
['ΐ', 'i'],
['Α', 'A'],
['Β', 'B'],
['Γ', 'G'],
['Δ', 'D'],
['Ε', 'E'],
['Ζ', 'Z'],
['Η', 'I'],
['Θ', 'TH'],
['Ι', 'I'],
['Κ', 'K'],
['Λ', 'L'],
['Μ', 'M'],
['Ν', 'N'],
['Ξ', 'KS'],
['Ο', 'O'],
['Π', 'P'],
['Ρ', 'R'],
['Σ', 'S'],
['Τ', 'T'],
['Υ', 'Y'],
['Φ', 'F'],
['Χ', 'X'],
['Ψ', 'PS'],
['Ω', 'O'],
['Ά', 'A'],
['Έ', 'E'],
['Ί', 'I'],
['Ό', 'O'],
['Ύ', 'Y'],
['Ή', 'I'],
['Ώ', 'O'],
['Ϊ', 'I'],
['Ϋ', 'Y'],
# Disabled as it conflicts with German and Latin.
# Hungarian
# ['ä', 'a'],
# ['Ä', 'A'],
# ['ö', 'o'],
# ['Ö', 'O'],
# ['ü', 'u'],
# ['Ü', 'U'],
# ['ű', 'u'],
# ['Ű', 'U'],
# Latvian
['ā', 'a'],
['ē', 'e'],
['ģ', 'g'],
['ī', 'i'],
['ķ', 'k'],
['ļ', 'l'],
['ņ', 'n'],
['ū', 'u'],
['Ā', 'A'],
['Ē', 'E'],
['Ģ', 'G'],
['Ī', 'I'],
['Ķ', 'K'],
['Ļ', 'L'],
['Ņ', 'N'],
['Ū', 'U'],
['č', 'c'],
['š', 's'],
['ž', 'z'],
['Č', 'C'],
['Š', 'S'],
['Ž', 'Z'],
# Lithuanian
['ą', 'a'],
['č', 'c'],
['ę', 'e'],
['ė', 'e'],
['į', 'i'],
['š', 's'],
['ų', 'u'],
['ū', 'u'],
['ž', 'z'],
['Ą', 'A'],
['Č', 'C'],
['Ę', 'E'],
['Ė', 'E'],
['Į', 'I'],
['Š', 'S'],
['Ų', 'U'],
['Ū', 'U'],
# Macedonian
['Ќ', 'Kj'],
['ќ', 'kj'],
['Љ', 'Lj'],
['љ', 'lj'],
['Њ', 'Nj'],
['њ', 'nj'],
['Тс', 'Ts'],
['тс', 'ts'],
# Polish
['ą', 'a'],
['ć', 'c'],
['ę', 'e'],
['ł', 'l'],
['ń', 'n'],
['ś', 's'],
['ź', 'z'],
['ż', 'z'],
['Ą', 'A'],
['Ć', 'C'],
['Ę', 'E'],
['Ł', 'L'],
['Ń', 'N'],
['Ś', 'S'],
['Ź', 'Z'],
['Ż', 'Z'],
# Disabled as it conflicts with Vietnamese.
# Serbian
# ['љ', 'lj'],
# ['њ', 'nj'],
# ['Љ', 'Lj'],
# ['Њ', 'Nj'],
# ['đ', 'dj'],
# ['Đ', 'Dj'],
# ['ђ', 'dj'],
# ['ј', 'j'],
# ['ћ', 'c'],
# ['џ', 'dz'],
# ['Ђ', 'Dj'],
# ['Ј', 'j'],
# ['Ћ', 'C'],
# ['Џ', 'Dz'],
# Disabled as it conflicts with German and Latin.
# Slovak
# ['ä', 'a'],
# ['Ä', 'A'],
# ['ľ', 'l'],
# ['ĺ', 'l'],
# ['ŕ', 'r'],
# ['Ľ', 'L'],
# ['Ĺ', 'L'],
# ['Ŕ', 'R'],
# Disabled as it conflicts with German and Latin.
# Swedish
# ['å', 'o'],
# ['Å', 'o'],
# ['ä', 'a'],
# ['Ä', 'A'],
# ['ë', 'e'],
# ['Ë', 'E'],
# ['ö', 'o'],
# ['Ö', 'O'],
# Ukrainian
['Є', 'Ye'],
['І', 'I'],
['Ї', 'Yi'],
['Ґ', 'G'],
['є', 'ye'],
['і', 'i'],
['ї', 'yi'],
['ґ', 'g'],
# Dutch
['IJ', 'IJ'],
['ij', 'ij'],
# Danish
# ['Æ', 'Ae'],
# ['Ø', 'Oe'],
# ['Å', 'Aa'],
# ['æ', 'ae'],
# ['ø', 'oe'],
# ['å', 'aa']
# Currencies
['¢', 'c'],
['¥', 'Y'],
['߿', 'b'],
['৳', 't'],
['૱', 'Bo'],
['฿', 'B'],
['₠', 'CE'],
['₡', 'C'],
['₢', 'Cr'],
['₣', 'F'],
['₥', 'm'],
['₦', 'N'],
['₧', 'Pt'],
['₨', 'Rs'],
['₩', 'W'],
['₫', 's'],
['€', 'E'],
['₭', 'K'],
['₮', 'T'],
['₯', 'Dp'],
['₰', 'S'],
['₱', 'P'],
['₲', 'G'],
['₳', 'A'],
['₴', 'S'],
['₵', 'C'],
['₶', 'tt'],
['₷', 'S'],
['₸', 'T'],
['₹', 'R'],
['₺', 'L'],
['₽', 'P'],
['₿', 'B'],
['﹩', '$'],
['¢', 'c'],
['¥', 'Y'],
['₩', 'W'],
# Latin
['𝐀', 'A'],
['𝐁', 'B'],
['𝐂', 'C'],
['𝐃', 'D'],
['𝐄', 'E'],
['𝐅', 'F'],
['𝐆', 'G'],
['𝐇', 'H'],
['𝐈', 'I'],
['𝐉', 'J'],
['𝐊', 'K'],
['𝐋', 'L'],
['𝐌', 'M'],
['𝐍', 'N'],
['𝐎', 'O'],
['𝐏', 'P'],
['𝐐', 'Q'],
['𝐑', 'R'],
['𝐒', 'S'],
['𝐓', 'T'],
['𝐔', 'U'],
['𝐕', 'V'],
['𝐖', 'W'],
['𝐗', 'X'],
['𝐘', 'Y'],
['𝐙', 'Z'],
['𝐚', 'a'],
['𝐛', 'b'],
['𝐜', 'c'],
['𝐝', 'd'],
['𝐞', 'e'],
['𝐟', 'f'],
['𝐠', 'g'],
['𝐡', 'h'],
['𝐢', 'i'],
['𝐣', 'j'],
['𝐤', 'k'],
['𝐥', 'l'],
['𝐦', 'm'],
['𝐧', 'n'],
['𝐨', 'o'],
['𝐩', 'p'],
['𝐪', 'q'],
['𝐫', 'r'],
['𝐬', 's'],
['𝐭', 't'],
['𝐮', 'u'],
['𝐯', 'v'],
['𝐰', 'w'],
['𝐱', 'x'],
['𝐲', 'y'],
['𝐳', 'z'],
['𝐴', 'A'],
['𝐵', 'B'],
['𝐶', 'C'],
['𝐷', 'D'],
['𝐸', 'E'],
['𝐹', 'F'],
['𝐺', 'G'],
['𝐻', 'H'],
['𝐼', 'I'],
['𝐽', 'J'],
['𝐾', 'K'],
['𝐿', 'L'],
['𝑀', 'M'],
['𝑁', 'N'],
['𝑂', 'O'],
['𝑃', 'P'],
['𝑄', 'Q'],
['𝑅', 'R'],
['𝑆', 'S'],
['𝑇', 'T'],
['𝑈', 'U'],
['𝑉', 'V'],
['𝑊', 'W'],
['𝑋', 'X'],
['𝑌', 'Y'],
['𝑍', 'Z'],
['𝑎', 'a'],
['𝑏', 'b'],
['𝑐', 'c'],
['𝑑', 'd'],
['𝑒', 'e'],
['𝑓', 'f'],
['𝑔', 'g'],
['𝑖', 'i'],
['𝑗', 'j'],
['𝑘', 'k'],
['𝑙', 'l'],
['𝑚', 'm'],
['𝑛', 'n'],
['𝑜', 'o'],
['𝑝', 'p'],
['𝑞', 'q'],
['𝑟', 'r'],
['𝑠', 's'],
['𝑡', 't'],
['𝑢', 'u'],
['𝑣', 'v'],
['𝑤', 'w'],
['𝑥', 'x'],
['𝑦', 'y'],
['𝑧', 'z'],
['𝑨', 'A'],
['𝑩', 'B'],
['𝑪', 'C'],
['𝑫', 'D'],
['𝑬', 'E'],
['𝑭', 'F'],
['𝑮', 'G'],
['𝑯', 'H'],
['𝑰', 'I'],
['𝑱', 'J'],
['𝑲', 'K'],
['𝑳', 'L'],
['𝑴', 'M'],
['𝑵', 'N'],
['𝑶', 'O'],
['𝑷', 'P'],
['𝑸', 'Q'],
['𝑹', 'R'],
['𝑺', 'S'],
['𝑻', 'T'],
['𝑼', 'U'],
['𝑽', 'V'],
['𝑾', 'W'],
['𝑿', 'X'],
['𝒀', 'Y'],
['𝒁', 'Z'],
['𝒂', 'a'],
['𝒃', 'b'],
['𝒄', 'c'],
['𝒅', 'd'],
['𝒆', 'e'],
['𝒇', 'f'],
['𝒈', 'g'],
['𝒉', 'h'],
['𝒊', 'i'],
['𝒋', 'j'],
['𝒌', 'k'],
['𝒍', 'l'],
['𝒎', 'm'],
['𝒏', 'n'],
['𝒐', 'o'],
['𝒑', 'p'],
['𝒒', 'q'],
['𝒓', 'r'],
['𝒔', 's'],
['𝒕', 't'],
['𝒖', 'u'],
['𝒗', 'v'],
['𝒘', 'w'],
['𝒙', 'x'],
['𝒚', 'y'],
['𝒛', 'z'],
['𝒜', 'A'],
['𝒞', 'C'],
['𝒟', 'D'],
['𝒢', 'G'],
['𝒥', 'J'],
['𝒦', 'K'],
['𝒩', 'N'],
['𝒪', 'O'],
['𝒫', 'P'],
['𝒬', 'Q'],
['𝒮', 'S'],
['𝒯', 'T'],
['𝒰', 'U'],
['𝒱', 'V'],
['𝒲', 'W'],
['𝒳', 'X'],
['𝒴', 'Y'],
['𝒵', 'Z'],
['𝒶', 'a'],
['𝒷', 'b'],
['𝒸', 'c'],
['𝒹', 'd'],
['𝒻', 'f'],
['𝒽', 'h'],
['𝒾', 'i'],
['𝒿', 'j'],
['𝓀', 'k'],
['𝓁', 'l'],
['𝓂', 'm'],
['𝓃', 'n'],
['𝓅', 'p'],
['𝓆', 'q'],
['𝓇', 'r'],
['𝓈', 's'],
['𝓉', 't'],
['𝓊', 'u'],
['𝓋', 'v'],
['𝓌', 'w'],
['𝓍', 'x'],
['𝓎', 'y'],
['𝓏', 'z'],
['𝓐', 'A'],
['𝓑', 'B'],
['𝓒', 'C'],
['𝓓', 'D'],
['𝓔', 'E'],
['𝓕', 'F'],
['𝓖', 'G'],
['𝓗', 'H'],
['𝓘', 'I'],
['𝓙', 'J'],
['𝓚', 'K'],
['𝓛', 'L'],
['𝓜', 'M'],
['𝓝', 'N'],
['𝓞', 'O'],
['𝓟', 'P'],
['𝓠', 'Q'],
['𝓡', 'R'],
['𝓢', 'S'],
['𝓣', 'T'],
['𝓤', 'U'],
['𝓥', 'V'],
['𝓦', 'W'],
['𝓧', 'X'],
['𝓨', 'Y'],
['𝓩', 'Z'],
['𝓪', 'a'],
['𝓫', 'b'],
['𝓬', 'c'],
['𝓭', 'd'],
['𝓮', 'e'],
['𝓯', 'f'],
['𝓰', 'g'],
['𝓱', 'h'],
['𝓲', 'i'],
['𝓳', 'j'],
['𝓴', 'k'],
['𝓵', 'l'],
['𝓶', 'm'],
['𝓷', 'n'],
['𝓸', 'o'],
['𝓹', 'p'],
['𝓺', 'q'],
['𝓻', 'r'],
['𝓼', 's'],
['𝓽', 't'],
['𝓾', 'u'],
['𝓿', 'v'],
['𝔀', 'w'],
['𝔁', 'x'],
['𝔂', 'y'],
['𝔃', 'z'],
['𝔄', 'A'],
['𝔅', 'B'],
['𝔇', 'D'],
['𝔈', 'E'],
['𝔉', 'F'],
['𝔊', 'G'],
['𝔍', 'J'],
['𝔎', 'K'],
['𝔏', 'L'],
['𝔐', 'M'],
['𝔑', 'N'],
['𝔒', 'O'],
['𝔓', 'P'],
['𝔔', 'Q'],
['𝔖', 'S'],
['𝔗', 'T'],
['𝔘', 'U'],
['𝔙', 'V'],
['𝔚', 'W'],
['𝔛', 'X'],
['𝔜', 'Y'],
['𝔞', 'a'],
['𝔟', 'b'],
['𝔠', 'c'],
['𝔡', 'd'],
['𝔢', 'e'],
['𝔣', 'f'],
['𝔤', 'g'],
['𝔥', 'h'],
['𝔦', 'i'],
['𝔧', 'j'],
['𝔨', 'k'],
['𝔩', 'l'],
['𝔪', 'm'],
['𝔫', 'n'],
['𝔬', 'o'],
['𝔭', 'p'],
['𝔮', 'q'],
['𝔯', 'r'],
['𝔰', 's'],
['𝔱', 't'],
['𝔲', 'u'],
['𝔳', 'v'],
['𝔴', 'w'],
['𝔵', 'x'],
['𝔶', 'y'],
['𝔷', 'z'],
['𝔸', 'A'],
['𝔹', 'B'],
['𝔻', 'D'],
['𝔼', 'E'],
['𝔽', 'F'],
['𝔾', 'G'],
['𝕀', 'I'],
['𝕁', 'J'],
['𝕂', 'K'],
['𝕃', 'L'],
['𝕄', 'M'],
['𝕆', 'O'],
['𝕊', 'S'],
['𝕋', 'T'],
['𝕌', 'U'],
['𝕍', 'V'],
['𝕎', 'W'],
['𝕏', 'X'],
['𝕐', 'Y'],
['𝕒', 'a'],
['𝕓', 'b'],
['𝕔', 'c'],
['𝕕', 'd'],
['𝕖', 'e'],
['𝕗', 'f'],
['𝕘', 'g'],
['𝕙', 'h'],
['𝕚', 'i'],
['𝕛', 'j'],
['𝕜', 'k'],
['𝕝', 'l'],
['𝕞', 'm'],
['𝕟', 'n'],
['𝕠', 'o'],
['𝕡', 'p'],
['𝕢', 'q'],
['𝕣', 'r'],
['𝕤', 's'],
['𝕥', 't'],
['𝕦', 'u'],
['𝕧', 'v'],
['𝕨', 'w'],
['𝕩', 'x'],
['𝕪', 'y'],
['𝕫', 'z'],
['𝕬', 'A'],
['𝕭', 'B'],
['𝕮', 'C'],
['𝕯', 'D'],
['𝕰', 'E'],
['𝕱', 'F'],
['𝕲', 'G'],
['𝕳', 'H'],
['𝕴', 'I'],
['𝕵', 'J'],
['𝕶', 'K'],
['𝕷', 'L'],
['𝕸', 'M'],
['𝕹', 'N'],
['𝕺', 'O'],
['𝕻', 'P'],
['𝕼', 'Q'],
['𝕽', 'R'],
['𝕾', 'S'],
['𝕿', 'T'],
['𝖀', 'U'],
['𝖁', 'V'],
['𝖂', 'W'],
['𝖃', 'X'],
['𝖄', 'Y'],
['𝖅', 'Z'],
['𝖆', 'a'],
['𝖇', 'b'],
['𝖈', 'c'],
['𝖉', 'd'],
['𝖊', 'e'],
['𝖋', 'f'],
['𝖌', 'g'],
['𝖍', 'h'],
['𝖎', 'i'],
['𝖏', 'j'],
['𝖐', 'k'],
['𝖑', 'l'],
['𝖒', 'm'],
['𝖓', 'n'],
['𝖔', 'o'],
['𝖕', 'p'],
['𝖖', 'q'],
['𝖗', 'r'],
['𝖘', 's'],
['𝖙', 't'],
['𝖚', 'u'],
['𝖛', 'v'],
['𝖜', 'w'],
['𝖝', 'x'],
['𝖞', 'y'],
['𝖟', 'z'],
['𝖠', 'A'],
['𝖡', 'B'],
['𝖢', 'C'],
['𝖣', 'D'],
['𝖤', 'E'],
['𝖥', 'F'],
['𝖦', 'G'],
['𝖧', 'H'],
['𝖨', 'I'],
['𝖩', 'J'],
['𝖪', 'K'],
['𝖫', 'L'],
['𝖬', 'M'],
['𝖭', 'N'],
['𝖮', 'O'],
['𝖯', 'P'],
['𝖰', 'Q'],
['𝖱', 'R'],
['𝖲', 'S'],
['𝖳', 'T'],
['𝖴', 'U'],
['𝖵', 'V'],
['𝖶', 'W'],
['𝖷', 'X'],
['𝖸', 'Y'],
['𝖹', 'Z'],
['𝖺', 'a'],
['𝖻', 'b'],
['𝖼', 'c'],
['𝖽', 'd'],
['𝖾', 'e'],
['𝖿', 'f'],
['𝗀', 'g'],
['𝗁', 'h'],
['𝗂', 'i'],
['𝗃', 'j'],
['𝗄', 'k'],
['𝗅', 'l'],
['𝗆', 'm'],
['𝗇', 'n'],
['𝗈', 'o'],
['𝗉', 'p'],
['𝗊', 'q'],
['𝗋', 'r'],
['𝗌', 's'],
['𝗍', 't'],
['𝗎', 'u'],
['𝗏', 'v'],
['𝗐', 'w'],
['𝗑', 'x'],
['𝗒', 'y'],
['𝗓', 'z'],
['𝗔', 'A'],
['𝗕', 'B'],
['𝗖', 'C'],
['𝗗', 'D'],
['𝗘', 'E'],
['𝗙', 'F'],
['𝗚', 'G'],
['𝗛', 'H'],
['𝗜', 'I'],
['𝗝', 'J'],
['𝗞', 'K'],
['𝗟', 'L'],
['𝗠', 'M'],
['𝗡', 'N'],
['𝗢', 'O'],
['𝗣', 'P'],
['𝗤', 'Q'],
['𝗥', 'R'],
['𝗦', 'S'],
['𝗧', 'T'],
['𝗨', 'U'],
['𝗩', 'V'],
['𝗪', 'W'],
['𝗫', 'X'],
['𝗬', 'Y'],
['𝗭', 'Z'],
['𝗮', 'a'],
['𝗯', 'b'],
['𝗰', 'c'],
['𝗱', 'd'],
['𝗲', 'e'],
['𝗳', 'f'],
['𝗴', 'g'],
['𝗵', 'h'],
['𝗶', 'i'],
['𝗷', 'j'],
['𝗸', 'k'],
['𝗹', 'l'],
['𝗺', 'm'],
['𝗻', 'n'],
['𝗼', 'o'],
['𝗽', 'p'],
['𝗾', 'q'],
['𝗿', 'r'],
['𝘀', 's'],
['𝘁', 't'],
['𝘂', 'u'],
['𝘃', 'v'],
['𝘄', 'w'],
['𝘅', 'x'],
['𝘆', 'y'],
['𝘇', 'z'],
['𝘈', 'A'],
['𝘉', 'B'],
['𝘊', 'C'],
['𝘋', 'D'],
['𝘌', 'E'],
['𝘍', 'F'],
['𝘎', 'G'],
['𝘏', 'H'],
['𝘐', 'I'],
['𝘑', 'J'],
['𝘒', 'K'],
['𝘓', 'L'],
['𝘔', 'M'],
['𝘕', 'N'],
['𝘖', 'O'],
['𝘗', 'P'],
['𝘘', 'Q'],
['𝘙', 'R'],
['𝘚', 'S'],
['𝘛', 'T'],
['𝘜', 'U'],
['𝘝', 'V'],
['𝘞', 'W'],
['𝘟', 'X'],
['𝘠', 'Y'],
['𝘡', 'Z'],
['𝘢', 'a'],
['𝘣', 'b'],
['𝘤', 'c'],
['𝘥', 'd'],
['𝘦', 'e'],
['𝘧', 'f'],
['𝘨', 'g'],
['𝘩', 'h'],
['𝘪', 'i'],
['𝘫', 'j'],
['𝘬', 'k'],
['𝘭', 'l'],
['𝘮', 'm'],
['𝘯', 'n'],
['𝘰', 'o'],
['𝘱', 'p'],
['𝘲', 'q'],
['𝘳', 'r'],
['𝘴', 's'],
['𝘵', 't'],
['𝘶', 'u'],
['𝘷', 'v'],
['𝘸', 'w'],
['𝘹', 'x'],
['𝘺', 'y'],
['𝘻', 'z'],
['𝘼', 'A'],
['𝘽', 'B'],
['𝘾', 'C'],
['𝘿', 'D'],
['𝙀', 'E'],
['𝙁', 'F'],
['𝙂', 'G'],
['𝙃', 'H'],
['𝙄', 'I'],
['𝙅', 'J'],
['𝙆', 'K'],
['𝙇', 'L'],
['𝙈', 'M'],
['𝙉', 'N'],
['𝙊', 'O'],
['𝙋', 'P'],
['𝙌', 'Q'],
['𝙍', 'R'],
['𝙎', 'S'],
['𝙏', 'T'],
['𝙐', 'U'],
['𝙑', 'V'],
['𝙒', 'W'],
['𝙓', 'X'],
['𝙔', 'Y'],
['𝙕', 'Z'],
['𝙖', 'a'],
['𝙗', 'b'],
['𝙘', 'c'],
['𝙙', 'd'],
['𝙚', 'e'],
['𝙛', 'f'],
['𝙜', 'g'],
['𝙝', 'h'],
['𝙞', 'i'],
['𝙟', 'j'],
['𝙠', 'k'],
['𝙡', 'l'],
['𝙢', 'm'],
['𝙣', 'n'],
['𝙤', 'o'],
['𝙥', 'p'],
['𝙦', 'q'],
['𝙧', 'r'],
['𝙨', 's'],
['𝙩', 't'],
['𝙪', 'u'],
['𝙫', 'v'],
['𝙬', 'w'],
['𝙭', 'x'],
['𝙮', 'y'],
['𝙯', 'z'],
['𝙰', 'A'],
['𝙱', 'B'],
['𝙲', 'C'],
['𝙳', 'D'],
['𝙴', 'E'],
['𝙵', 'F'],
['𝙶', 'G'],
['𝙷', 'H'],
['𝙸', 'I'],
['𝙹', 'J'],
['𝙺', 'K'],
['𝙻', 'L'],
['𝙼', 'M'],
['𝙽', 'N'],
['𝙾', 'O'],
['𝙿', 'P'],
['𝚀', 'Q'],
['𝚁', 'R'],
['𝚂', 'S'],
['𝚃', 'T'],
['𝚄', 'U'],
['𝚅', 'V'],
['𝚆', 'W'],
['𝚇', 'X'],
['𝚈', 'Y'],
['𝚉', 'Z'],
['𝚊', 'a'],
['𝚋', 'b'],
['𝚌', 'c'],
['𝚍', 'd'],
['𝚎', 'e'],
['𝚏', 'f'],
['𝚐', 'g'],
['𝚑', 'h'],
['𝚒', 'i'],
['𝚓', 'j'],
['𝚔', 'k'],
['𝚕', 'l'],
['𝚖', 'm'],
['𝚗', 'n'],
['𝚘', 'o'],
['𝚙', 'p'],
['𝚚', 'q'],
['𝚛', 'r'],
['𝚜', 's'],
['𝚝', 't'],
['𝚞', 'u'],
['𝚟', 'v'],
['𝚠', 'w'],
['𝚡', 'x'],
['𝚢', 'y'],
['𝚣', 'z'],
# Dotless letters
['𝚤', 'l'],
['𝚥', 'j'],
# Greek
['𝛢', 'A'],
['𝛣', 'B'],
['𝛤', 'G'],
['𝛥', 'D'],
['𝛦', 'E'],
['𝛧', 'Z'],
['𝛨', 'I'],
['𝛩', 'TH'],
['𝛪', 'I'],
['𝛫', 'K'],
['𝛬', 'L'],
['𝛭', 'M'],
['𝛮', 'N'],
['𝛯', 'KS'],
['𝛰', 'O'],
['𝛱', 'P'],
['𝛲', 'R'],
['𝛳', 'TH'],
['𝛴', 'S'],
['𝛵', 'T'],
['𝛶', 'Y'],
['𝛷', 'F'],
['𝛸', 'X'],
['𝛹', 'PS'],
['𝛺', 'O'],
['𝛻', 'D'],
['𝛼', 'a'],
['𝛽', 'b'],
['𝛾', 'g'],
['𝛿', 'd'],
['𝜀', 'e'],
['𝜁', 'z'],
['𝜂', 'i'],
['𝜃', 'th'],
['𝜄', 'i'],
['𝜅', 'k'],
['𝜆', 'l'],
['𝜇', 'm'],
['𝜈', 'n'],
['𝜉', 'ks'],
['𝜊', 'o'],
['𝜋', 'p'],
['𝜌', 'r'],
['𝜍', 's'],
['𝜎', 's'],
['𝜏', 't'],
['𝜐', 'y'],
['𝜑', 'f'],
['𝜒', 'x'],
['𝜓', 'ps'],
['𝜔', 'o'],
['𝜕', 'd'],
['𝜖', 'E'],
['𝜗', 'TH'],
['𝜘', 'K'],
['𝜙', 'f'],
['𝜚', 'r'],
['𝜛', 'p'],
['𝜜', 'A'],
['𝜝', 'V'],
['𝜞', 'G'],
['𝜟', 'D'],
['𝜠', 'E'],
['𝜡', 'Z'],
['𝜢', 'I'],
['𝜣', 'TH'],
['𝜤', 'I'],
['𝜥', 'K'],
['𝜦', 'L'],
['𝜧', 'M'],
['𝜨', 'N'],
['𝜩', 'KS'],
['𝜪', 'O'],
['𝜫', 'P'],
['𝜬', 'R'],
['𝜭', 'TH'],
['𝜮', 'S'],
['𝜯', 'T'],
['𝜰', 'Y'],
['𝜱', 'F'],
['𝜲', 'X'],
['𝜳', 'PS'],
['𝜴', 'O'],
['𝜵', 'D'],
['𝜶', 'a'],
['𝜷', 'v'],
['𝜸', 'g'],
['𝜹', 'd'],
['𝜺', 'e'],
['𝜻', 'z'],
['𝜼', 'i'],
['𝜽', 'th'],
['𝜾', 'i'],
['𝜿', 'k'],
['𝝀', 'l'],
['𝝁', 'm'],
['𝝂', 'n'],
['𝝃', 'ks'],
['𝝄', 'o'],
['𝝅', 'p'],
['𝝆', 'r'],
['𝝇', 's'],
['𝝈', 's'],
['𝝉', 't'],
['𝝊', 'y'],
['𝝋', 'f'],
['𝝌', 'x'],
['𝝍', 'ps'],
['𝝎', 'o'],
['𝝏', 'a'],
['𝝐', 'e'],
['𝝑', 'i'],
['𝝒', 'k'],
['𝝓', 'f'],
['𝝔', 'r'],
['𝝕', 'p'],
['𝝖', 'A'],
['𝝗', 'B'],
['𝝘', 'G'],
['𝝙', 'D'],
['𝝚', 'E'],
['𝝛', 'Z'],
['𝝜', 'I'],
['𝝝', 'TH'],
['𝝞', 'I'],
['𝝟', 'K'],
['𝝠', 'L'],
['𝝡', 'M'],
['𝝢', 'N'],
['𝝣', 'KS'],
['𝝤', 'O'],
['𝝥', 'P'],
['𝝦', 'R'],
['𝝧', 'TH'],
['𝝨', 'S'],
['𝝩', 'T'],
['𝝪', 'Y'],
['𝝫', 'F'],
['𝝬', 'X'],
['𝝭', 'PS'],
['𝝮', 'O'],
['𝝯', 'D'],
['𝝰', 'a'],
['𝝱', 'v'],
['𝝲', 'g'],
['𝝳', 'd'],
['𝝴', 'e'],
['𝝵', 'z'],
['𝝶', 'i'],
['𝝷', 'th'],
['𝝸', 'i'],
['𝝹', 'k'],
['𝝺', 'l'],
['𝝻', 'm'],
['𝝼', 'n'],
['𝝽', 'ks'],
['𝝾', 'o'],
['𝝿', 'p'],
['𝞀', 'r'],
['𝞁', 's'],
['𝞂', 's'],
['𝞃', 't'],
['𝞄', 'y'],
['𝞅', 'f'],
['𝞆', 'x'],
['𝞇', 'ps'],
['𝞈', 'o'],
['𝞉', 'a'],
['𝞊', 'e'],
['𝞋', 'i'],
['𝞌', 'k'],
['𝞍', 'f'],
['𝞎', 'r'],
['𝞏', 'p'],
['𝞐', 'A'],
['𝞑', 'V'],
['𝞒', 'G'],
['𝞓', 'D'],
['𝞔', 'E'],
['𝞕', 'Z'],
['𝞖', 'I'],
['𝞗', 'TH'],
['𝞘', 'I'],
['𝞙', 'K'],
['𝞚', 'L'],
['𝞛', 'M'],
['𝞜', 'N'],
['𝞝', 'KS'],
['𝞞', 'O'],
['𝞟', 'P'],
['𝞠', 'S'],
['𝞡', 'TH'],
['𝞢', 'S'],
['𝞣', 'T'],
['𝞤', 'Y'],
['𝞥', 'F'],
['𝞦', 'X'],
['𝞧', 'PS'],
['𝞨', 'O'],
['𝞩', 'D'],
['𝞪', 'a'],
['𝞫', 'v'],
['𝞬', 'g'],
['𝞭', 'd'],
['𝞮', 'e'],
['𝞯', 'z'],
['𝞰', 'i'],
['𝞱', 'th'],
['𝞲', 'i'],
['𝞳', 'k'],
['𝞴', 'l'],
['𝞵', 'm'],
['𝞶', 'n'],
['𝞷', 'ks'],
['𝞸', 'o'],
['𝞹', 'p'],
['𝞺', 'r'],
['𝞻', 's'],
['𝞼', 's'],
['𝞽', 't'],
['𝞾', 'y'],
['𝞿', 'f'],
['𝟀', 'x'],
['𝟁', 'ps'],
['𝟂', 'o'],
['𝟃', 'a'],
['𝟄', 'e'],
['𝟅', 'i'],
['𝟆', 'k'],
['𝟇', 'f'],
['𝟈', 'r'],
['𝟉', 'p'],
['𝟊', 'F'],
['𝟋', 'f'],
['⒜', '(a)'],
['⒝', '(b)'],
['⒞', '(c)'],
['⒟', '(d)'],
['⒠', '(e)'],
['⒡', '(f)'],
['⒢', '(g)'],
['⒣', '(h)'],
['⒤', '(i)'],
['⒥', '(j)'],
['⒦', '(k)'],
['⒧', '(l)'],
['⒨', '(m)'],
['⒩', '(n)'],
['⒪', '(o)'],
['⒫', '(p)'],
['⒬', '(q)'],
['⒭', '(r)'],
['⒮', '(s)'],
['⒯', '(t)'],
['⒰', '(u)'],
['⒱', '(v)'],
['⒲', '(w)'],
['⒳', '(x)'],
['⒴', '(y)'],
['⒵', '(z)'],
['Ⓐ', '(A)'],
['Ⓑ', '(B)'],
['Ⓒ', '(C)'],
['Ⓓ', '(D)'],
['Ⓔ', '(E)'],
['Ⓕ', '(F)'],
['Ⓖ', '(G)'],
['Ⓗ', '(H)'],
['Ⓘ', '(I)'],
['Ⓙ', '(J)'],
['Ⓚ', '(K)'],
['Ⓛ', '(L)'],
['Ⓜ', '(M)'],
['Ⓝ', '(N)'],
['Ⓞ', '(O)'],
['Ⓟ', '(P)'],
['Ⓠ', '(Q)'],
['Ⓡ', '(R)'],
['Ⓢ', '(S)'],
['Ⓣ', '(T)'],
['Ⓤ', '(U)'],
['Ⓥ', '(V)'],
['Ⓦ', '(W)'],
['Ⓧ', '(X)'],
['Ⓨ', '(Y)'],
['Ⓩ', '(Z)'],
['ⓐ', '(a)'],
['ⓑ', '(b)'],
['ⓒ', '(c)'],
['ⓓ', '(d)'],
['ⓔ', '(e)'],
['ⓕ', '(f)'],
['ⓖ', '(g)'],
['ⓗ', '(h)'],
['ⓘ', '(i)'],
['ⓙ', '(j)'],
['ⓚ', '(k)'],
['ⓛ', '(l)'],
['ⓜ', '(m)'],
['ⓝ', '(n)'],
['ⓞ', '(o)'],
['ⓟ', '(p)'],
['ⓠ', '(q)'],
['ⓡ', '(r)'],
['ⓢ', '(s)'],
['ⓣ', '(t)'],
['ⓤ', '(u)'],
['ⓥ', '(v)'],
['ⓦ', '(w)'],
['ⓧ', '(x)'],
['ⓨ', '(y)'],
['ⓩ', '(z)'],
# Numbers
['𝟎', '0'],
['𝟏', '1'],
['𝟐', '2'],
['𝟑', '3'],
['𝟒', '4'],
['𝟓', '5'],
['𝟔', '6'],
['𝟕', '7'],
['𝟖', '8'],
['𝟗', '9'],
['𝟘', '0'],
['𝟙', '1'],
['𝟚', '2'],
['𝟛', '3'],
['𝟜', '4'],
['𝟝', '5'],
['𝟞', '6'],
['𝟟', '7'],
['𝟠', '8'],
['𝟡', '9'],
['𝟢', '0'],
['𝟣', '1'],
['𝟤', '2'],
['𝟥', '3'],
['𝟦', '4'],
['𝟧', '5'],
['𝟨', '6'],
['𝟩', '7'],
['𝟪', '8'],
['𝟫', '9'],
['𝟬', '0'],
['𝟭', '1'],
['𝟮', '2'],
['𝟯', '3'],
['𝟰', '4'],
['𝟱', '5'],
['𝟲', '6'],
['𝟳', '7'],
['𝟴', '8'],
['𝟵', '9'],
['𝟶', '0'],
['𝟷', '1'],
['𝟸', '2'],
['𝟹', '3'],
['𝟺', '4'],
['𝟻', '5'],
['𝟼', '6'],
['𝟽', '7'],
['𝟾', '8'],
['𝟿', '9'],
['①', '1'],
['②', '2'],
['③', '3'],
['④', '4'],
['⑤', '5'],
['⑥', '6'],
['⑦', '7'],
['⑧', '8'],
['⑨', '9'],
['⑩', '10'],
['⑪', '11'],
['⑫', '12'],
['⑬', '13'],
['⑭', '14'],
['⑮', '15'],
['⑯', '16'],
['⑰', '17'],
['⑱', '18'],
['⑲', '19'],
['⑳', '20'],
['⑴', '1'],
['⑵', '2'],
['⑶', '3'],
['⑷', '4'],
['⑸', '5'],
['⑹', '6'],
['⑺', '7'],
['⑻', '8'],
['⑼', '9'],
['⑽', '10'],
['⑾', '11'],
['⑿', '12'],
['⒀', '13'],
['⒁', '14'],
['⒂', '15'],
['⒃', '16'],
['⒄', '17'],
['⒅', '18'],
['⒆', '19'],
['⒇', '20'],
['⒈', '1.'],
['⒉', '2.'],
['⒊', '3.'],
['⒋', '4.'],
['⒌', '5.'],
['⒍', '6.'],
['⒎', '7.'],
['⒏', '8.'],
['⒐', '9.'],
['⒑', '10.'],
['⒒', '11.'],
['⒓', '12.'],
['⒔', '13.'],
['⒕', '14.'],
['⒖', '15.'],
['⒗', '16.'],
['⒘', '17.'],
['⒙', '18.'],
['⒚', '19.'],
['⒛', '20.'],
['⓪', '0'],
['⓫', '11'],
['⓬', '12'],
['⓭', '13'],
['⓮', '14'],
['⓯', '15'],
['⓰', '16'],
['⓱', '17'],
['⓲', '18'],
['⓳', '19'],
['⓴', '20'],
['⓵', '1'],
['⓶', '2'],
['⓷', '3'],
['⓸', '4'],
['⓹', '5'],
['⓺', '6'],
['⓻', '7'],
['⓼', '8'],
['⓽', '9'],
['⓾', '10'],
['⓿', '0'],
# Punctuation
['🙰', '&'],
['🙱', '&'],
['🙲', '&'],
['🙳', '&'],
['🙴', '&'],
['🙵', '&'],
['🙶', '"'],
['🙷', '"'],
['🙸', '"'],
['‽', '?!'],
['🙹', '?!'],
['🙺', '?!'],
['🙻', '?!'],
['🙼', '/'],
['🙽', '\\'],
# Alchemy
['🜇', 'AR'],
['🜈', 'V'],
['🜉', 'V'],
['🜆', 'VR'],
['🜅', 'VF'],
['🜩', '2'],
['🜪', '5'],
['🝡', 'f'],
['🝢', 'W'],
['🝣', 'U'],
['🝧', 'V'],
['🝨', 'T'],
['🝪', 'V'],
['🝫', 'MB'],
['🝬', 'VB'],
['🝲', '3B'],
['🝳', '3B'],
# Emojis
['💯', '100'],
['🔙', 'BACK'],
['🔚', 'END'],
['🔛', 'ON!'],
['🔜', 'SOON'],
['🔝', 'TOP'],
['🔞', '18'],
['🔤', 'abc'],
['🔠', 'ABCD'],
['🔡', 'abcd'],
['🔢', '1234'],
['🔣', 'T&@%'],
['#️⃣', '#'],
['*️⃣', '*'],
['0️⃣', '0'],
['1️⃣', '1'],
['2️⃣', '2'],
['3️⃣', '3'],
['4️⃣', '4'],
['5️⃣', '5'],
['6️⃣', '6'],
['7️⃣', '7'],
['8️⃣', '8'],
['9️⃣', '9'],
['🔟', '10'],
['🅰️', 'A'],
['🅱️', 'B'],
['🆎', 'AB'],
['🆑', 'CL'],
['🅾️', 'O'],
['🅿', 'P'],
['🆘', 'SOS'],
['🅲', 'C'],
['🅳', 'D'],
['🅴', 'E'],
['🅵', 'F'],
['🅶', 'G'],
['🅷', 'H'],
['🅸', 'I'],
['🅹', 'J'],
['🅺', 'K'],
['🅻', 'L'],
['🅼', 'M'],
['🅽', 'N'],
['🆀', 'Q'],
['🆁', 'R'],
['🆂', 'S'],
['🆃', 'T'],
['🆄', 'U'],
['🆅', 'V'],
['🆆', 'W'],
['🆇', 'X'],
['🆈', 'Y'],
['🆉', 'Z'],
]
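# Hedged usage sketch (not part of the original file): applying the table in
# list order, so multi-character sources such as 'ъе' are replaced before
# their single-character parts. The function name is illustrative.
#
#   def transliterate(text):
#       for src, dst in replacements:
#           text = text.replace(src, dst)
#       return text
#
#   # transliterate('Grüße, Москва') -> 'Gruesse, Moskva'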
|
TensorFlow/Recommendation/WideAndDeep/scripts | scripts | DGX1_benchmark_training_amp_1gpu | #!/bin/bash
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -x
set -e
python -m trainer.task \
--benchmark_warmup_steps 500 \
--benchmark_steps 1000 \
--gpu \
--benchmark \
--amp
|
PyTorch/LanguageModeling/BART/bart/modeling | modeling | bert_attn | # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import math

import torch
import torch.nn.functional as F
from torch import nn


class BertSelfAttention(nn.Module):
    def __init__(self, config):
        # Assumes a BERT-style config exposing hidden_size,
        # num_attention_heads and attention_probs_dropout_prob.
        super(BertSelfAttention, self).__init__()
        if config.hidden_size % config.num_attention_heads != 0:
            raise ValueError(
                "The hidden size (%d) is not a multiple of the number of attention "
                "heads (%d)" % (config.hidden_size, config.num_attention_heads))
        self.num_heads = config.num_attention_heads
        self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
        self.all_head_size = self.num_heads * self.attention_head_size
        self.query = nn.Linear(config.hidden_size, self.all_head_size)
        self.key = nn.Linear(config.hidden_size, self.all_head_size)
        self.value = nn.Linear(config.hidden_size, self.all_head_size)
        self.dropout = nn.Dropout(config.attention_probs_dropout_prob)
def transpose_for_scores(self, x):
new_x_shape = x.size()[:-1] + (self.num_heads, self.attention_head_size)
x = torch.reshape(x, new_x_shape)
return x.permute(0, 2, 1, 3)
def transpose_key_for_scores(self, x):
new_x_shape = x.size()[:-1] + (self.num_heads, self.attention_head_size)
x = torch.reshape(x, new_x_shape)
return x.permute(0, 2, 3, 1)
def forward(self, hidden_states, attention_mask):
mixed_query_layer = self.query(hidden_states)
mixed_key_layer = self.key(hidden_states)
mixed_value_layer = self.value(hidden_states)
query_layer = self.transpose_for_scores(mixed_query_layer)
key_layer = self.transpose_key_for_scores(mixed_key_layer)
value_layer = self.transpose_for_scores(mixed_value_layer)
# Take the dot product between "query" and "key" to get the raw attention scores.
attention_scores = torch.matmul(query_layer, key_layer)
attention_scores = attention_scores / math.sqrt(self.attention_head_size)
# Apply the attention mask (precomputed for all layers in the BertModel forward() function)
attention_scores = attention_scores + attention_mask
# Normalize the attention scores to probabilities.
attention_probs = F.softmax(attention_scores, dim=-1)
# This is actually dropping out entire tokens to attend to, which might
# seem a bit unusual, but is taken from the original Transformer paper.
attention_probs = self.dropout(attention_probs)
context_layer = torch.matmul(attention_probs, value_layer)
context_layer = context_layer.permute(0, 2, 1, 3).contiguous()
new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)
context_layer = torch.reshape(context_layer, new_context_layer_shape)
return context_layer
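# Shape walkthrough (hedged sketch, not in the original): with a config
# where hidden_size=768 and num_attention_heads=12 (head size 64),
#
#   hidden_states:    [batch, seq_len, 768]
#   query/value:      [batch, 12, seq_len, 64]; key: [batch, 12, 64, seq_len]
#   attention_scores: [batch, 12, seq_len, seq_len], scaled by 1/sqrt(64)
#   context_layer:    [batch, seq_len, 768] after the final reshape
#
# transpose_key_for_scores() pre-transposes the key so torch.matmul(query, key)
# yields the scores directly, avoiding a separate key.transpose(-1, -2).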
|
PyTorch/Segmentation/nnUNet/triton/scripts/docker | docker | build | #!/usr/bin/env bash
# Copyright (c) 2021 NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
docker build -t nnunet . -f Dockerfile-Triton
|
PyTorch/SpeechRecognition/Jasper/platform | platform | DGX2_Jasper_AMP_16GPU | #!/bin/bash
NUM_GPUS=16 AMP=true BATCH_SIZE=64 GRAD_ACCUMULATION_STEPS=1 bash scripts/train.sh "$@"
|
PyTorch/Classification/GPUNet | GPUNet | requirements | # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
git+https://github.com/NVIDIA/[email protected]#egg=dllogger
timm==0.5.4
|
PyTorch/LanguageModeling/BERT/triton/dist6l/runner | runner | __main__ | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import pathlib
from typing import List
if __name__ == "__main__" and __package__ is None:
__package__ = pathlib.Path(__file__).parent.name
from ...runner.config import Config
from ...runner.executor import Executor
from ...runner.finalizer import ExperimentFinalizer
from ...runner.maintainer import DockerMaintainer
from ...runner.preparer import ExperimentPreparer
from ...runner.runner_proxy import RunnerProxy
from .pipeline_impl import pipeline
class ExperimentRunner(RunnerProxy):
"""
Experiment Runner proxy for runner wrapper
"""
maintainer_cls = DockerMaintainer
executor_cls = Executor
preparer_cls = ExperimentPreparer
finalizer_cls = ExperimentFinalizer
def execute(config_path: str, devices: List[str]):
if len(devices) == 0:
devices = ["0"]
config = Config.from_file(config_path)
runner = ExperimentRunner(config=config, pipeline=pipeline, devices=devices)
runner.start()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--config-path", type=str, required=True, help="Path to configuration file with details.")
parser.add_argument(
"--devices", type=str, nargs="*", required=False, help="Path to configuration file with details."
)
args = parser.parse_args()
config_path = args.config_path
devices = args.devices
execute(config_path, devices) |
TensorFlow2/Recommendation/WideAndDeep/triton/deployment_toolkit/library | library | tf | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from pathlib import Path
from typing import Dict, Optional, Tuple, Union
# pytype: disable=import-error
import tensorflow as tf
from tensorflow.python.eager import wrap_function
from tf2onnx.shape_inference import infer_shape
from tf2onnx.tf_loader import freeze_session, inputs_without_resource, is_function, remove_redundant_inputs, tf_optimize
from ..args import filter_fn_args
from ..core import (
GET_MODEL_FN_NAME,
GET_SERVING_INPUT_RECEIVER_FN,
BaseLoader,
BaseRunner,
BaseRunnerSession,
BaseSaver,
ExportFormat,
Format,
Model,
ModelInputType,
TensorSpec,
load_from_file,
)
from ..extensions import loaders, runners, savers
# pytype: enable=import-error
LOGGER = logging.getLogger(__name__)
def is_tf2():
return tf.__version__.startswith("2.")
def create_session_config(*, allow_growth=False, use_xla=False, gpu_memory_fraction=1.0):
gpu_options = tf.compat.v1.GPUOptions(
per_process_gpu_memory_fraction=gpu_memory_fraction, allow_growth=allow_growth
)
config = tf.compat.v1.ConfigProto(gpu_options=gpu_options)
if use_xla:
config.graph_options.optimizer_options.global_jit_level = tf.OptimizerOptions.ON_1
LOGGER.debug(
f"Using gpu memory fraction: allow_growth={allow_growth} "
f"gpu_memory_fraction={gpu_memory_fraction} "
f"use_xla={use_xla}"
)
return config
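# Hedged usage sketch (not part of the original file):
#
#   config = create_session_config(allow_growth=True, use_xla=False)
#   with tf.compat.v1.Session(config=config) as sess:
#       ...  # run graph-mode ops under the configured session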
def _from_saved_model_v1(sess, model_path, tag, signatures):
"""
Load tensorflow graph from saved_model.
NOTICE: Modified version from tf2onnx project
"""
wrn_no_tag = "'--tag' not specified for saved_model. Using --tag serve"
wrn_empty_tag = "'--tag' value is empty string. Using tag =[[]]"
if tag is None:
tag = [tf.saved_model.SERVING]
LOGGER.warning(wrn_no_tag)
if tag == "":
tag = [[]]
LOGGER.warning(wrn_empty_tag)
if not isinstance(tag, list):
tag = [tag]
imported = tf.compat.v1.saved_model.loader.load(sess, tag, model_path)
for k in imported.signature_def.keys():
if k.startswith("_"):
# consider signatures starting with '_' private
continue
signatures.append(k)
try:
from tensorflow.contrib.saved_model.python.saved_model import ( # pytype: disable=import-error
signature_def_utils,
)
def get_signature_def(meta_graph_def, k):
return signature_def_utils.get_signature_def_by_key(meta_graph_def, k)
except ImportError:
# TF1.12 changed the api
def get_signature_def(meta_graph_def, k):
return meta_graph_def.signature_def[k]
inputs = {}
outputs = {}
for k in signatures:
inputs_tensor_info = get_signature_def(imported, k).inputs
for name, input_tensor in inputs_tensor_info.items():
inputs[name] = input_tensor.name
outputs_tensor_info = get_signature_def(imported, k).outputs
for name, output_tensor in outputs_tensor_info.items():
outputs[name] = output_tensor.name
frozen_graph = freeze_session(sess, input_names=list(inputs.values()), output_names=list(outputs.values()))
return frozen_graph, inputs, outputs
class TFEstimatorLoader(BaseLoader):
required_fn_name_for_signature_parsing: Optional[str] = GET_MODEL_FN_NAME
def __init__(self, **kwargs):
self._model_args = kwargs
def load(self, model_path: Union[str, Path], **_) -> Model:
if isinstance(model_path, Path):
model_path = model_path.as_posix()
get_model = load_from_file(model_path, "model", GET_MODEL_FN_NAME)
get_serving_input_receiver_fn = load_from_file(model_path, "model", GET_SERVING_INPUT_RECEIVER_FN)
if get_model is None:
raise RuntimeError(f"Could not find {GET_MODEL_FN_NAME} in {model_path}")
if get_serving_input_receiver_fn is None:
raise RuntimeError(f"Could not find {GET_SERVING_INPUT_RECEIVER_FN} in {model_path}")
model_args = filter_fn_args(self._model_args, fn=get_model)
serving_input_receiver_args = filter_fn_args(self._model_args, fn=get_serving_input_receiver_fn)
session_config = create_session_config(allow_growth=True)
tf.compat.v1.reset_default_graph()
with tf.compat.v1.Session(config=session_config) as sess:
estimator = get_model(**model_args)
serving_input_receiver_fn = get_serving_input_receiver_fn(**serving_input_receiver_args)
input_receiver = serving_input_receiver_fn()
estimator_spec = estimator.model_fn(
features=input_receiver.features,
labels=None,
mode=tf.estimator.ModeKeys.PREDICT,
config=estimator.config,
)
input_tensors_dict = input_receiver.receiver_tensors
output_tensors_dict = estimator_spec.predictions
inputs_dict = {k: tensor2tensor_spec(tensor) for k, tensor in input_tensors_dict.items()}
outputs_dict = {k: tensor2tensor_spec(tensor) for k, tensor in output_tensors_dict.items()}
input_tensor_names = [t.name for t in inputs_dict.values()]
output_tensor_names = [t.name for t in outputs_dict.values()]
graph_saver = estimator_spec.scaffold.saver or tf.compat.v1.train.Saver(sharded=True)
graph_saver.restore(sess, estimator.latest_checkpoint())
input_tensor_names = inputs_without_resource(sess, input_tensor_names)
frozen_graph = freeze_session(sess, input_names=input_tensor_names, output_names=output_tensor_names)
input_tensor_names = remove_redundant_inputs(frozen_graph, input_tensor_names)
tf.compat.v1.reset_default_graph()
with tf.compat.v1.Session(config=estimator.config.session_config):
frozen_graph = tf_optimize(input_tensor_names, output_tensor_names, frozen_graph)
tf.compat.v1.reset_default_graph()
return Model(frozen_graph, None, inputs_dict, outputs_dict)
class TFKerasLoader(BaseLoader):
"""
Loads keras model from source code
    The tf-allow-growth flag controls the GPU memory growth limiting feature
    (https://www.tensorflow.org/guide/gpu#limiting_gpu_memory_growth). By default it is disabled.
"""
required_fn_name_for_signature_parsing: Optional[str] = GET_MODEL_FN_NAME
def __init__(self, tf_allow_growth: bool = False, **kwargs):
self._allow_growth = tf_allow_growth
self._model_args = kwargs
def load(self, model_path: Union[str, Path], **_) -> Model:
# TODO fix: RuntimeError: Physical devices cannot be modified after being initialized
# if self._allow_growth:
# physical_devices = tf.config.experimental.list_physical_devices("GPU")
# for device in physical_devices:
# tf.config.experimental.set_memory_growth(device, True)
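        # Note: a commonly used workaround for this initialization-order issue
        # is to export TF_FORCE_GPU_ALLOW_GROWTH=true in the environment before
        # TensorFlow first touches the GPU, which enables memory growth without
        # calling set_memory_growth() at runtime.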
tf.keras.backend.clear_session()
tf.keras.backend.set_learning_phase(False)
if isinstance(model_path, Path):
model_path = model_path.as_posix()
get_model = load_from_file(model_path, "model", GET_MODEL_FN_NAME)
if get_model is None:
raise RuntimeError(f"Could not find {GET_MODEL_FN_NAME} in {model_path}")
model_args = filter_fn_args(self._model_args, fn=get_model)
model, call_fn = get_model(**model_args)
inputs_dict: Dict[str, TensorSpec] = {
input_name: TensorSpec(t.name, t.dtype.name, tuple(t.shape.as_list()))
for input_name, t in zip(model.input_names, model.inputs)
}
concrete_func = call_fn.get_concrete_function(
*(tf.TensorSpec(shape=spec.shape, dtype=spec.dtype, name=name) for name, spec in inputs_dict.items())
)
output_tensors_names = [tensor.name for tensor in concrete_func.outputs]
outputs_dict: Dict[str, TensorSpec] = {
output_name: TensorSpec(output_tensor_name, t.dtype.name, tuple(t.shape.as_list()))
for output_name, output_tensor_name, t in zip(model.output_names, output_tensors_names, model.outputs)
}
tf.keras.backend.clear_session()
tf.keras.backend.set_learning_phase(False)
def _add_suffix_as_quickfix_for_tf24_func_refactor(spec):
if not spec.name.endswith(":0"):
spec = spec._replace(name=spec.name + ":0")
return spec
inputs_dict = {name: _add_suffix_as_quickfix_for_tf24_func_refactor(spec) for name, spec in inputs_dict.items()}
return Model(model, None, inputs_dict, outputs_dict)
class TFSavedModelLoader(BaseLoader):
def __init__(self, tf_allow_growth: bool = False):
self._allow_growth = tf_allow_growth
def load(self, model_path: Union[str, Path], **kwargs) -> Model:
if isinstance(model_path, Path):
model_path = model_path.as_posix()
tf.compat.v1.reset_default_graph()
if self._allow_growth:
physical_devices = tf.config.experimental.list_physical_devices("GPU")
for device in physical_devices:
tf.config.experimental.set_memory_growth(device, True)
if is_tf2():
from tf2onnx.tf_loader import _from_saved_model_v2 # pytype: disable=import-error
(
graph_def,
input_names,
output_names,
concrete_func,
imported,
initialized_tables,
tensors_to_rename,
) = _from_saved_model_v2(
model_path=model_path,
input_names=None,
output_names=None,
tag=None,
signature_def=[],
concrete_function_index=None,
large_model=False,
use_graph_names=False,
)
# inspired by
# https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/tools/saved_model_cli.py#L205
if concrete_func.structured_input_signature:
input_args, input_kwargs = concrete_func.structured_input_signature
input_names = list(input_kwargs)
assert (
not input_args
), f"Not supported args in concrete function signature args={input_args}, kwargs={input_kwargs}"
elif concrete_func._arg_keywords: # pylint: disable=protected-access
# For pure ConcreteFunctions we might have nothing better than _arg_keywords.
assert concrete_func._num_positional_args in [0, 1]
input_names = concrete_func._arg_keywords
input_tensors = [tensor for tensor in concrete_func.inputs if tensor.dtype != tf.dtypes.resource]
inputs = {name: tensor.name for name, tensor in zip(input_names, input_tensors)}
# they are already flattened
output_tensors = [tensor for tensor in concrete_func.outputs if tensor.dtype != tf.dtypes.resource]
output_names = sorted(concrete_func.structured_outputs) # because outputs are in flatten form
outputs = {name: tensor.name for name, tensor in zip(output_names, output_tensors)}
else:
session_config = create_session_config(allow_growth=True)
with tf.compat.v1.Session(config=session_config) as sess:
graph_def, inputs, outputs = _from_saved_model_v1(sess, model_path, tag=None, signatures=[])
inputs, outputs = handle_tensor_specs(graph_def, inputs, outputs)
return Model(graph_def, None, inputs, outputs)
class TFRunner(BaseRunner):
def __init__(self):
pass
def init_inference(self, model: Model):
if is_tf2():
return TF2RunnerSession(model=model)
else:
return TF1RunnerSession(model=model)
class TF1RunnerSession(BaseRunnerSession):
def __init__(self, model: Model):
super().__init__(model)
assert isinstance(model.handle, tf.compat.v1.GraphDef)
self._inputs = None
self._outputs = None
self._session = None
self._old_env_values = {}
def __enter__(self):
self._old_env_values = self._set_env_variables()
tf.compat.v1.reset_default_graph()
session_config = create_session_config(allow_growth=True)
self._session = tf.compat.v1.Session(config=session_config)
self._session.__enter__()
tf.import_graph_def(self._model.handle, name="")
self._inputs = {
name: self._session.graph.get_tensor_by_name(spec.name) for name, spec in self._model.inputs.items()
}
self._outputs = {
name: self._session.graph.get_tensor_by_name(spec.name) for name, spec in self._model.outputs.items()
}
return self
def __exit__(self, exc_type, exc_value, traceback):
self._session.__exit__(exc_type, exc_value, traceback)
tf.compat.v1.reset_default_graph()
self._inputs = None
self._outputs = None
self._session = None
self._recover_env_variables(self._old_env_values)
def __call__(self, x: Dict[str, object]):
feed_dict = {placeholder: x[name] for name, placeholder in self._inputs.items()}
return self._session.run(self._outputs, feed_dict=feed_dict)
class TF2RunnerSession(BaseRunnerSession):
def __init__(self, model: Model):
super().__init__(model)
assert isinstance(model.handle, tf.compat.v1.GraphDef)
self._concrete_func = None
def __enter__(self):
tf.compat.v1.reset_default_graph()
input_tensor_names = [spec.name for spec in self._model.inputs.values()]
output_tensor_names = [spec.name for spec in self._model.outputs.values()]
self._concrete_func = wrap_function.function_from_graph_def(
self._model.handle, input_tensor_names, output_tensor_names
)
self._concrete_func._signature = [
tf.TensorSpec(shape=spec.shape, dtype=spec.dtype, name=name) for name, spec in self._model.inputs.items()
]
return self
def __exit__(self, exc_type, exc_value, traceback):
self._concrete_func = None
tf.compat.v1.reset_default_graph()
def __call__(self, x: Dict[str, object]):
x = tf.nest.map_structure(tf.convert_to_tensor, x)
y_pred = self._concrete_func(**x)
output_struct = {name: spec.name for name, spec in self._model.outputs.items()}
y_pred = tf.nest.map_structure(lambda t: t.numpy(), y_pred)
y_pred = tf.nest.pack_sequence_as(output_struct, y_pred)
return y_pred
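# Example usage of the runner sessions above (a minimal sketch; `model` is a
# hypothetical Model instance produced by one of the loaders in this module and
# `batch` a dict of numpy arrays keyed by input names):
#
#   runner = TFRunner()
#   with runner.init_inference(model=model) as session:
#       y_pred = session(batch)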
class TFSavedModelSaver(BaseSaver):
def save(self, model, model_path: Union[str, Path], dataloader_fn) -> None:
if isinstance(model_path, Path):
model_path = model_path.as_posix()
if is_tf2():
tf.keras.models.save_model(model=model.handle, filepath=model_path, overwrite=True)
else:
session_config = create_session_config(allow_growth=True)
with tf.compat.v1.Session(config=session_config) as sess:
tf.import_graph_def(model.handle, name="")
is_func = is_function(sess.graph)
if not is_func:
infer_shape(sess.graph, {})
inputs = {name: sess.graph.get_tensor_by_name(spec.name) for name, spec in model.inputs.items()}
outputs = {name: sess.graph.get_tensor_by_name(spec.name) for name, spec in model.outputs.items()}
def _ensure_shape(tensors_dict, tensors_specs):
for name, tensor in tensors_dict.items():
if tensor.shape.rank is None:
tensor.set_shape(tensors_specs[name].shape)
return tensors_dict
inputs = _ensure_shape(inputs, model.inputs)
outputs = _ensure_shape(outputs, model.outputs)
LOGGER.info(inputs)
LOGGER.info(outputs)
tf.compat.v1.saved_model.simple_save(sess, model_path, inputs, outputs, legacy_init_op=None)
def handle_tensor_specs(
graph_def, inputs: Dict[str, str], outputs: Dict[str, str]
) -> Tuple[Dict[str, TensorSpec], Dict[str, TensorSpec]]:
session_config = tf.compat.v1.ConfigProto(graph_options=tf.compat.v1.GraphOptions(infer_shapes=True))
tf.compat.v1.reset_default_graph()
with tf.compat.v1.Session(config=session_config) as sess:
tf.import_graph_def(graph_def, name="")
def _get_spec(tensors_dict):
tensors_dict = {name: sess.graph.get_tensor_by_name(tname) for name, tname in tensors_dict.items()}
return {name: tensor2tensor_spec(tensor) for name, tensor in tensors_dict.items()}
inputs = _get_spec(inputs)
outputs = _get_spec(outputs)
tf.compat.v1.reset_default_graph()
return inputs, outputs
def tensor2tensor_spec(tensor):
shape = tuple(s.value if hasattr(s, "value") else s for s in tensor.shape)
return TensorSpec(tensor.name, tensor.dtype.name, shape)
loaders.register_extension(ModelInputType.TF_ESTIMATOR.value, TFEstimatorLoader)
loaders.register_extension(ModelInputType.TF_KERAS.value, TFKerasLoader)
loaders.register_extension(Format.TF_SAVEDMODEL.value, TFSavedModelLoader)
loaders.register_extension(Format.TF_TRT.value, TFSavedModelLoader)
savers.register_extension(Format.TF_SAVEDMODEL.value, TFSavedModelSaver)
savers.register_extension(Format.TF_TRT.value, TFSavedModelSaver)
runners.register_extension(ModelInputType.TF_ESTIMATOR.value, TFRunner)
runners.register_extension(ModelInputType.TF_KERAS.value, TFRunner)
runners.register_extension(Format.TF_SAVEDMODEL.value, TFRunner)
runners.register_extension(Format.TF_TRT.value, TFRunner)
|
PyTorch/Segmentation/MaskRCNN/pytorch/maskrcnn_benchmark/layers | layers | misc | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
"""
Helper classes and functions that support empty tensors for some nn modules.
Ideally, support for empty tensors would be added directly to these functions
in PyTorch.
This can be removed once https://github.com/pytorch/pytorch/issues/12013
is implemented.
"""
import math
import torch
from torch.nn.modules.utils import _ntuple
class _NewEmptyTensorOp(torch.autograd.Function):
@staticmethod
def forward(ctx, x, new_shape):
ctx.shape = x.shape
return x.new_empty(new_shape)
@staticmethod
def backward(ctx, grad):
shape = ctx.shape
return _NewEmptyTensorOp.apply(grad, shape), None
class Conv2d(torch.nn.Conv2d):
def forward(self, x):
if x.numel() > 0:
return super(Conv2d, self).forward(x)
# get output shape
output_shape = [
(i + 2 * p - (di * (k - 1) + 1)) // d + 1
for i, p, di, k, d in zip(
x.shape[-2:], self.padding, self.dilation, self.kernel_size, self.stride
)
]
output_shape = [x.shape[0], self.weight.shape[0]] + output_shape
return _NewEmptyTensorOp.apply(x, output_shape)
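# Example (a minimal sketch): the wrapper above makes zero-element inputs
# produce a correctly shaped empty output instead of raising:
#   conv = Conv2d(3, 8, kernel_size=3, padding=1)
#   conv(torch.empty(0, 3, 32, 32)).shape  # -> torch.Size([0, 8, 32, 32])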
class ConvTranspose2d(torch.nn.ConvTranspose2d):
def forward(self, x):
if x.numel() > 0:
return super(ConvTranspose2d, self).forward(x)
# get output shape
output_shape = [
(i - 1) * d - 2 * p + (di * (k - 1) + 1) + op
for i, p, di, k, d, op in zip(
x.shape[-2:],
self.padding,
self.dilation,
self.kernel_size,
self.stride,
self.output_padding,
)
]
output_shape = [x.shape[0], self.bias.shape[0]] + output_shape
return _NewEmptyTensorOp.apply(x, output_shape)
def interpolate(
input, size=None, scale_factor=None, mode="nearest", align_corners=None
):
if input.numel() > 0:
return torch.nn.functional.interpolate(
input, size, scale_factor, mode, align_corners
)
def _check_size_scale_factor(dim):
if size is None and scale_factor is None:
raise ValueError("either size or scale_factor should be defined")
if size is not None and scale_factor is not None:
raise ValueError("only one of size or scale_factor should be defined")
if (
scale_factor is not None
and isinstance(scale_factor, tuple)
and len(scale_factor) != dim
):
raise ValueError(
"scale_factor shape must match input shape. "
"Input is {}D, scale_factor size is {}".format(dim, len(scale_factor))
)
def _output_size(dim):
_check_size_scale_factor(dim)
if size is not None:
return size
scale_factors = _ntuple(dim)(scale_factor)
# math.floor might return float in py2.7
return [
int(math.floor(input.size(i + 2) * scale_factors[i])) for i in range(dim)
]
output_shape = tuple(_output_size(2))
output_shape = input.shape[:-2] + output_shape
return _NewEmptyTensorOp.apply(input, output_shape)
def nhwc_to_nchw_transform(x):
if x.numel() == 0:
return x
return x.to(memory_format=torch.contiguous_format)
def nchw_to_nhwc_transform(x):
if x.numel() == 0:
return x
return x.to(memory_format=torch.channels_last) |
PyTorch/Recommendation/DLRM/preproc | preproc | run_NVTabular | #!/bin/bash
# Copyright (c) 2021 NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#########################################################################
# File Name: run_NVTabular.sh
set -e
# the data path including 1TB criteo data, day_0, day_1, ...
export INPUT_PATH=${1:-'/data/dlrm/criteo'}
# the output path, use for generating the dictionary and the final dataset
# the output folder should have more than 300GB
export OUTPUT_PATH=${2:-'/data/dlrm/output'}
export FREQUENCY_LIMIT=${3:-'15'}
export CRITEO_PARQUET=${4:-'/data/dlrm/criteo_parquet'}
if [ "$DGX_VERSION" = "DGX-2" ]; then
export DEVICES=0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
else
export DEVICES=0,1,2,3,4,5,6,7
fi
echo "Preprocessing data"
python preproc_NVTabular.py $INPUT_PATH $OUTPUT_PATH --devices $DEVICES --intermediate_dir $CRITEO_PARQUET --freq_threshold $FREQUENCY_LIMIT
echo "Shuffling"
source ${DGX_VERSION}_config.sh
export SPARK_HOME=/opt/spark
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
export PATH=$SPARK_HOME/bin:$SPARK_HOME/sbin:$PATH
export MASTER=spark://$HOSTNAME:7077
export SPARK_LOCAL_DIRS='/data/dlrm/spark/tmp'
mkdir -p $SPARK_LOCAL_DIRS
echo "Starting spark standalone"
start-master.sh
start-slave.sh $MASTER
spark-submit --master $MASTER \
--driver-memory "${DRIVER_MEMORY}G" \
--executor-cores $NUM_EXECUTOR_CORES \
--executor-memory "${EXECUTOR_MEMORY}G" \
--conf spark.cores.max=$TOTAL_CORES \
--conf spark.task.cpus=1 \
--conf spark.sql.files.maxPartitionBytes=1073741824 \
--conf spark.sql.shuffle.partitions=1200 \
--conf spark.driver.maxResultSize=2G \
--conf spark.locality.wait=0s \
--conf spark.network.timeout=1800s \
--conf spark.task.resource.gpu.amount=0.01 \
--conf spark.executor.resource.gpu.amount=1 \
--conf spark.plugins=com.nvidia.spark.SQLPlugin \
--conf spark.rapids.sql.concurrentGpuTasks=2 \
--conf spark.rapids.sql.reader.batchSizeRows=4000000 \
--conf spark.rapids.memory.pinnedPool.size=16g \
--conf spark.rapids.sql.explain=ALL \
--conf spark.sql.autoBroadcastJoinThreshold=1GB \
--conf spark.rapids.sql.incompatibleOps.enabled=true \
--conf spark.executor.extraJavaOptions="-Dai.rapids.cudf.prefer-pinned=true\ -Djava.io.tmpdir=$SPARK_LOCAL_DIRS" \
NVT_shuffle_spark.py --input_path $OUTPUT_PATH/train --output_path $OUTPUT_PATH/shuffled_train
stop-master.sh
stop-slave.sh
rm -rf $OUTPUT_PATH/train
mv $OUTPUT_PATH/shuffled_train $OUTPUT_PATH/train
|
TensorFlow/Detection/SSD/models/research/slim/nets | nets | mobilenet_v1_train | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Build and train mobilenet_v1 with options for quantization."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from datasets import dataset_factory
from nets import mobilenet_v1
from preprocessing import preprocessing_factory
slim = tf.contrib.slim
flags = tf.app.flags
flags.DEFINE_string('master', '', 'Session master')
flags.DEFINE_integer('task', 0, 'Task')
flags.DEFINE_integer('ps_tasks', 0, 'Number of ps')
flags.DEFINE_integer('batch_size', 64, 'Batch size')
flags.DEFINE_integer('num_classes', 1001, 'Number of classes to distinguish')
flags.DEFINE_integer('number_of_steps', None,
'Number of training steps to perform before stopping')
flags.DEFINE_integer('image_size', 224, 'Input image resolution')
flags.DEFINE_float('depth_multiplier', 1.0, 'Depth multiplier for mobilenet')
flags.DEFINE_bool('quantize', False, 'Quantize training')
flags.DEFINE_string('fine_tune_checkpoint', '',
'Checkpoint from which to start finetuning.')
flags.DEFINE_string('checkpoint_dir', '',
'Directory for writing training checkpoints and logs')
flags.DEFINE_string('dataset_dir', '', 'Location of dataset')
flags.DEFINE_integer('log_every_n_steps', 100, 'Number of steps per log')
flags.DEFINE_integer('save_summaries_secs', 100,
'How often to save summaries, secs')
flags.DEFINE_integer('save_interval_secs', 100,
'How often to save checkpoints, secs')
FLAGS = flags.FLAGS
_LEARNING_RATE_DECAY_FACTOR = 0.94
def get_learning_rate():
if FLAGS.fine_tune_checkpoint:
# If we are fine tuning a checkpoint we need to start at a lower learning
# rate since we are farther along on training.
return 1e-4
else:
return 0.045
def get_quant_delay():
if FLAGS.fine_tune_checkpoint:
# We can start quantizing immediately if we are finetuning.
return 0
else:
# We need to wait for the model to train a bit before we quantize if we are
# training from scratch.
return 250000
def imagenet_input(is_training):
"""Data reader for imagenet.
Reads in imagenet data and performs pre-processing on the images.
Args:
is_training: bool specifying if train or validation dataset is needed.
Returns:
A batch of images and labels.
"""
if is_training:
dataset = dataset_factory.get_dataset('imagenet', 'train',
FLAGS.dataset_dir)
else:
dataset = dataset_factory.get_dataset('imagenet', 'validation',
FLAGS.dataset_dir)
provider = slim.dataset_data_provider.DatasetDataProvider(
dataset,
shuffle=is_training,
common_queue_capacity=2 * FLAGS.batch_size,
common_queue_min=FLAGS.batch_size)
[image, label] = provider.get(['image', 'label'])
image_preprocessing_fn = preprocessing_factory.get_preprocessing(
'mobilenet_v1', is_training=is_training)
image = image_preprocessing_fn(image, FLAGS.image_size, FLAGS.image_size)
images, labels = tf.train.batch(
[image, label],
batch_size=FLAGS.batch_size,
num_threads=4,
capacity=5 * FLAGS.batch_size)
labels = slim.one_hot_encoding(labels, FLAGS.num_classes)
return images, labels
def build_model():
"""Builds graph for model to train with rewrites for quantization.
Returns:
g: Graph with fake quantization ops and batch norm folding suitable for
training quantized weights.
train_tensor: Train op for execution during training.
"""
g = tf.Graph()
with g.as_default(), tf.device(
tf.train.replica_device_setter(FLAGS.ps_tasks)):
inputs, labels = imagenet_input(is_training=True)
with slim.arg_scope(mobilenet_v1.mobilenet_v1_arg_scope(is_training=True)):
logits, _ = mobilenet_v1.mobilenet_v1(
inputs,
is_training=True,
depth_multiplier=FLAGS.depth_multiplier,
num_classes=FLAGS.num_classes)
tf.losses.softmax_cross_entropy(labels, logits)
# Call rewriter to produce graph with fake quant ops and folded batch norms
# quant_delay delays start of quantization till quant_delay steps, allowing
# for better model accuracy.
if FLAGS.quantize:
tf.contrib.quantize.create_training_graph(quant_delay=get_quant_delay())
total_loss = tf.losses.get_total_loss(name='total_loss')
# Configure the learning rate using an exponential decay.
num_epochs_per_decay = 2.5
imagenet_size = 1271167
decay_steps = int(imagenet_size / FLAGS.batch_size * num_epochs_per_decay)
learning_rate = tf.train.exponential_decay(
get_learning_rate(),
tf.train.get_or_create_global_step(),
decay_steps,
_LEARNING_RATE_DECAY_FACTOR,
staircase=True)
opt = tf.train.GradientDescentOptimizer(learning_rate)
train_tensor = slim.learning.create_train_op(
total_loss,
optimizer=opt)
slim.summaries.add_scalar_summary(total_loss, 'total_loss', 'losses')
slim.summaries.add_scalar_summary(learning_rate, 'learning_rate', 'training')
return g, train_tensor
def get_checkpoint_init_fn():
"""Returns the checkpoint init_fn if the checkpoint is provided."""
if FLAGS.fine_tune_checkpoint:
variables_to_restore = slim.get_variables_to_restore()
global_step_reset = tf.assign(tf.train.get_or_create_global_step(), 0)
# When restoring from a floating point model, the min/max values for
# quantized weights and activations are not present.
# We instruct slim to ignore variables that are missing during restoration
# by setting ignore_missing_vars=True
slim_init_fn = slim.assign_from_checkpoint_fn(
FLAGS.fine_tune_checkpoint,
variables_to_restore,
ignore_missing_vars=True)
def init_fn(sess):
slim_init_fn(sess)
# If we are restoring from a floating point model, we need to initialize
# the global step to zero for the exponential decay to result in
# reasonable learning rates.
sess.run(global_step_reset)
return init_fn
else:
return None
def train_model():
"""Trains mobilenet_v1."""
g, train_tensor = build_model()
with g.as_default():
slim.learning.train(
train_tensor,
FLAGS.checkpoint_dir,
is_chief=(FLAGS.task == 0),
master=FLAGS.master,
log_every_n_steps=FLAGS.log_every_n_steps,
graph=g,
number_of_steps=FLAGS.number_of_steps,
save_summaries_secs=FLAGS.save_summaries_secs,
save_interval_secs=FLAGS.save_interval_secs,
init_fn=get_checkpoint_init_fn(),
global_step=tf.train.get_global_step())
def main(unused_arg):
train_model()
if __name__ == '__main__':
tf.app.run(main)
|
TensorFlow/LanguageModeling/BERT | BERT | fp16_utils | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow as tf
import numpy as np
def float32_variable_storage_getter(getter, name, shape=None, dtype=None,
initializer=None, regularizer=None,
trainable=True,
*args, **kwargs):
"""Custom variable getter that forces trainable variables to be stored in
float32 precision and then casts them to the training precision.
"""
storage_dtype = tf.float32 if trainable else dtype
variable = getter(name, shape, dtype=storage_dtype,
initializer=initializer, regularizer=regularizer,
trainable=trainable,
*args, **kwargs)
if trainable and dtype != tf.float32:
variable = tf.cast(variable, dtype)
return variable
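# Example usage (a minimal sketch for TF1-style graph mode; `build_model` is a
# hypothetical model function, not part of this file):
#
#   with tf.variable_scope('fp32_storage',
#                          custom_getter=float32_variable_storage_getter):
#       loss = build_model(tf.cast(inputs, tf.float16))
#
# Trainable variables are then created and updated in float32 (master weights)
# while the forward and backward passes run in float16.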
|
PyTorch/Classification/GPUNet/triton/085ms/runner | runner | config_NVIDIA-DGX-A100-(1x-A100-80GB) | batching: dynamic
checkpoints:
- name: 0.85ms
url: https://api.ngc.nvidia.com/v2/models/nvidia/dle/gpunet_1_pyt_ckpt/versions/21.12.0_amp/zip
configurations:
- checkpoint: 0.85ms
parameters:
backend_accelerator: trt
checkpoint: 0.85ms
device_kind: gpu
export_format: onnx
export_precision: fp16
format: onnx
max_batch_size: 64
number_of_model_instances: 2
precision: fp16
tensorrt_capture_cuda_graph: 0
torch_jit: none
container_version: '21.12'
datasets:
- name: imagenet
datasets_dir: datasets
ensemble_model_name: null
framework: PyTorch
measurement_steps_offline: 8
measurement_steps_online: 32
model_name: GPUnet
performance_tool: model_analyzer
triton_container_image: nvcr.io/nvidia/tritonserver:21.12-py3
triton_custom_operations: null
triton_dockerfile: null
triton_load_model_method: explicit
|
TensorFlow/Translation/GNMT/examples | examples | DGX1_FP32_8GPU | python nmt.py --output_dir=results --batch_size=1024 --learning_rate=2e-3 --num_gpus=8
|
TensorFlow2/LanguageModeling/BERT/scripts/docker | docker | build | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
docker build . --rm -t bert_tf2
|
TensorFlow/Classification/ConvNets/resnet50v1.5/training | training | DGX2_RN50_FP32_250E | #!/bin/bash
# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
WORKSPACE=${1:-"/workspace/rn50v15_tf"}
DATA_DIR=${2:-"/data"}
OTHER=${@:3}
if [[ ! -z "${BIND_TO_SOCKET}" ]]; then
BIND_TO_SOCKET="--bind-to socket"
fi
mpiexec --allow-run-as-root ${BIND_TO_SOCKET} -np 8 python3 main.py --arch=resnet50 \
--mode=train_and_evaluate --iter_unit=epoch --num_iter=250 --mixup=0.2 \
--batch_size=128 --warmup_steps=100 --cosine_lr --label_smoothing 0.1 \
--lr_init=0.256 --lr_warmup_epochs=8 --momentum=0.875 --weight_decay=3.0517578125e-05 \
--data_dir=${DATA_DIR}/tfrecords --data_idx_dir=${DATA_DIR}/dali_idx \
--results_dir=${WORKSPACE}/results --weight_init=fan_in ${OTHER}
|
PyTorch/Classification/GPUNet/triton/deployment_toolkit/triton_inference_runner | triton_inference_runner | grpc | # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import logging
import queue
import threading
from pathlib import Path
from typing import Optional
# pytype: disable=import-error
try:
from tritonclient import utils as client_utils # noqa: F401
except ImportError:
import tritonclientutils as client_utils # noqa: F401
try:
import tritonclient.grpc as grpc_client
except ImportError:
import tritongrpcclient as grpc_client
# pytype: enable=import-error
# method from PEP-366 to support relative import in executed modules
if __package__ is None:
__package__ = Path(__file__).parent.name
from .base import BaseRunner
LOGGER = logging.getLogger("triton_inference_runner.grpc")
class SyncInferenceRunner(BaseRunner):
def __iter__(self):
LOGGER.debug(f"Connecting to {self._server_url}")
client = grpc_client.InferenceServerClient(url=self._server_url, verbose=self._verbose)
error = self._verify_triton_state(client)
if error:
raise RuntimeError(f"Could not communicate to Triton Server: {error}")
LOGGER.debug(
f"Triton server {self._server_url} and model {self._model_name}:{self._model_version} " f"are up and ready!"
)
model_config = client.get_model_config(self._model_name, self._model_version)
model_metadata = client.get_model_metadata(self._model_name, self._model_version)
LOGGER.info(f"Model config {model_config}")
LOGGER.info(f"Model metadata {model_metadata}")
inputs = {tm.name: tm for tm in model_metadata.inputs}
outputs = {tm.name: tm for tm in model_metadata.outputs}
output_names = list(outputs)
outputs_req = [grpc_client.InferRequestedOutput(name) for name in outputs]
for ids, x, y_real in self._dataloader:
infer_inputs = []
for name in inputs:
data = x[name]
datatype = inputs[name].datatype
infer_input = grpc_client.InferInput(name, data.shape, datatype)
target_np_dtype = client_utils.triton_to_np_dtype(datatype)
data = data.astype(target_np_dtype)
infer_input.set_data_from_numpy(data)
infer_inputs.append(infer_input)
results = client.infer(
model_name=self._model_name,
model_version=self._model_version,
inputs=infer_inputs,
outputs=outputs_req,
timeout=self._response_wait_t,
)
y_pred = {name: results.as_numpy(name) for name in output_names}
yield ids, x, y_pred, y_real
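# Example (a minimal sketch; the server URL, model identifiers and dataloader
# are hypothetical, and the constructor signature is assumed to match
# BaseRunner):
#
#   runner = SyncInferenceRunner("localhost:8001", "GPUnet", "1", dataloader=dl)
#   for ids, x, y_pred, y_real in runner:
#       ...  # y_pred is a dict of numpy arrays keyed by output name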
class AsyncInferenceRunner(BaseRunner):
DEFAULT_MAX_UNRESP_REQS = 128
def __init__(
self,
server_url: str,
model_name: str,
model_version: str,
*,
dataloader,
verbose=False,
response_wait_time: Optional[float] = None,
max_unresponded_requests: Optional[int] = None,
):
super().__init__(
server_url,
model_name,
model_version,
dataloader=dataloader,
verbose=verbose,
response_wait_time=response_wait_time,
)
self._max_unresp_reqs = (
self.DEFAULT_MAX_UNRESP_REQS if max_unresponded_requests is None else max_unresponded_requests
)
self._results = queue.Queue()
self._processed_all = False
self._errors = []
self._num_waiting_for = 0
self._sync = threading.Condition()
self._req_thread = threading.Thread(target=self.req_loop, daemon=True)
def __iter__(self):
self._req_thread.start()
        timeout_s = 0.050  # check processed_all and error flags every 50ms
while True:
try:
ids, x, y_pred, y_real = self._results.get(timeout=timeout_s)
yield ids, x, y_pred, y_real
except queue.Empty:
shall_stop = self._processed_all or self._errors
if shall_stop:
break
LOGGER.debug("Waiting for request thread to stop")
self._req_thread.join()
if self._errors:
error_msg = "\n".join(map(str, self._errors))
raise RuntimeError(error_msg)
def _on_result(self, ids, x, y_real, output_names, result, error):
with self._sync:
request_id = str(ids[0])
NOT_MATCHING_REQUEST_ID_MSG = (
"Error during processing result - request_id doesn't match. This shouldn't have happened."
)
if error:
response_id = error.get_response().id
if response_id != request_id:
raise RuntimeError(NOT_MATCHING_REQUEST_ID_MSG)
self._errors.append(error)
else:
response_id = result.get_response().id
if response_id != request_id:
raise RuntimeError(NOT_MATCHING_REQUEST_ID_MSG)
y_pred = {name: result.as_numpy(name) for name in output_names}
self._results.put((ids, x, y_pred, y_real))
self._num_waiting_for -= 1
self._sync.notify_all()
def req_loop(self):
LOGGER.debug(f"Connecting to {self._server_url}")
client = grpc_client.InferenceServerClient(url=self._server_url, verbose=self._verbose)
self._errors = self._verify_triton_state(client)
if self._errors:
return
LOGGER.debug(
f"Triton server {self._server_url} and model {self._model_name}:{self._model_version} " f"are up and ready!"
)
model_config = client.get_model_config(self._model_name, self._model_version)
model_metadata = client.get_model_metadata(self._model_name, self._model_version)
LOGGER.info(f"Model config {model_config}")
LOGGER.info(f"Model metadata {model_metadata}")
inputs = {tm.name: tm for tm in model_metadata.inputs}
outputs = {tm.name: tm for tm in model_metadata.outputs}
output_names = list(outputs)
self._num_waiting_for = 0
for ids, x, y_real in self._dataloader:
infer_inputs = []
for name in inputs:
data = x[name]
datatype = inputs[name].datatype
infer_input = grpc_client.InferInput(name, data.shape, datatype)
target_np_dtype = client_utils.triton_to_np_dtype(datatype)
data = data.astype(target_np_dtype)
infer_input.set_data_from_numpy(data)
infer_inputs.append(infer_input)
outputs_req = [grpc_client.InferRequestedOutput(name) for name in outputs]
with self._sync:
def _check_can_send():
return self._num_waiting_for < self._max_unresp_reqs
can_send = self._sync.wait_for(_check_can_send, timeout=self._response_wait_t)
if not can_send:
error_msg = f"Runner could not send new requests for {self._response_wait_t}s"
self._errors.append(error_msg)
self._sync.notify_all()
break
request_id = str(ids[0])
callback = functools.partial(AsyncInferenceRunner._on_result, self, ids, x, y_real, output_names)
client.async_infer(
model_name=self._model_name,
model_version=self._model_version,
inputs=infer_inputs,
outputs=outputs_req,
callback=callback,
request_id=request_id,
)
self._num_waiting_for += 1
self._sync.notify_all()
# wait till receive all requested data
with self._sync:
def _all_processed():
LOGGER.debug(f"wait for {self._num_waiting_for} unprocessed jobs")
return self._num_waiting_for == 0
self._processed_all = self._sync.wait_for(_all_processed, self._max_wait_time)
if not self._processed_all:
error_msg = f"Runner {self._response_wait_t}s timeout received while waiting for results from server"
self._errors.append(error_msg)
self._sync.notify_all()
LOGGER.debug("Finished request thread")
|
PyTorch/Detection/Efficientdet/data | data | loader | """ Object detection loader/collate
Hacked together by Ross Wightman
"""
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import torch.utils.data
from .transforms import *
from utils.distributed_sampler import OrderedDistributedSampler
from functools import partial
MAX_NUM_INSTANCES = 100
def fast_collate(memory_format, batch):
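    # Collate a list of (image ndarray, target-dict) samples into batched
    # tensors. Per-instance numpy arrays in the targets (e.g. boxes, classes)
    # are zero-padded to MAX_NUM_INSTANCES so every batch has a fixed-size
    # target layout; scalar and per-batch values are copied into preallocated
    # tensors as-is.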
batch_size = len(batch)
# FIXME this needs to be more robust
target = dict()
for k, v in batch[0][1].items():
if isinstance(v, np.ndarray):
# if a numpy array, assume it relates to object instances, pad to MAX_NUM_INSTANCES
target_shape = (batch_size, MAX_NUM_INSTANCES)
if len(v.shape) > 1:
target_shape = target_shape + v.shape[1:]
target_dtype = torch.float32
elif isinstance(v, (tuple, list)):
# if tuple or list, assume per batch
target_shape = (batch_size, len(v))
target_dtype = torch.float32 if isinstance(v[0], float) else torch.int32
elif isinstance(v, torch.Tensor):
target_dtype = v.dtype
target_shape = (batch_size,) + tuple(v.size())
else:
# scalar, assume per batch
target_shape = batch_size,
target_dtype = torch.float32 if isinstance(v, float) else torch.int64
target[k] = torch.zeros(target_shape, dtype=target_dtype)
tensor = torch.zeros((batch_size, *batch[0][0].shape), dtype=torch.uint8).contiguous(
memory_format=memory_format
)
for i in range(batch_size):
tensor[i] += torch.from_numpy(batch[i][0])
for tk, tv in batch[i][1].items():
if isinstance(tv, np.ndarray) and len(tv.shape):
target[tk][i, 0:tv.shape[0]] = torch.from_numpy(tv)
elif isinstance(tv, torch.Tensor):
target[tk][i] = tv
else:
target[tk][i] = torch.tensor(tv, dtype=target[tk].dtype)
return tensor, target
class PrefetchLoader:
def __init__(self,
loader,
mean=IMAGENET_DEFAULT_MEAN,
std=IMAGENET_DEFAULT_STD):
self.loader = loader
self.mean = torch.tensor([x * 255 for x in mean]).cuda().view(1, 3, 1, 1)
self.std = torch.tensor([x * 255 for x in std]).cuda().view(1, 3, 1, 1)
def __iter__(self):
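        # Stage the next batch on a side CUDA stream (async H2D copy plus
        # uint8 -> float normalization) while the consumer processes the
        # current one; wait_stream() makes the default stream wait for the
        # staging work before the tensors are handed out.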
stream = torch.cuda.Stream()
first = True
for next_input, next_target in self.loader:
with torch.cuda.stream(stream):
next_input = next_input.cuda(non_blocking=True)
next_input = next_input.float().sub_(self.mean).div_(self.std)
next_target = {k: v.cuda(non_blocking=True) for k, v in next_target.items()}
if not first:
yield input, target
else:
first = False
torch.cuda.current_stream().wait_stream(stream)
input = next_input
target = next_target
yield input, target
def __len__(self):
return len(self.loader)
@property
def sampler(self):
return self.loader.batch_sampler
class IterationBasedBatchSampler(torch.utils.data.sampler.BatchSampler):
"""
Wraps a BatchSampler, resampling from it until
a specified number of iterations have been sampled
"""
def __init__(self, batch_sampler):
self.batch_sampler = batch_sampler
def __iter__(self):
while True:
for batch in self.batch_sampler:
yield batch
def __len__(self):
return len(self.batch_sampler)
def set_epoch(self, epoch):
if hasattr(self.batch_sampler.sampler, "set_epoch"):
self.batch_sampler.sampler.set_epoch(epoch)
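# Example (a minimal sketch): wrapping a finite BatchSampler turns it into an
# endless stream of batches, so training length is governed by a step budget
# instead of dataset epochs:
#
#   finite = torch.utils.data.sampler.BatchSampler(
#       torch.utils.data.RandomSampler(range(10)), batch_size=4, drop_last=False)
#   endless = IterationBasedBatchSampler(finite)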
def create_loader(
dataset,
input_size,
batch_size,
is_training=False,
use_prefetcher=True,
interpolation='bilinear',
fill_color='mean',
mean=IMAGENET_DEFAULT_MEAN,
std=IMAGENET_DEFAULT_STD,
num_workers=1,
distributed=False,
pin_mem=False,
memory_format=torch.contiguous_format
):
if isinstance(input_size, tuple):
img_size = input_size[-2:]
else:
img_size = input_size
if is_training:
transform = transforms_coco_train(
img_size,
interpolation=interpolation,
use_prefetcher=use_prefetcher,
fill_color=fill_color,
mean=mean,
std=std)
else:
transform = transforms_coco_eval(
img_size,
interpolation=interpolation,
use_prefetcher=use_prefetcher,
fill_color=fill_color,
mean=mean,
std=std)
dataset.transform = transform
sampler = None
if distributed:
if is_training:
sampler = torch.utils.data.distributed.DistributedSampler(dataset)
else:
            # This will add extra duplicate entries to result in an equal number
            # of samples per process, which will slightly alter validation results
sampler = OrderedDistributedSampler(dataset)
else:
sampler = torch.utils.data.RandomSampler(dataset)
batch_sampler = torch.utils.data.sampler.BatchSampler(
sampler, batch_size, drop_last=False)
if is_training:
batch_sampler = IterationBasedBatchSampler(batch_sampler)
loader = torch.utils.data.DataLoader(
dataset,
shuffle=False,
num_workers=num_workers,
batch_sampler=batch_sampler,
pin_memory=pin_mem,
collate_fn=partial(fast_collate, memory_format) if use_prefetcher else torch.utils.data.dataloader.default_collate,
)
else:
loader = torch.utils.data.DataLoader(
dataset,
batch_size=batch_size,
shuffle=False,
num_workers=num_workers,
sampler=sampler,
pin_memory=pin_mem,
collate_fn=partial(fast_collate, memory_format) if use_prefetcher else torch.utils.data.dataloader.default_collate,
)
if use_prefetcher:
loader = PrefetchLoader(loader, mean=mean, std=std)
return loader
|
Tools/DGLPyTorch/SyntheticGraphGeneration/syngen/generator/graph | graph | rmat_bipartite | # Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import warnings
from typing import List, Optional, Set, Tuple
import numpy as np
from syngen.generator.graph.base_graph_generator import BaseBipartiteGraphGenerator
from syngen.generator.graph.fitter import RMATFitter
from syngen.generator.graph.utils import (
effective_nonsquare_rmat_exact,
generate_gpu_rmat,
get_reversed_part,
rearrange_graph,
recreate_graph, generate_gpu_chunked_rmat,
)
class RMATBipartiteGenerator(BaseBipartiteGraphGenerator):
""" Graph generator based on RMAT that generate bipartite graphs
Args:
seed (int): Seed to reproduce the results. If None then random seed will be used.
logdir (str): Directory to store the logging results.
fitter (RMATFitter): RMATFitter to be used.
"""
def __init__(
self,
seed: Optional[int] = None,
logdir: str = "./logs",
gpu: bool = True,
fitter: Optional[RMATFitter] = None,
**kwargs,
):
super().__init__(seed, logdir, gpu)
self.fitter = fitter or RMATFitter()
def fit(
self,
graph: List[Tuple[int, int]],
src_set: Optional[Set[int]],
dst_set: Optional[Set[int]],
is_directed: bool,
transform_graph: bool = True,
):
""" Fits generator on the graph
Args:
graph (List[Tuple[int, int]]): graph to be fitted on
transform_graph (bool): defines if the generator should transform the input graph using src and dst node sets
src_set (Set[int]): set of source nodes
dst_set (Set[int]): set of destination nodes
is_directed (bool): flag indicating whether the graph is directed
"""
assert graph is not None, "Wrong graph"
if transform_graph:
lower, upper = rearrange_graph(graph, src_set, dst_set, assume_unique=True)
else:
assert not is_directed
upper = graph
lower = []
if (
len(lower) and is_directed
): # No need to fit lower part for undirected graph
self._fit_dst_src_results = self.fitter.fit(lower)
if len(upper):
self._fit_src_dst_results = self.fitter.fit(upper)
self.logger.log(f"Fit results dst_src: {self._fit_dst_src_results}")
self.logger.log(f"Fit results src_dst: {self._fit_src_dst_results}")
def _generate_part(
self,
fit_results: Tuple[float, float, float, float],
part_shape: Tuple[int, int],
num_edges: int,
noise: float,
batch_size: int,
return_node_ids: bool,
save_path: Optional[str],
):
if self.gpu:
return self._generate_part_gpu(
fit_results=fit_results,
part_shape=part_shape,
num_edges=num_edges,
noise=noise,
return_node_ids=return_node_ids,
save_path=save_path,
)
else:
return self._generate_part_cpu(
fit_results=fit_results,
part_shape=part_shape,
num_edges=num_edges,
noise=noise,
batch_size=batch_size,
return_node_ids=return_node_ids,
)
def _generate_part_cpu(
self,
fit_results: Tuple[float, float, float, float],
part_shape: Tuple[int, int],
num_edges: int,
noise: float,
batch_size: int,
return_node_ids: bool,
):
a, b, c, d = fit_results
theta = np.array([[a, b], [c, d]])
theta /= a + b + c + d
res = effective_nonsquare_rmat_exact(
theta,
num_edges,
part_shape,
noise_scaling=noise,
batch_size=batch_size,
dtype=np.int64,
custom_samplers=None,
generate_back_edges=False,
remove_selfloops=False,
return_node_ids=2 if return_node_ids else 0,
verbose=self.verbose,
)
if return_node_ids:
return res[0], res[1], res[2]
return res[0]
def _generate_part_gpu(
self,
fit_results: Tuple[float, float, float, float],
part_shape: Tuple[int, int],
num_edges: int,
noise: float,
return_node_ids: bool,
save_path: Optional[str] = None,
_chunked: bool = True,
):
a, b, c, d = fit_results
theta = np.array([a, b, c, d])
theta /= a + b + c + d
a, b, c, d = theta
r_scale, c_scale = part_shape
if _chunked:
res = generate_gpu_chunked_rmat(
a,
b,
c,
d,
r_scale=r_scale,
c_scale=c_scale,
n_edges=num_edges,
noise=noise,
is_directed=True,
has_self_loop=True,
return_node_ids=2 if return_node_ids else 0,
save_path=save_path,
verbose=self.verbose,
)
else:
res = generate_gpu_rmat(
a,
b,
c,
d,
r_scale=r_scale,
c_scale=c_scale,
n_edges=num_edges,
noise=noise,
is_directed=True,
has_self_loop=True,
return_node_ids=2 if return_node_ids else 0
)
if return_node_ids:
return res[0], res[1], res[2]
return res
def generate(
self,
num_nodes_src_set: int,
num_nodes_dst_set: int,
num_edges_src_dst: int,
num_edges_dst_src: int,
is_directed: bool,
        apply_edge_mirroring: bool = True,
transform_graph: bool = True,
noise: float = 0.5,
batch_size: int = 1_000_000,
return_node_ids=False,
save_path: Optional[str] = None,
):
""" Generates graph with approximately `num_nodes_src_set`/`num_nodes_dst_set` nodes
and exactly `num_edges_src_dst`/`num_edges_dst_src` edges from generator
Args:
num_nodes_src_set (int): approximate number of source nodes to be generated
num_nodes_dst_set (int): approximate number of destination nodes to be generated
num_edges_src_dst (int): exact number of source->destination edges to be generated
            num_edges_dst_src (int): exact number of destination->source edges to be generated
            is_directed (bool): flag indicating whether the generated graph has to be directed
            apply_edge_mirroring (bool): flag indicating whether to recreate the lower (dst->src) part by mirroring the upper part for undirected graphs
            transform_graph (bool): defines if the generator should transform the output graph to avoid node id conflict between src and dst nodes
            noise (float): noise for RMAT generation to get better degree distribution
            batch_size (int): size of the edge chunk that will be generated in one generation step
            return_node_ids (bool): flag indicating whether the generator has to return node ids as additional outputs
            save_path (str): path to store the graph. If specified, the method returns the number of edges in the graph
Returns:
new_graph (np.array[int, int]): generated graph
"""
assert (
num_nodes_src_set > 0 and num_nodes_dst_set > 0
), "Wrong number of nodes"
assert (
num_edges_src_dst >= 0 and num_edges_dst_src >= 0
), "Wrong number of edges"
max_edges = num_nodes_src_set * num_nodes_dst_set
assert (
num_edges_src_dst < max_edges and num_edges_dst_src < max_edges
), "Configuration of nodes nad edges cannot form any graph"
assert (
self._fit_src_dst_results or self._fit_dst_src_results
), "There are no fit results, \
call fit method first or load the seeding matrix from the file"
if (self._fit_dst_src_results is not None) != is_directed:
requested = "directed" if is_directed else "undirected"
fitted = "undirected" if requested == "directed" else "directed"
raise RuntimeError(
f"Fitted {fitted} graph but requested to generate {requested} one"
)
if apply_edge_mirroring and is_directed:
warnings.warn('edge mirroring works only for undirected graphs')
if not is_directed:
assert (
num_edges_src_dst == num_edges_dst_src
), "For undirected graph expected the same number of edges for each side"
assert (
self._fit_dst_src_results is None
), "For undirected graph expected only src->dst results to be present"
log2_row = math.ceil(math.log2(num_nodes_src_set))
log2_col = math.ceil(math.log2(num_nodes_dst_set))
part_shape_upper = (log2_row, log2_col)
part_shape_lower = (log2_col, log2_row)
offset = int(2 ** log2_row)
if self._fit_src_dst_results and num_edges_src_dst:
upper_part_res = self._generate_part(
self._fit_src_dst_results,
part_shape_upper,
num_edges_src_dst,
noise,
batch_size,
return_node_ids=return_node_ids,
save_path=save_path,
)
if return_node_ids:
upper_part, upper_part_src_node_ids, upper_part_dst_node_ids = upper_part_res
else:
upper_part = upper_part_res
else:
upper_part = []
if self._fit_dst_src_results:
if save_path is not None:
raise NotImplementedError('save_path works only for undirected bipartite graphs')
if num_edges_dst_src:
lower_part_res = self._generate_part(
self._fit_dst_src_results,
part_shape_lower,
num_edges_dst_src,
noise,
batch_size,
save_path=save_path,
return_node_ids=return_node_ids,
)
if return_node_ids:
lower_part, lower_part_src_node_ids, lower_part_dst_node_ids = lower_part_res
else:
lower_part = lower_part_res
else:
lower_part = []
elif not is_directed and apply_edge_mirroring: # Recreate lower part for undirected graph
if return_node_ids:
lower_part_src_node_ids, lower_part_dst_node_ids = upper_part_dst_node_ids, upper_part_src_node_ids
lower_part = get_reversed_part(upper_part)
else:
lower_part = []
if transform_graph:
new_graph = recreate_graph(lower_part, upper_part, offset)
if return_node_ids:
lower_part_src_node_ids = lower_part_src_node_ids + offset
upper_part_dst_node_ids = upper_part_dst_node_ids + offset
src_node_ids = np.union1d(upper_part_src_node_ids, lower_part_dst_node_ids)
dst_node_ids = np.union1d(upper_part_dst_node_ids, lower_part_src_node_ids)
else:
if apply_edge_mirroring:
raise NotImplementedError('apply edge mirroring works only with `transform_graph=True`')
new_graph = upper_part
if return_node_ids:
src_node_ids, dst_node_ids = upper_part_src_node_ids, upper_part_dst_node_ids
if return_node_ids:
return new_graph, src_node_ids, dst_node_ids
return new_graph
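# Example usage (a minimal sketch; the edge list and node sets are hypothetical
# placeholders):
#
#   generator = RMATBipartiteGenerator(seed=42, gpu=False)
#   generator.fit(graph=edges, src_set=src_ids, dst_set=dst_ids,
#                 is_directed=False)
#   new_graph = generator.generate(
#       num_nodes_src_set=1024, num_nodes_dst_set=4096,
#       num_edges_src_dst=100_000, num_edges_dst_src=100_000,
#       is_directed=False)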
|
TensorFlow2/Recommendation/DLRM_and_DCNv2/doc | doc | criteo_dataset | ## Quick Start Guide
To prepare the Criteo 1TB dataset for training, follow these steps.
1. Make sure you meet the prerequisites.
You will need around 4TB of storage for storing the original Criteo 1TB dataset, the results of some
intermediate preprocessing steps and the final dataset. The final dataset itself will take about 400GB.
We recommend using local storage, such as a fast SSD drive, to run the preprocessing. Using other types of storage
will negatively impact the preprocessing time.
2. Build the preprocessing docker image.
```bash
docker build -t preproc_docker_image -f Dockerfile_spark .
```
3. Download the data by following the instructions at: http://labs.criteo.com/2013/12/download-terabyte-click-logs/.
When you have successfully downloaded the dataset, put it in the `/data/criteo_orig` directory in the container
(`$PWD/data/criteo_orig` in the host system).
4. Start an interactive session in the NGC container to run preprocessing.
The DLRM TensorFlow container can be launched with:
```bash
mkdir -p data
docker run --runtime=nvidia -it --rm --ipc=host -v ${PWD}/data:/data preproc_docker_image bash
```
5. Unzip the data with:
```bash
gunzip /data/criteo_orig/*.gz
```
6. Preprocess the data.
Here are a few examples of different preprocessing commands.
For the details on how those scripts work and a detailed description of all the parameters,
consult the [preprocess with spark section](criteo_dataset.md#preprocess-with-spark).
```bash
export download_dir=/data/criteo_orig
export final_output_dir=/data/preprocessed
cd preproc
# to run on a DGX-2 with a frequency limit of 3 (will need 8xV100-32GB to fit the model in GPU memory)
./prepare_dataset.sh DGX2 3
# to run on a DGX-2 with a frequency limit of 15 (should fit on a single V100-32GB):
./prepare_dataset.sh DGX2 15
# to run on CPU with a frequency limit of 15:
./prepare_dataset.sh CPU 15
# to run on DGX-2 with no frequency limit:
./prepare_dataset.sh DGX2 0
```
7. Verify the preprocessed data
After running `tree /data/preprocessed` you should see the following directory structure:
```bash
$ tree /data/preprocessed
/data/preprocessed
├── feature_spec.yaml
├── test
│ ├── cat_0.bin
│ ├── cat_1.bin
│ ├── ...
│ ├── label.bin
│ └── numerical.bin
└── train
├── cat_0.bin
├── cat_1.bin
├── ...
├── label.bin
└── numerical.bin
2 directories, 57 files
```
## Advanced
### Dataset guidelines
The first 23 days are used as the training set. The last day is split in half.
The first part is used as a validation set and the second set is used as a hold-out test set.
The preprocessing steps applied to the raw data include the following (a short sketch of these transforms is shown after the list):
- Replacing the missing values with `0`.
- Replacing the categorical values that exist fewer than 15 times with a special value.
- Converting the hash values to consecutive integers.
- Adding 2 to all the numerical features so that all of them are greater than or equal to 1.
- Taking a natural logarithm of all numerical features.
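For illustration, here is a minimal, single-machine sketch of these per-column transforms in plain Python (the helper names and the `counts`/`vocab` mappings are hypothetical; the actual implementation runs as a distributed Spark job):
```python
import math

RARE = 0          # special value for infrequent categorical values
THRESHOLD = 15    # frequency threshold T

def transform_numerical(x):
    x = 0.0 if x is None else float(x)  # missing -> 0
    return math.log(x + 2.0)            # shift so x + 2 >= 1, then natural log

def transform_categorical(value, counts, vocab):
    # counts: value -> number of occurrences across the whole dataset
    # vocab: value -> consecutive integer id
    if value is None or counts.get(value, 0) < THRESHOLD:
        return RARE                     # rare/missing -> special value
    return vocab[value]                 # hash value -> consecutive integer
```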
### Preprocess with Spark
The preprocessing scripts provided in this repository support running both on CPU and on DGX-2 using [Apache Spark 3.0](https://www.nvidia.com/en-us/deep-learning-ai/solutions/data-science/apache-spark-3/).
It should be possible to change the values in `preproc/dgx2_config.sh`
so that they'll work on other hardware platforms such as DGX-1.
Note that the preprocessing will require about 4TB of disk storage.
The syntax for the preprocessing script is as follows:
```bash
cd preproc
./prepare_dataset.sh <DGX2|CPU> <frequency_threshold>
```
The first argument is the hardware platform to use (either DGX-2 or pure CPU). The second argument is the frequency
threshold to apply to the categorical variables. For a frequency threshold `T`, the categorical values that occur less
often than `T` will all be replaced with a single special embedding; for example, with `T=15`, a categorical value that
appears only 9 times in the whole dataset maps to that "rare" embedding. Thus, a larger value of `T` will require smaller
embedding tables and will substantially reduce the overall size of the model.
For the Criteo Terabyte dataset we recommend a frequency threshold of `T=3` if you intend to run the hybrid-parallel mode
on multiple GPUs. If you want to make the model fit into a single NVIDIA Tesla V100-32GB, you can set `T=15`.
The preprocessing scripts makes use of the following environment variables to configure the data directory paths:
- `download_dir` – this directory should contain the original Criteo Terabyte CSV files
- `spark_output_path` – directory to which the parquet data will be written
- `conversion_intermediate_dir` – directory used for storing intermediate data used to convert from parquet to train-ready format
- `final_output_dir` – directory to store the final results of the preprocessing which can then be used to train DLRM
The script `spark_data_utils.py` is a PySpark application, which is used to preprocess the Criteo Terabyte Dataset. In the Docker image, we have installed Spark 3.0.1, which will start a standalone cluster of Spark. The scripts `run_spark_cpu.sh` and `run_spark_gpu.sh` start Spark, then run several PySpark jobs with `spark_data_utils.py`:
- generates the dictionary
- transforms the train dataset
- transforms the test dataset
- transforms the validation dataset
Change the variables in the `run_spark_cpu.sh` or `run_spark_gpu.sh` script according to your environment.
Configure the paths.
```
export SPARK_LOCAL_DIRS=/data/spark-tmp
export INPUT_PATH=/data/criteo
export OUTPUT_PATH=/data/output
```
Note that the Spark job requires about 3TB disk space used for data shuffle.
Where:
`SPARK_LOCAL_DIRS` is the path that Spark uses to write shuffle data.
`INPUT_PATH` is the path of the Criteo Terabyte Dataset, including uncompressed files like day_0, day_1…
`OUTPUT_PATH` is where the script writes the output data. It will generate the following subdirectories: `model`, `train`, `test`, and `validation`.
- The `model` is the dictionary folder.
- The `train` is the train dataset transformed from day_0 to day_22.
- The `test` is the test dataset transformed from the prior half of day_23.
- The `validation` is the dataset transformed from the latter half of day_23.
Configure the resources which Spark will use.
```
export TOTAL_CORES=80
export TOTAL_MEMORY=800
```
Where:
`TOTAL_CORES` is the total number of CPU cores you want Spark to use.
`TOTAL_MEMORY` is the total memory (in GB) Spark will use.
Configure frequency limit.
```
USE_FREQUENCY_LIMIT=15
```
The frequency limit is used to filter out the categorical values which appear fewer than `n` times in the whole dataset; those values are replaced with 0. The default frequency limit in the script is 15. You can also change the threshold to any number you want by editing this line, for example to `OPTS="--frequency_limit 8"`.
|
TensorFlow2/LanguageModeling/ELECTRA | ELECTRA | run_pretraining | # coding=utf-8
# Copyright 2020 The Google Research Authors.
# Copyright (c) 2020 NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pre-trains an ELECTRA model."""
import argparse
import collections
import json
import time
import datetime
import os
import tensorflow as tf
import horovod.tensorflow as hvd
from horovod.tensorflow.compression import Compression
from gpu_affinity import set_affinity
import utils
import sys
import pretrain_utils
from utils import get_rank, get_world_size, is_main_process, log, log_config, setup_logger, postprocess_dllog
from tokenization import ElectraTokenizer
from modeling import PretrainingModel
from optimization import create_optimizer, GradientAccumulator
import dllogger
class PretrainingConfig(object):
"""Defines pre-training hyperparameters."""
def __init__(self, model_name, **kwargs):
self.model_name = model_name
self.seed = 42
self.debug = False # debug mode for quickly running things
self.do_train = True # pre-train ELECTRA
self.do_eval = False # evaluate generator/discriminator on unlabeled data
self.phase2 = False
# amp
self.amp = True
self.xla = True
self.fp16_compression = False
# optimizer type
self.optimizer = 'adam'
self.gradient_accumulation_steps = 1
# lamb whitelisting for LN and biases
self.skip_adaptive = False
# loss functions
self.electra_objective = True # if False, use the BERT objective instead
self.gen_weight = 1.0 # masked language modeling / generator loss
self.disc_weight = 50.0 # discriminator loss
self.mask_prob = 0.15 # percent of input tokens to mask out / replace
# optimization
self.learning_rate = 5e-4
self.lr_decay_power = 0.5
self.weight_decay_rate = 0.01
self.num_warmup_steps = 10000
self.opt_beta_1 = 0.878
self.opt_beta_2 = 0.974
self.end_lr = 0.0
# training settings
self.log_freq = 10
self.skip_checkpoint = False
self.save_checkpoints_steps = 1000
self.num_train_steps = 1000000
self.num_eval_steps = 100
self.keep_checkpoint_max = 5 # maximum number of recent checkpoint files to keep; change to 0 or None to keep all checkpoints
self.restore_checkpoint = None
self.load_weights = False
self.steps_this_run = -1
# model settings
self.model_size = "base" # one of "small", "base", or "large"
# override the default transformer hparams for the provided model size; see
# modeling.BertConfig for the possible hparams and util.training_utils for
# the defaults
self.model_hparam_overrides = (
kwargs["model_hparam_overrides"]
if "model_hparam_overrides" in kwargs else {})
self.embedding_size = None # bert hidden size by default
self.vocab_size = 30522 # number of tokens in the vocabulary
self.do_lower_case = True # lowercase the input?
# generator settings
self.uniform_generator = False # generator is uniform at random
self.shared_embeddings = True # share generator/discriminator token embeddings?
# self.untied_generator = True # tie all generator/discriminator weights?
self.generator_layers = 1.0 # frac of discriminator layers for generator
self.generator_hidden_size = 0.25 # frac of discrim hidden size for gen
self.disallow_correct = False # force the generator to sample incorrect
# tokens (so 15% of tokens are always
# fake)
self.temperature = 1.0 # temperature for sampling from generator
# batch sizes
self.max_seq_length = 128
self.train_batch_size = 128
self.eval_batch_size = 128
self.results_dir = "results"
self.json_summary = None
self.update(kwargs)
# default locations of data files
self.pretrain_tfrecords = os.path.join(
"data", "pretrain_tfrecords/pretrain_data.tfrecord*")
self.vocab_file = os.path.join("vocab", "vocab.txt")
self.model_dir = os.path.join(self.results_dir, "models", model_name)
self.checkpoints_dir = os.path.join(self.model_dir, "checkpoints")
self.weights_dir = os.path.join(self.model_dir, "weights")
self.results_txt = os.path.join(self.results_dir, "unsup_results.txt")
self.results_pkl = os.path.join(self.results_dir, "unsup_results.pkl")
self.log_dir = os.path.join(self.model_dir, "logs")
self.max_predictions_per_seq = int((self.mask_prob + 0.005) *
self.max_seq_length)
# defaults for different-sized model
if self.model_size == "base":
self.embedding_size = 768
self.hidden_size = 768
self.num_hidden_layers = 12
if self.hidden_size % 64 != 0:
raise ValueError("Hidden size {} should be divisible by 64. Number of attention heads is hidden size {} / 64 ".format(self.hidden_size, self.hidden_size))
self.num_attention_heads = int(self.hidden_size / 64.)
elif self.model_size == "large":
self.embedding_size = 1024
self.hidden_size = 1024
self.num_hidden_layers = 24
if self.hidden_size % 64 != 0:
raise ValueError("Hidden size {} should be divisible by 64. Number of attention heads is hidden size {} / 64 ".format(self.hidden_size, self.hidden_size))
self.num_attention_heads = int(self.hidden_size / 64.)
else:
raise ValueError("--model_size : 'base' and 'large supported only.")
self.act_func = "gelu"
self.hidden_dropout_prob = 0.1
self.attention_probs_dropout_prob = 0.1
self.update(kwargs)
def update(self, kwargs):
for k, v in kwargs.items():
if v is not None:
self.__dict__[k] = v
def metric_fn(config, metrics, eval_fn_inputs):
"""Computes the loss and accuracy of the model."""
d = eval_fn_inputs
metrics["masked_lm_accuracy"].update_state(
y_true=tf.reshape(d["masked_lm_ids"], [-1]),
y_pred=tf.reshape(d["masked_lm_preds"], [-1]),
sample_weight=tf.reshape(d["masked_lm_weights"], [-1]))
metrics["masked_lm_loss"].update_state(
values=tf.reshape(d["mlm_loss"], [-1]),
sample_weight=tf.reshape(d["masked_lm_weights"], [-1]))
if config.electra_objective:
metrics["sampled_masked_lm_accuracy"].update_state(
y_true=tf.reshape(d["masked_lm_ids"], [-1]),
y_pred=tf.reshape(d["sampled_tokids"], [-1]),
sample_weight=tf.reshape(d["masked_lm_weights"], [-1]))
if config.disc_weight > 0:
metrics["disc_loss"].update_state(d["disc_loss"])
#metrics["disc_auc"].update_state(
# d["disc_labels"] * d["input_mask"],
# d["disc_probs"] * tf.cast(d["input_mask"], tf.float32))
metrics["disc_accuracy"].update_state(
y_true=d["disc_labels"], y_pred=d["disc_preds"],
sample_weight=d["input_mask"])
metrics["disc_precision"].update_state(
y_true=d["disc_labels"], y_pred=d["disc_preds"],
sample_weight=d["disc_preds"] * d["input_mask"])
metrics["disc_recall"].update_state(
y_true=d["disc_labels"], y_pred=d["disc_preds"],
sample_weight=d["disc_labels"] * d["input_mask"])
return metrics
@tf.function
def train_one_step(config, model, optimizer, features, accumulator, first_step, take_step, clip_norm=1.0):
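"""Runs a single training step: forward pass, loss scaling (with AMP),
gradient accumulation, and — on accumulation boundaries — an all-reduced,
clipped weight update. `first_step` forces an apply_gradients call so the
optimizer builds its state, then broadcasts model/optimizer variables."""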
#Forward and Backward pass
with tf.GradientTape() as tape:
total_loss, eval_fn_inputs = model(features, is_training=True)
unscaled_loss = tf.stop_gradient(total_loss)
if config.amp:
total_loss = optimizer.get_scaled_loss(total_loss)
#Backpropagate gradients
#tape = hvd.DistributedGradientTape(
# tape, sparse_as_dense=True,
# compression=Compression.fp16 if config.amp and config.fp16_compression else Compression.none)
gradients = tape.gradient(total_loss, model.trainable_variables)
#Get unscaled gradients if AMP
if config.amp:
gradients = optimizer.get_unscaled_gradients(gradients)
#Accumulate gradients
accumulator(gradients)
#Need to call apply_gradients on the very first step irrespective of gradient accumulation
#This is required for the optimizer to build its state
if first_step or take_step:
#All reduce and Clip the accumulated gradients
allreduced_accumulated_gradients = [None if g is None else hvd.allreduce(g / tf.cast(config.gradient_accumulation_steps, g.dtype),
compression=Compression.fp16 if config.amp and config.fp16_compression else Compression.none)
for g in accumulator.gradients]
(clipped_accumulated_gradients, _) = tf.clip_by_global_norm(allreduced_accumulated_gradients, clip_norm=clip_norm)
#Weight update
optimizer.apply_gradients(zip(clipped_accumulated_gradients, model.trainable_variables))
accumulator.reset()
#broadcast model weights after first train step
if first_step:
hvd.broadcast_variables(model.variables, root_rank=0)
hvd.broadcast_variables(optimizer.variables(), root_rank=0)
return unscaled_loss, eval_fn_inputs
def main(e2e_start_time):
# Parse essential arguments
parser = argparse.ArgumentParser()
parser.add_argument("--model_name", required=True)
parser.add_argument("--model_size", default="base", type=str, help="base or large")
parser.add_argument("--pretrain_tfrecords", type=str)
parser.add_argument("--phase2", action='store_true')
parser.add_argument("--fp16_compression", action='store_true')
parser.add_argument("--amp", action='store_true',
help="Whether to use fp16.")
parser.add_argument("--xla", action='store_true',
help="Whether to use xla.")
parser.add_argument("--seed", default=42, type=int)
parser.add_argument("--num_train_steps", type=int)
parser.add_argument("--num_warmup_steps", type=int)
parser.add_argument("--learning_rate", type=float)
parser.add_argument("--train_batch_size", type=int)
parser.add_argument("--max_seq_length", type=int)
parser.add_argument("--mask_prob", type=float)
parser.add_argument("--disc_weight", type=float)
parser.add_argument("--generator_hidden_size", type=float)
parser.add_argument("--log_freq", type=int, default=10, help="Training metrics logging frequency")
parser.add_argument("--save_checkpoints_steps", type=int)
parser.add_argument("--steps_this_run", type=int, default=-1, help="run a fixed number of steps only")
parser.add_argument("--keep_checkpoint_max", type=int)
parser.add_argument("--restore_checkpoint", default=None, type=str)
parser.add_argument("--load_weights", action='store_true')
parser.add_argument("--weights_dir")
parser.add_argument("--optimizer", default="adam", type=str, help="adam or lamb")
parser.add_argument("--skip_adaptive", action='store_true', help="Whether to apply adaptive LR on LayerNorm and biases")
parser.add_argument("--gradient_accumulation_steps", type=int, default=1, help="Number of Gradient Accumulation steps")
parser.add_argument("--lr_decay_power", type=float, default=0.5, help="LR decay power")
parser.add_argument("--opt_beta_1", type=float, default=0.878, help="Optimizer beta1")
parser.add_argument("--opt_beta_2", type=float, default=0.974, help="Optimizer beta2")
parser.add_argument("--end_lr", type=float, default=0.0, help="Ending LR")
parser.add_argument("--log_dir", type=str, default=None, help="Path to store logs")
parser.add_argument("--results_dir", type=str, default=None, help="Path to store all model results")
parser.add_argument("--skip_checkpoint", action='store_true', default=False, help="Path to store logs")
parser.add_argument('--json-summary', type=str, default=None,
help='If provided, the json summary will be written to the specified file.')
args = parser.parse_args()
config = PretrainingConfig(**args.__dict__)
# Padding for divisibility by 8
if config.vocab_size % 8 != 0:
config.vocab_size += 8 - (config.vocab_size % 8)
# Set up tensorflow
hvd.init()
args.log_dir = config.log_dir
# DLLogger
setup_logger(args)
dllogger.metadata('training_sequences_per_second', {'unit': 'sequences/s'})
dllogger.metadata('final_loss', {'unit': None})
dllogger.metadata('e2e_train_time', {'unit': 's'})
set_affinity(hvd.local_rank())
gpus = tf.config.experimental.list_physical_devices('GPU')
if gpus:
for gpu in gpus:
tf.config.experimental.set_memory_growth(gpu, True)
tf.config.experimental.set_visible_devices(gpus[hvd.local_rank()], 'GPU')
tf.config.optimizer.set_jit(config.xla)
#tf.config.optimizer.set_experimental_options({"auto_mixed_precision": config.amp})
if config.amp:
policy = tf.keras.mixed_precision.experimental.Policy("mixed_float16", loss_scale="dynamic")
tf.keras.mixed_precision.experimental.set_policy(policy)
print('Compute dtype: %s' % policy.compute_dtype) # Compute dtype: float16
print('Variable dtype: %s' % policy.variable_dtype) # Variable dtype: float32
#tf.random.set_seed(config.seed)
# Set up config (continued)
if config.load_weights and config.restore_checkpoint:
raise ValueError("`load_weights` and `restore_checkpoint` should not be on at the same time.")
if config.phase2 and not config.restore_checkpoint:
raise ValueError("`phase2` cannot be used without `restore_checkpoint`.")
utils.heading("Config:")
log_config(config)
# Save pretrain configs
pretrain_config_json = os.path.join(config.checkpoints_dir, 'pretrain_config.json')
if is_main_process():
utils.write_json(config.__dict__, pretrain_config_json)
log("Configuration saved in {}".format(pretrain_config_json))
# Set up model
model = PretrainingModel(config)
# Set up metrics
metrics = dict()
metrics["train_perf"] = tf.keras.metrics.Mean(name="train_perf")
metrics["total_loss"] = tf.keras.metrics.Mean(name="total_loss")
metrics["masked_lm_accuracy"] = tf.keras.metrics.Accuracy(name="masked_lm_accuracy")
metrics["masked_lm_loss"] = tf.keras.metrics.Mean(name="masked_lm_loss")
if config.electra_objective:
metrics["sampled_masked_lm_accuracy"] = tf.keras.metrics.Accuracy(name="sampled_masked_lm_accuracy")
if config.disc_weight > 0:
metrics["disc_loss"] = tf.keras.metrics.Mean(name="disc_loss")
metrics["disc_auc"] = tf.keras.metrics.AUC(name="disc_auc")
metrics["disc_accuracy"] = tf.keras.metrics.Accuracy(name="disc_accuracy")
metrics["disc_precision"] = tf.keras.metrics.Accuracy(name="disc_precision")
metrics["disc_recall"] = tf.keras.metrics.Accuracy(name="disc_recall")
# Set up tensorboard
current_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
train_log_dir = os.path.join(config.log_dir, current_time,
'train_' + str(get_rank()) + '_of_' + str(get_world_size()))
train_summary_writer = tf.summary.create_file_writer(train_log_dir)
# Set up dataset
dataset = pretrain_utils.get_dataset(
config, config.train_batch_size, world_size=get_world_size(), rank=get_rank())
train_iterator = iter(dataset)
# Set up optimizer
optimizer = create_optimizer(
init_lr=config.learning_rate,
num_train_steps=config.num_train_steps,
num_warmup_steps=config.num_warmup_steps,
weight_decay_rate=config.weight_decay_rate,
optimizer=config.optimizer,
skip_adaptive=config.skip_adaptive,
power=config.lr_decay_power,
beta_1=config.opt_beta_1,
beta_2=config.opt_beta_2,
end_lr=config.end_lr)
accumulator = GradientAccumulator()
if config.amp:
optimizer = tf.keras.mixed_precision.experimental.LossScaleOptimizer(optimizer, "dynamic")
# Set up model checkpoint
checkpoint = tf.train.Checkpoint(
step=tf.Variable(0), phase2=tf.Variable(False), optimizer=optimizer, model=model)
manager = tf.train.CheckpointManager(checkpoint, config.checkpoints_dir, max_to_keep=config.keep_checkpoint_max)
if config.restore_checkpoint and config.restore_checkpoint != "latest":
checkpoint.restore(config.restore_checkpoint)
log(" ** Restored model checkpoint from {}".format(config.restore_checkpoint))
elif config.restore_checkpoint and config.restore_checkpoint == "latest" and manager.latest_checkpoint:
checkpoint.restore(manager.latest_checkpoint)
log(" ** Restored model checkpoint from {}".format(manager.latest_checkpoint))
elif config.load_weights:
model.generator(model.generator.dummy_inputs)
model.discriminator(model.discriminator.dummy_inputs)
model.generator.load_weights(os.path.join(config.weights_dir, 'generator', 'tf_model.h5'))
model.discriminator.load_weights(os.path.join(config.weights_dir, 'discriminator', 'tf_model.h5'))
else:
log(" ** Initializing from scratch.")
restore_iterator = bool(config.restore_checkpoint) and config.restore_checkpoint == "latest"
# Initialize global step for phase2
if config.phase2 and not bool(checkpoint.phase2):
optimizer.iterations.assign(0)
checkpoint.step.assign(0)
checkpoint.phase2.assign(True)
restore_iterator = False
if bool(checkpoint.phase2):
manager = tf.train.CheckpointManager(
checkpoint, config.checkpoints_dir,
checkpoint_name='ckpt-p2',
max_to_keep=config.keep_checkpoint_max)
# Set up iterator checkpoint
iter_checkpoint = tf.train.Checkpoint(
train_iterator=train_iterator, world_size=tf.Variable(get_world_size()), rank=tf.Variable(get_rank()))
iter_manager = tf.train.CheckpointManager(
iter_checkpoint,
os.path.join(config.checkpoints_dir, 'iter_ckpt_rank_' + '{:02}'.format(get_rank())),
checkpoint_name='iter_ckpt_rank_' + '{:02}'.format(get_rank()),
max_to_keep=config.keep_checkpoint_max)
if restore_iterator and iter_manager.latest_checkpoint:
ckpt_world_size = tf.train.load_variable(
iter_manager.latest_checkpoint, 'world_size/.ATTRIBUTES/VARIABLE_VALUE')
if ckpt_world_size == get_world_size():
iter_checkpoint.restore(iter_manager.latest_checkpoint)
log(" ** Restored iterator checkpoint from {}".format(iter_manager.latest_checkpoint), all_rank=True)
utils.heading("Running training")
accumulator.reset()
train_start, start_step = time.time(), int(checkpoint.step) - 1
local_step = 0
saved_ckpt = False
while int(checkpoint.step) <= config.num_train_steps:
saved_ckpt = False
step = int(checkpoint.step)
features = next(train_iterator)
iter_start = time.time()
# if step == 200: tf.profiler.experimental.start(logdir=train_log_dir)
total_loss, eval_fn_inputs = train_one_step(config, model, optimizer, features, accumulator,
local_step==1, take_step=local_step % args.gradient_accumulation_steps == 0)
# if step == 300: tf.profiler.experimental.stop()
metrics["train_perf"].update_state(
config.train_batch_size * get_world_size() / (time.time() - iter_start))
metrics["total_loss"].update_state(values=total_loss)
metric_fn(config, metrics, eval_fn_inputs)
if (step % args.log_freq == 0) and (local_step % args.gradient_accumulation_steps == 0):
log_info_dict = {k:float(v.result().numpy() * 100) if "accuracy" in k else float(v.result().numpy()) for k, v in metrics.items()}
dllogger.log(step=(step,), data=log_info_dict, verbosity=0)
log('Step:{step:6d}, Loss:{total_loss:10.6f}, Gen_loss:{masked_lm_loss:10.6f}, Disc_loss:{disc_loss:10.6f}, Gen_acc:{masked_lm_accuracy:6.2f}, '
'Disc_acc:{disc_accuracy:6.2f}, Perf:{train_perf:4.0f}, Loss Scaler: {loss_scale}, Elapsed: {elapsed}, ETA: {eta}, '.format(
step=step, **log_info_dict,
loss_scale=optimizer.loss_scale if config.amp else 1,
elapsed=utils.get_readable_time(time.time() - train_start),
eta=utils.get_readable_time(
(time.time() - train_start) / (step - start_step) * (config.num_train_steps - step))),
all_rank=True)
with train_summary_writer.as_default():
for key, m in metrics.items():
tf.summary.scalar(key, m.result(), step=step)
if int(checkpoint.step) < config.num_train_steps:
for m in metrics.values():
m.reset_states()
#Print allreduced metrics on the last step
if (int(checkpoint.step) == config.num_train_steps and (local_step % args.gradient_accumulation_steps == 0)) or ((local_step + 1) % (config.save_checkpoints_steps * args.gradient_accumulation_steps) == 0):
log_info_dict = {k:float(hvd.allreduce(v.result()).numpy() * 100) if "accuracy" in k else float(hvd.allreduce(v.result()).numpy()) for k, v in metrics.items()}
log_info_dict["training_sequences_per_second"] = log_info_dict["train_perf"]
log_info_dict["final_loss"] = log_info_dict["total_loss"]
log_info_dict["e2e_train_time"] = time.time() - e2e_start_time
dllogger.log(step=(), data=log_info_dict, verbosity=0)
log('<FINAL STEP METRICS> Step:{step:6d}, Loss:{total_loss:10.6f}, Gen_loss:{masked_lm_loss:10.6f}, Disc_loss:{disc_loss:10.6f}, Gen_acc:{masked_lm_accuracy:6.2f}, '
'Disc_acc:{disc_accuracy:6.2f}, Perf:{train_perf:4.0f},'.format(
step=step, **log_info_dict),
all_rank=False)
if local_step % args.gradient_accumulation_steps == 0:
checkpoint.step.assign(int(optimizer.iterations))
if not config.skip_checkpoint and (local_step % (config.save_checkpoints_steps * args.gradient_accumulation_steps) == 0):
saved_ckpt = True
if is_main_process():
save_path = manager.save(checkpoint_number=step)
log(" ** Saved model checkpoint for step {}: {}".format(step, save_path))
iter_save_path = iter_manager.save(checkpoint_number=step)
log(" ** Saved iterator checkpoint for step {}: {}".format(step, iter_save_path), all_rank=True)
local_step += 1
if config.steps_this_run != -1 and (local_step % (config.steps_this_run * args.gradient_accumulation_steps) == 0):
#terminating run sooner as steps_this_run has been reached
log("terminating as steps_this_run:{} has been reached".format(config.steps_this_run))
break
step = (int(checkpoint.step) - 1)
dllogger.flush()
if not config.skip_checkpoint and not saved_ckpt:
if is_main_process():
save_path = manager.save(checkpoint_number=step)
log(" ** Saved model checkpoint for step {}: {}".format(step, save_path))
iter_save_path = iter_manager.save(checkpoint_number=step)
log(" ** Saved iterator checkpoint for step {}: {}".format(step, iter_save_path), all_rank=True)
return args
if __name__ == "__main__":
start_time = time.time()
args = main(start_time)
log("Total Time:{:.4f}".format(time.time() - start_time))
if is_main_process():
postprocess_dllog(args)
|
PaddlePaddle/Classification/RN50v1.5/utils | utils | logger | # Copyright (c) 2022 NVIDIA Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import paddle.distributed as dist
import dllogger
def format_step(step):
"""
Define prefix for different prefix message for dllogger.
Args:
step(str|tuple): Dllogger step format.
Returns:
s(str): String to print in log.
"""
if isinstance(step, str):
return step
s = ""
if len(step) > 0:
s += f"Epoch: {step[0]} "
if len(step) > 1:
s += f"Iteration: {step[1]} "
if len(step) > 2:
s += f"Validation Iteration: {step[2]} "
if len(step) == 0:
s = "Summary:"
return s
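# Example (illustrative): format_step((1, 100)) returns 'Epoch: 1 Iteration: 100 '.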
def setup_dllogger(log_file):
"""
Setup logging and dllogger.
Args:
log_file(str): Path to log file.
"""
logging.basicConfig(
level=logging.DEBUG,
format='{asctime}:{levelname}: {message}',
style='{')
if dist.get_rank() == 0:
dllogger.init(backends=[
dllogger.StdOutBackend(
dllogger.Verbosity.DEFAULT, step_format=format_step),
dllogger.JSONStreamBackend(dllogger.Verbosity.VERBOSE, log_file),
])
else:
dllogger.init([])
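# Minimal usage sketch (illustrative; assumes dllogger is initialized once per process):
# setup_dllogger('./train_log.json')
# dllogger.log(step=(1, 100), data={'loss': 0.5})
# dllogger.flush()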
|
TensorFlow/Detection/SSD/models/research/object_detection/data | data | ava_label_map_v2.1 | item {
name: "bend/bow (at the waist)"
id: 1
}
item {
name: "crouch/kneel"
id: 3
}
item {
name: "dance"
id: 4
}
item {
name: "fall down"
id: 5
}
item {
name: "get up"
id: 6
}
item {
name: "jump/leap"
id: 7
}
item {
name: "lie/sleep"
id: 8
}
item {
name: "martial art"
id: 9
}
item {
name: "run/jog"
id: 10
}
item {
name: "sit"
id: 11
}
item {
name: "stand"
id: 12
}
item {
name: "swim"
id: 13
}
item {
name: "walk"
id: 14
}
item {
name: "answer phone"
id: 15
}
item {
name: "carry/hold (an object)"
id: 17
}
item {
name: "climb (e.g., a mountain)"
id: 20
}
item {
name: "close (e.g., a door, a box)"
id: 22
}
item {
name: "cut"
id: 24
}
item {
name: "dress/put on clothing"
id: 26
}
item {
name: "drink"
id: 27
}
item {
name: "drive (e.g., a car, a truck)"
id: 28
}
item {
name: "eat"
id: 29
}
item {
name: "enter"
id: 30
}
item {
name: "hit (an object)"
id: 34
}
item {
name: "lift/pick up"
id: 36
}
item {
name: "listen (e.g., to music)"
id: 37
}
item {
name: "open (e.g., a window, a car door)"
id: 38
}
item {
name: "play musical instrument"
id: 41
}
item {
name: "point to (an object)"
id: 43
}
item {
name: "pull (an object)"
id: 45
}
item {
name: "push (an object)"
id: 46
}
item {
name: "put down"
id: 47
}
item {
name: "read"
id: 48
}
item {
name: "ride (e.g., a bike, a car, a horse)"
id: 49
}
item {
name: "sail boat"
id: 51
}
item {
name: "shoot"
id: 52
}
item {
name: "smoke"
id: 54
}
item {
name: "take a photo"
id: 56
}
item {
name: "text on/look at a cellphone"
id: 57
}
item {
name: "throw"
id: 58
}
item {
name: "touch (an object)"
id: 59
}
item {
name: "turn (e.g., a screwdriver)"
id: 60
}
item {
name: "watch (e.g., TV)"
id: 61
}
item {
name: "work on a computer"
id: 62
}
item {
name: "write"
id: 63
}
item {
name: "fight/hit (a person)"
id: 64
}
item {
name: "give/serve (an object) to (a person)"
id: 65
}
item {
name: "grab (a person)"
id: 66
}
item {
name: "hand clap"
id: 67
}
item {
name: "hand shake"
id: 68
}
item {
name: "hand wave"
id: 69
}
item {
name: "hug (a person)"
id: 70
}
item {
name: "kiss (a person)"
id: 72
}
item {
name: "lift (a person)"
id: 73
}
item {
name: "listen to (a person)"
id: 74
}
item {
name: "push (another person)"
id: 76
}
item {
name: "sing to (e.g., self, a person, a group)"
id: 77
}
item {
name: "take (an object) from (a person)"
id: 78
}
item {
name: "talk to (e.g., self, a person, a group)"
id: 79
}
item {
name: "watch (a person)"
id: 80
}
|
PyTorch/SpeechRecognition/QuartzNet/platform | platform | DGXA100_QuartzNet_TF32_8GPU | #!/bin/bash
set -a
: ${NUM_GPUS:=8}
: ${GPU_BATCH_SIZE:=72}
: ${GRAD_ACCUMULATION:=2}
: ${AMP:=false}
bash scripts/train.sh "$@"
|
PyTorch/Detection/Efficientdet/effdet | effdet | distributed | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import logging
import numpy as np
import pickle
import torch
import torch.distributed as dist
_LOCAL_PROCESS_GROUP = None
"""
A torch process group which only includes processes that on the same machine as the current process.
This variable is set when processes are spawned by `launch()` in "engine/launch.py".
"""
def get_world_size() -> int:
if not dist.is_available():
return 1
if not dist.is_initialized():
return 1
return dist.get_world_size()
def get_rank() -> int:
if not dist.is_available():
return 0
if not dist.is_initialized():
return 0
return dist.get_rank()
def get_local_rank() -> int:
"""
Returns:
The rank of the current process within the local (per-machine) process group.
"""
if not dist.is_available():
return 0
if not dist.is_initialized():
return 0
assert _LOCAL_PROCESS_GROUP is not None
return dist.get_rank(group=_LOCAL_PROCESS_GROUP)
def get_local_size() -> int:
"""
Returns:
The size of the per-machine process group,
i.e. the number of processes per machine.
"""
if not dist.is_available():
return 1
if not dist.is_initialized():
return 1
return dist.get_world_size(group=_LOCAL_PROCESS_GROUP)
def is_main_process() -> bool:
return get_rank() == 0
def synchronize():
"""
Helper function to synchronize (barrier) among all processes when
using distributed training
"""
if not dist.is_available():
return
if not dist.is_initialized():
return
world_size = dist.get_world_size()
if world_size == 1:
return
dist.barrier()
@functools.lru_cache()
def _get_global_gloo_group():
"""
Return a process group based on gloo backend, containing all the ranks
The result is cached.
"""
if dist.get_backend() == "nccl":
return dist.new_group(backend="gloo")
else:
return dist.group.WORLD
def _serialize_to_tensor(data, group):
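"""Pickles `data` and returns it as a uint8 tensor on the device matching
the group backend (CPU for gloo, GPU for nccl)."""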
backend = dist.get_backend(group)
assert backend in ["gloo", "nccl"]
device = torch.device("cpu" if backend == "gloo" else "cuda")
buffer = pickle.dumps(data)
if len(buffer) > 1024 ** 3:
logger = logging.getLogger(__name__)
logger.warning(
"Rank {} trying to all-gather {:.2f} GB of data on device {}".format(
get_rank(), len(buffer) / (1024 ** 3), device
)
)
storage = torch.ByteStorage.from_buffer(buffer)
tensor = torch.ByteTensor(storage).to(device=device)
return tensor
def _pad_to_largest_tensor(tensor, group):
"""
Returns:
list[int]: size of the tensor, on each rank
Tensor: padded tensor that has the max size
"""
world_size = dist.get_world_size(group=group)
assert (
world_size >= 1
), "comm.gather/all_gather must be called from ranks within the given group!"
local_size = torch.tensor([tensor.numel()], dtype=torch.int64, device=tensor.device)
size_list = [
torch.zeros([1], dtype=torch.int64, device=tensor.device) for _ in range(world_size)
]
dist.all_gather(size_list, local_size, group=group)
size_list = [int(size.item()) for size in size_list]
max_size = max(size_list)
# we pad the tensor because torch all_gather does not support
# gathering tensors of different shapes
if local_size != max_size:
padding = torch.zeros((max_size - local_size,), dtype=torch.uint8, device=tensor.device)
tensor = torch.cat((tensor, padding), dim=0)
return size_list, tensor
def all_gather(data, group=None):
"""
Run all_gather on arbitrary picklable data (not necessarily tensors).
Args:
data: any picklable object
group: a torch process group. By default, will use a group which
contains all ranks on gloo backend.
Returns:
list[data]: list of data gathered from each rank
"""
if get_world_size() == 1:
return [data]
if group is None:
group = _get_global_gloo_group()
if dist.get_world_size(group) == 1:
return [data]
tensor = _serialize_to_tensor(data, group)
size_list, tensor = _pad_to_largest_tensor(tensor, group)
max_size = max(size_list)
# receiving Tensor from all ranks
tensor_list = [torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) for _ in size_list]
dist.all_gather(tensor_list, tensor, group=group)
data_list = []
for size, tensor in zip(size_list, tensor_list):
buffer = tensor.cpu().numpy().tobytes()[:size]
data_list.append(pickle.loads(buffer))
return data_list
def gather(data, dst=0, group=None):
"""
Run gather on arbitrary picklable data (not necessarily tensors).
Args:
data: any picklable object
dst (int): destination rank
group: a torch process group. By default, will use a group which
contains all ranks on gloo backend.
Returns:
list[data]: on dst, a list of data gathered from each rank. Otherwise,
an empty list.
"""
if get_world_size() == 1:
return [data]
if group is None:
group = _get_global_gloo_group()
if dist.get_world_size(group=group) == 1:
return [data]
rank = dist.get_rank(group=group)
tensor = _serialize_to_tensor(data, group)
size_list, tensor = _pad_to_largest_tensor(tensor, group)
# receiving Tensor from all ranks
if rank == dst:
max_size = max(size_list)
tensor_list = [torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) for _ in size_list]
dist.gather(tensor, tensor_list, dst=dst, group=group)
data_list = []
for size, tensor in zip(size_list, tensor_list):
buffer = tensor.cpu().numpy().tobytes()[:size]
data_list.append(pickle.loads(buffer))
return data_list
else:
dist.gather(tensor, [], dst=dst, group=group)
return []
def shared_random_seed():
"""
Returns:
int: a random number that is the same across all workers.
If workers need a shared RNG, they can use this shared seed to
create one.
All workers must call this function, otherwise it will deadlock.
"""
ints = np.random.randint(2 ** 31)
all_ints = all_gather(ints)
return all_ints[0]
def reduce_dict(input_dict, average=True):
"""
Reduce the values in the dictionary from all processes so that process with rank
0 has the reduced results.
Args:
input_dict (dict): inputs to be reduced. All the values must be scalar CUDA Tensor.
average (bool): whether to do average or sum
Returns:
a dict with the same keys as input_dict, after reduction.
"""
world_size = get_world_size()
if world_size < 2:
return input_dict
with torch.no_grad():
names = []
values = []
# sort the keys so that they are consistent across processes
for k in sorted(input_dict.keys()):
names.append(k)
values.append(input_dict[k])
values = torch.stack(values, dim=0)
dist.reduce(values, dst=0)
if dist.get_rank() == 0 and average:
# only main process gets accumulated, so only divide by
# world_size in this case
values /= world_size
reduced_dict = {k: v for k, v in zip(names, values)}
return reduced_dict
def all_gather_container(container, group=None, cat_dim=0):
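"""All-gathers a dict/list/tuple of tensors (or a single tensor) across ranks,
concatenating the gathered tensors along `cat_dim`. All ranks must pass
containers with the same structure and matching tensor shapes."""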
group = group or dist.group.WORLD
world_size = dist.get_world_size(group)
def _do_gather(tensor):
tensor_list = [torch.empty_like(tensor) for _ in range(world_size)]
dist.all_gather(tensor_list, tensor, group=group)
return torch.cat(tensor_list, dim=cat_dim)
if isinstance(container, dict):
gathered = dict()
for k, v in container.items():
v = _do_gather(v)
gathered[k] = v
return gathered
elif isinstance(container, (list, tuple)):
gathered = [_do_gather(v) for v in container]
if isinstance(container, tuple):
gathered = tuple(gathered)
return gathered
else:
# if not a dict, list, tuple, expect a singular tensor
assert isinstance(container, torch.Tensor)
return _do_gather(container)
def gather_container(container, dst, group=None, cat_dim=0):
group = group or dist.group.WORLD
world_size = dist.get_world_size(group)
this_rank = dist.get_rank(group)
def _do_gather(tensor):
if this_rank == dst:
tensor_list = [torch.empty_like(tensor) for _ in range(world_size)]
else:
tensor_list = None
dist.gather(tensor, tensor_list, dst=dst, group=group)
return torch.cat(tensor_list, dim=cat_dim)
if isinstance(container, dict):
gathered = dict()
for k, v in container.items():
v = _do_gather(v)
gathered[k] = v
return gathered
elif isinstance(container, (list, tuple)):
gathered = [_do_gather(v) for v in container]
if isinstance(container, tuple):
gathered = tuple(gathered)
return gathered
else:
# if not a dict, list, tuple, expect a singular tensor
assert isinstance(container, torch.Tensor)
return _do_gather(container) |
TensorFlow/Detection/SSD/models/research/slim/nets | nets | i3d_test | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for networks.i3d."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from nets import i3d
class I3DTest(tf.test.TestCase):
def testBuildClassificationNetwork(self):
batch_size = 5
num_frames = 64
height, width = 224, 224
num_classes = 1000
inputs = tf.random_uniform((batch_size, num_frames, height, width, 3))
logits, end_points = i3d.i3d(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('InceptionV1/Logits'))
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
self.assertTrue('Predictions' in end_points)
self.assertListEqual(end_points['Predictions'].get_shape().as_list(),
[batch_size, num_classes])
def testBuildBaseNetwork(self):
batch_size = 5
num_frames = 64
height, width = 224, 224
inputs = tf.random_uniform((batch_size, num_frames, height, width, 3))
mixed_5c, end_points = i3d.i3d_base(inputs)
self.assertTrue(mixed_5c.op.name.startswith('InceptionV1/Mixed_5c'))
self.assertListEqual(mixed_5c.get_shape().as_list(),
[batch_size, 8, 7, 7, 1024])
expected_endpoints = ['Conv2d_1a_7x7', 'MaxPool_2a_3x3', 'Conv2d_2b_1x1',
'Conv2d_2c_3x3', 'MaxPool_3a_3x3', 'Mixed_3b',
'Mixed_3c', 'MaxPool_4a_3x3', 'Mixed_4b', 'Mixed_4c',
'Mixed_4d', 'Mixed_4e', 'Mixed_4f', 'MaxPool_5a_2x2',
'Mixed_5b', 'Mixed_5c']
self.assertItemsEqual(end_points.keys(), expected_endpoints)
def testBuildOnlyUptoFinalEndpoint(self):
batch_size = 5
num_frames = 64
height, width = 224, 224
endpoints = ['Conv2d_1a_7x7', 'MaxPool_2a_3x3', 'Conv2d_2b_1x1',
'Conv2d_2c_3x3', 'MaxPool_3a_3x3', 'Mixed_3b', 'Mixed_3c',
'MaxPool_4a_3x3', 'Mixed_4b', 'Mixed_4c', 'Mixed_4d',
'Mixed_4e', 'Mixed_4f', 'MaxPool_5a_2x2', 'Mixed_5b',
'Mixed_5c']
for index, endpoint in enumerate(endpoints):
with tf.Graph().as_default():
inputs = tf.random_uniform((batch_size, num_frames, height, width, 3))
out_tensor, end_points = i3d.i3d_base(
inputs, final_endpoint=endpoint)
self.assertTrue(out_tensor.op.name.startswith(
'InceptionV1/' + endpoint))
self.assertItemsEqual(endpoints[:index+1], end_points)
def testBuildAndCheckAllEndPointsUptoMixed5c(self):
batch_size = 5
num_frames = 64
height, width = 224, 224
inputs = tf.random_uniform((batch_size, num_frames, height, width, 3))
_, end_points = i3d.i3d_base(inputs,
final_endpoint='Mixed_5c')
endpoints_shapes = {'Conv2d_1a_7x7': [5, 32, 112, 112, 64],
'MaxPool_2a_3x3': [5, 32, 56, 56, 64],
'Conv2d_2b_1x1': [5, 32, 56, 56, 64],
'Conv2d_2c_3x3': [5, 32, 56, 56, 192],
'MaxPool_3a_3x3': [5, 32, 28, 28, 192],
'Mixed_3b': [5, 32, 28, 28, 256],
'Mixed_3c': [5, 32, 28, 28, 480],
'MaxPool_4a_3x3': [5, 16, 14, 14, 480],
'Mixed_4b': [5, 16, 14, 14, 512],
'Mixed_4c': [5, 16, 14, 14, 512],
'Mixed_4d': [5, 16, 14, 14, 512],
'Mixed_4e': [5, 16, 14, 14, 528],
'Mixed_4f': [5, 16, 14, 14, 832],
'MaxPool_5a_2x2': [5, 8, 7, 7, 832],
'Mixed_5b': [5, 8, 7, 7, 832],
'Mixed_5c': [5, 8, 7, 7, 1024]}
self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
for endpoint_name, expected_shape in endpoints_shapes.items():
self.assertTrue(endpoint_name in end_points)
self.assertListEqual(end_points[endpoint_name].get_shape().as_list(),
expected_shape)
def testHalfSizeImages(self):
batch_size = 5
num_frames = 64
height, width = 112, 112
inputs = tf.random_uniform((batch_size, num_frames, height, width, 3))
mixed_5c, _ = i3d.i3d_base(inputs)
self.assertTrue(mixed_5c.op.name.startswith('InceptionV1/Mixed_5c'))
self.assertListEqual(mixed_5c.get_shape().as_list(),
[batch_size, 8, 4, 4, 1024])
def testTenFrames(self):
batch_size = 5
num_frames = 10
height, width = 224, 224
inputs = tf.random_uniform((batch_size, num_frames, height, width, 3))
mixed_5c, _ = i3d.i3d_base(inputs)
self.assertTrue(mixed_5c.op.name.startswith('InceptionV1/Mixed_5c'))
self.assertListEqual(mixed_5c.get_shape().as_list(),
[batch_size, 2, 7, 7, 1024])
def testEvaluation(self):
batch_size = 2
num_frames = 64
height, width = 224, 224
num_classes = 1000
eval_inputs = tf.random_uniform((batch_size, num_frames, height, width, 3))
logits, _ = i3d.i3d(eval_inputs, num_classes,
is_training=False)
predictions = tf.argmax(logits, 1)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(predictions)
self.assertEqual(output.shape, (batch_size,))
if __name__ == '__main__':
tf.test.main()
|
TensorFlow/Detection/SSD/models/research/object_detection/models | models | faster_rcnn_inception_v2_feature_extractor_test | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for faster_rcnn_inception_v2_feature_extractor."""
import numpy as np
import tensorflow as tf
from object_detection.models import faster_rcnn_inception_v2_feature_extractor as faster_rcnn_inception_v2
class FasterRcnnInceptionV2FeatureExtractorTest(tf.test.TestCase):
def _build_feature_extractor(self, first_stage_features_stride):
return faster_rcnn_inception_v2.FasterRCNNInceptionV2FeatureExtractor(
is_training=False,
first_stage_features_stride=first_stage_features_stride,
batch_norm_trainable=False,
reuse_weights=None,
weight_decay=0.0)
def test_extract_proposal_features_returns_expected_size(self):
feature_extractor = self._build_feature_extractor(
first_stage_features_stride=16)
preprocessed_inputs = tf.random_uniform(
[4, 224, 224, 3], maxval=255, dtype=tf.float32)
rpn_feature_map, _ = feature_extractor.extract_proposal_features(
preprocessed_inputs, scope='TestScope')
features_shape = tf.shape(rpn_feature_map)
init_op = tf.global_variables_initializer()
with self.test_session() as sess:
sess.run(init_op)
features_shape_out = sess.run(features_shape)
self.assertAllEqual(features_shape_out, [4, 14, 14, 576])
def test_extract_proposal_features_stride_eight(self):
feature_extractor = self._build_feature_extractor(
first_stage_features_stride=8)
preprocessed_inputs = tf.random_uniform(
[4, 224, 224, 3], maxval=255, dtype=tf.float32)
rpn_feature_map, _ = feature_extractor.extract_proposal_features(
preprocessed_inputs, scope='TestScope')
features_shape = tf.shape(rpn_feature_map)
init_op = tf.global_variables_initializer()
with self.test_session() as sess:
sess.run(init_op)
features_shape_out = sess.run(features_shape)
self.assertAllEqual(features_shape_out, [4, 14, 14, 576])
def test_extract_proposal_features_half_size_input(self):
feature_extractor = self._build_feature_extractor(
first_stage_features_stride=16)
preprocessed_inputs = tf.random_uniform(
[1, 112, 112, 3], maxval=255, dtype=tf.float32)
rpn_feature_map, _ = feature_extractor.extract_proposal_features(
preprocessed_inputs, scope='TestScope')
features_shape = tf.shape(rpn_feature_map)
init_op = tf.global_variables_initializer()
with self.test_session() as sess:
sess.run(init_op)
features_shape_out = sess.run(features_shape)
self.assertAllEqual(features_shape_out, [1, 7, 7, 576])
def test_extract_proposal_features_dies_on_invalid_stride(self):
with self.assertRaises(ValueError):
self._build_feature_extractor(first_stage_features_stride=99)
def test_extract_proposal_features_dies_on_very_small_images(self):
feature_extractor = self._build_feature_extractor(
first_stage_features_stride=16)
preprocessed_inputs = tf.placeholder(tf.float32, (4, None, None, 3))
rpn_feature_map, _ = feature_extractor.extract_proposal_features(
preprocessed_inputs, scope='TestScope')
features_shape = tf.shape(rpn_feature_map)
init_op = tf.global_variables_initializer()
with self.test_session() as sess:
sess.run(init_op)
with self.assertRaises(tf.errors.InvalidArgumentError):
sess.run(
features_shape,
feed_dict={preprocessed_inputs: np.random.rand(4, 32, 32, 3)})
def test_extract_proposal_features_dies_with_incorrect_rank_inputs(self):
feature_extractor = self._build_feature_extractor(
first_stage_features_stride=16)
preprocessed_inputs = tf.random_uniform(
[224, 224, 3], maxval=255, dtype=tf.float32)
with self.assertRaises(ValueError):
feature_extractor.extract_proposal_features(
preprocessed_inputs, scope='TestScope')
def test_extract_box_classifier_features_returns_expected_size(self):
feature_extractor = self._build_feature_extractor(
first_stage_features_stride=16)
proposal_feature_maps = tf.random_uniform(
[3, 14, 14, 576], maxval=255, dtype=tf.float32)
proposal_classifier_features = (
feature_extractor.extract_box_classifier_features(
proposal_feature_maps, scope='TestScope'))
features_shape = tf.shape(proposal_classifier_features)
init_op = tf.global_variables_initializer()
with self.test_session() as sess:
sess.run(init_op)
features_shape_out = sess.run(features_shape)
self.assertAllEqual(features_shape_out, [3, 7, 7, 1024])
if __name__ == '__main__':
tf.test.main()
|
TensorFlow/Recommendation/VAE-CF/vae/load | load | preprocessing | # Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from collections import defaultdict
from glob import glob
import pandas as pd
from scipy import sparse
import scipy.sparse as sp
import numpy as np
from scipy.sparse import load_npz, csr_matrix
import logging
import json
LOG = logging.getLogger("VAE")
def save_as_npz(m_sp, path):
if not os.path.isdir(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
sp.save_npz(path, m_sp)
def get_count(tp, id):
playcount_groupbyid = tp[[id]].groupby(id, as_index=False)
count = playcount_groupbyid.size()
return count
def filter_triplets(tp, min_uc=5, min_sc=0):
# Only keep the triplets for items which were clicked on by at least min_sc users.
if min_sc > 0:
itemcount = get_count(tp, 'movieId')
tp = tp[tp['movieId'].isin(itemcount.index[itemcount >= min_sc])]
# Only keep the triplets for users who clicked on at least min_uc items
# After doing this, some of the items will have less than min_uc users, but should only be a small proportion
if min_uc > 0:
usercount = get_count(tp, 'userId')
tp = tp[tp['userId'].isin(usercount.index[usercount >= min_uc])]
# Update both usercount and itemcount after filtering
usercount, itemcount = get_count(tp, 'userId'), get_count(tp, 'movieId')
return tp, usercount, itemcount
def save_id_mappings(cache_dir, show2id, profile2id):
if not os.path.isdir(cache_dir):
os.makedirs(cache_dir)
for d, filename in [(show2id, 'show2id.json'),
(profile2id, 'profile2id.json')]:
with open(os.path.join(cache_dir, filename), 'w') as f:
d = {str(k): v for k, v in d.items()}
json.dump(d, f, indent=4)
def load_and_parse_ML_20M(data_dir, threshold=4, parse=True):
"""
Original way of processing ml-20m dataset from VAE for CF paper
Copyright [2018] [Dawen Liang, Rahul G. Krishnan, Matthew D. Hoffman, and Tony Jebara]
SPDX-License-Identifier: Apache-2.0
Modifications copyright (C) 2019 Michał Filipiuk, Albert Cieślak, Frederic Grabowski, Radosław Rowicki
"""
cache_dir = os.path.join(data_dir, "ml-20m/preprocessed")
train_data_file = os.path.join(cache_dir, "train_data.npz")
vad_data_true_file = os.path.join(cache_dir, "vad_data_true.npz")
vad_data_test_file = os.path.join(cache_dir, "vad_data_test.npz")
test_data_true_file = os.path.join(cache_dir, "test_data_true.npz")
test_data_test_file = os.path.join(cache_dir, "test_data_test.npz")
if (os.path.isfile(train_data_file)
and os.path.isfile(vad_data_true_file)
and os.path.isfile(vad_data_test_file)
and os.path.isfile(test_data_true_file)
and os.path.isfile(test_data_test_file)):
LOG.info("Already processed, skipping.")
return load_npz(train_data_file), \
load_npz(vad_data_true_file), \
load_npz(vad_data_test_file), \
load_npz(test_data_true_file), \
load_npz(test_data_test_file),
if not parse:
raise ValueError('Dataset not preprocessed. Please run python3 prepare_dataset.py first.')
LOG.info("Parsing movielens.")
source_file = os.path.join(data_dir, "ml-20m/extracted/ml-20m", "ratings.csv")
if not glob(source_file):
raise ValueError('Dataset not downloaded. Please download the ML-20m dataset from https://grouplens.org/datasets/movielens/20m/, unzip it and put it in ' + source_file)
raw_data = pd.read_csv(source_file)
raw_data.drop('timestamp', axis=1, inplace=True)
raw_data = raw_data[raw_data['rating'] >= threshold]
raw_data, user_activity, item_popularity = filter_triplets(raw_data)
unique_uid = user_activity.index
idx_perm = np.random.permutation(unique_uid.size)
unique_uid = unique_uid[idx_perm]
n_users = unique_uid.size
n_heldout_users = 10000
true_users = unique_uid[:(n_users - n_heldout_users * 2)]
vd_users = unique_uid[(n_users - n_heldout_users * 2): (n_users - n_heldout_users)]
test_users = unique_uid[(n_users - n_heldout_users):]
train_plays = raw_data.loc[raw_data['userId'].isin(true_users)]
unique_sid = pd.unique(train_plays['movieId'])
show2id = dict((sid, i) for (i, sid) in enumerate(unique_sid))
profile2id = dict((pid, i) for (i, pid) in enumerate(unique_uid))
save_id_mappings(cache_dir, show2id, profile2id)
def split_train_test_proportion(data, test_prop=0.2):
data_grouped_by_user = data.groupby('userId')
true_list, test_list = list(), list()
for i, (_, group) in enumerate(data_grouped_by_user):
n_items_u = len(group)
if n_items_u >= 5:
idx = np.zeros(n_items_u, dtype='bool')
idx[np.random.choice(n_items_u, size=int(test_prop * n_items_u), replace=False).astype('int64')] = True
true_list.append(group[np.logical_not(idx)])
test_list.append(group[idx])
else:
true_list.append(group)
data_true = pd.concat(true_list)
data_test = pd.concat(test_list)
return data_true, data_test
vad_plays = raw_data.loc[raw_data['userId'].isin(vd_users)]
vad_plays = vad_plays.loc[vad_plays['movieId'].isin(unique_sid)]
vad_plays_true, vad_plays_test = split_train_test_proportion(vad_plays)
test_plays = raw_data.loc[raw_data['userId'].isin(test_users)]
test_plays = test_plays.loc[test_plays['movieId'].isin(unique_sid)]
test_plays_true, test_plays_test = split_train_test_proportion(test_plays)
def numerize(tp):
uid = tp['userId'].map(lambda x: profile2id[x])
sid = tp['movieId'].map(lambda x: show2id[x])
return pd.DataFrame(data={'uid': uid, 'sid': sid}, columns=['uid', 'sid'])
train_data = numerize(train_plays)
vad_data_true = numerize(vad_plays_true)
vad_data_test = numerize(vad_plays_test)
test_data_true = numerize(test_plays_true)
test_data_test = numerize(test_plays_test)
n_items = len(unique_sid)
def load_train_data(tp):
n_users = tp['uid'].max() + 1
rows, cols = tp['uid'], tp['sid']
data = sparse.csr_matrix((np.ones_like(rows),
(rows, cols)), dtype='float64',
shape=(n_users, n_items))
return data
train_data = load_train_data(train_data)
def load_true_test_data(tp_true, tp_test):
start_idx = min(tp_true['uid'].min(), tp_test['uid'].min())
end_idx = max(tp_true['uid'].max(), tp_test['uid'].max())
rows_true, cols_true = tp_true['uid'] - start_idx, tp_true['sid']
rows_test, cols_test = tp_test['uid'] - start_idx, tp_test['sid']
data_true = sparse.csr_matrix((np.ones_like(rows_true),
(rows_true, cols_true)), dtype='float64', shape=(end_idx - start_idx + 1, n_items))
data_test = sparse.csr_matrix((np.ones_like(rows_test),
(rows_test, cols_test)), dtype='float64', shape=(end_idx - start_idx + 1, n_items))
return data_true, data_test
vad_data_true, vad_data_test = load_true_test_data(vad_data_true, vad_data_test)
test_data_true, test_data_test = load_true_test_data(test_data_true, test_data_test)
save_as_npz(train_data, train_data_file)
save_as_npz(vad_data_true, vad_data_true_file)
save_as_npz(vad_data_test, vad_data_test_file)
save_as_npz(test_data_true, test_data_true_file)
save_as_npz(test_data_test, test_data_test_file)
return train_data, vad_data_true, vad_data_test, test_data_true, test_data_test
def filter_data(data, min_users=1, min_items=5):
"""
:param data: input matrix
:param min_users: only keep items, that were clicked by at least min_users
:param min_items: only keep users, that clicked at least min_items
:return: filtered matrix
"""
col_count = defaultdict(lambda: 0)
for col in data.nonzero()[1]:
col_count[col] += 1
filtered_col = [k for k, v in col_count.items() if v >= min_users]
filtered_data_c = data[:, filtered_col]
del data
row_count = defaultdict(lambda: 0)
for row in filtered_data_c.nonzero()[0]:
row_count[row] += 1
filtered_row = [k for k, v in row_count.items() if v >= min_items]
filtered_data_r = filtered_data_c[filtered_row, :]
del filtered_data_c
return filtered_data_r
def split_into_train_val_test(data, val_ratio, test_ratio):
"""
:param data: input matrix
:param val_ratio: Ratio of validation users to all users
:param test_ratio: Ratio of test users to all users
:return: Tuple of 3 matrices : {train_matrix, val_matrix, test_matrix}
"""
assert val_ratio + test_ratio < 1
train_ratio = 1 - val_ratio - test_ratio
rows_count = data.shape[0]
idx = np.random.permutation(range(rows_count))
train_users_count = int(np.rint(rows_count * train_ratio))
val_users_count = int(np.rint(rows_count * val_ratio))
separator = train_users_count + val_users_count
train_matrix = data[idx[:train_users_count]]
val_matrix = data[idx[train_users_count:separator]]
test_matrix = data[idx[separator:]]
return train_matrix, val_matrix, test_matrix
def split_movies_into_train_test(data, train_ratio):
"""
Splits data into 2 matrices. The users stay the same, but the items are being split by train_ratio
:param data: input matrix
:param train_ratio: Ratio of input items to all items
:return: tuple of 2 matrices: {train_matrix, test_matrix}
"""
rows_count, columns_count = data.shape
train_rows = list()
train_columns = list()
test_rows = list()
test_columns = list()
for i in range(rows_count):
user_movies = data.getrow(i).nonzero()[1]
np.random.shuffle(user_movies)
movies_count = len(user_movies)
train_count = int(np.floor(movies_count * train_ratio))
test_count = movies_count - train_count
train_movies = user_movies[:train_count]
test_movies = user_movies[train_count:]
train_rows += ([i] * train_count)
train_columns += list(train_movies)
test_rows += ([i] * test_count)
test_columns += list(test_movies)
train_matrix = csr_matrix(([1] * len(train_rows), (train_rows, train_columns)), shape=(rows_count, columns_count))
test_matrix = csr_matrix(([1] * len(test_rows), (test_rows, test_columns)), shape=(rows_count, columns_count))
return train_matrix, test_matrix
def remove_items_that_doesnt_occure_in_train(train_matrix, val_matrix, test_matrix):
"""
Remove items that don't occur in the train matrix
:param train_matrix: training data
:param val_matrix: validation data
:param test_matrix: test data
:return: Input matrices without some items
"""
item_occurs = defaultdict(lambda: False)
for col in train_matrix.nonzero()[1]:
item_occurs[col] = True
non_empty_items = [k for k, v in item_occurs.items() if v]
return train_matrix[:, non_empty_items], val_matrix[:, non_empty_items], test_matrix[:, non_empty_items]
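# Typical pipeline sketch (illustrative; ratios are placeholders):
# data = filter_data(raw_matrix, min_users=1, min_items=5)
# train, val, test = split_into_train_val_test(data, val_ratio=0.1, test_ratio=0.1)
# val_true, val_test = split_movies_into_train_test(val, train_ratio=0.8)
# train, val_true, val_test = remove_items_that_doesnt_occure_in_train(train, val_true, val_test)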
|
PyTorch/SpeechSynthesis/Tacotron2/notebooks/conversationalai/client/speech_ai_demo/utils/tacotron2 | tacotron2 | numbers | """ from https://github.com/keithito/tacotron """
import inflect
import re
_inflect = inflect.engine()
_comma_number_re = re.compile(r'([0-9][0-9\,]+[0-9])')
_decimal_number_re = re.compile(r'([0-9]+\.[0-9]+)')
_pounds_re = re.compile(r'£([0-9\,]*[0-9]+)')
_dollars_re = re.compile(r'\$([0-9\.\,]*[0-9]+)')
_ordinal_re = re.compile(r'[0-9]+(st|nd|rd|th)')
_number_re = re.compile(r'[0-9]+')
def _remove_commas(m):
return m.group(1).replace(',', '')
def _expand_decimal_point(m):
return m.group(1).replace('.', ' point ')
def _expand_dollars(m):
match = m.group(1)
parts = match.split('.')
if len(parts) > 2:
return match + ' dollars' # Unexpected format
dollars = int(parts[0]) if parts[0] else 0
cents = int(parts[1]) if len(parts) > 1 and parts[1] else 0
if dollars and cents:
dollar_unit = 'dollar' if dollars == 1 else 'dollars'
cent_unit = 'cent' if cents == 1 else 'cents'
return '%s %s, %s %s' % (dollars, dollar_unit, cents, cent_unit)
elif dollars:
dollar_unit = 'dollar' if dollars == 1 else 'dollars'
return '%s %s' % (dollars, dollar_unit)
elif cents:
cent_unit = 'cent' if cents == 1 else 'cents'
return '%s %s' % (cents, cent_unit)
else:
return 'zero dollars'
def _expand_ordinal(m):
return _inflect.number_to_words(m.group(0))
def _expand_number(m):
num = int(m.group(0))
if num > 1000 and num < 3000:
if num == 2000:
return 'two thousand'
elif num > 2000 and num < 2010:
return 'two thousand ' + _inflect.number_to_words(num % 100)
elif num % 100 == 0:
return _inflect.number_to_words(num // 100) + ' hundred'
else:
return _inflect.number_to_words(num, andword='', zero='oh', group=2).replace(', ', ' ')
else:
return _inflect.number_to_words(num, andword='')
def normalize_numbers(text):
text = re.sub(_comma_number_re, _remove_commas, text)
text = re.sub(_pounds_re, r'\1 pounds', text)
text = re.sub(_dollars_re, _expand_dollars, text)
text = re.sub(_decimal_number_re, _expand_decimal_point, text)
text = re.sub(_ordinal_re, _expand_ordinal, text)
text = re.sub(_number_re, _expand_number, text)
return text
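# Example (illustrative): normalize_numbers('I paid $3.50 on the 21st')
# returns 'I paid three dollars, fifty cents on the twenty-first'
# (ordinals are expanded before plain numbers, and currency before decimals).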
|
PyTorch/SpeechSynthesis/FastPitch/fastpitch | fastpitch | model_jit | # *****************************************************************************
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the NVIDIA CORPORATION nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# *****************************************************************************
from typing import Optional
import torch
from torch import nn as nn
from common import filter_warnings
from fastpitch.model import TemporalPredictor
from fastpitch.transformer_jit import FFTransformer
def regulate_len(durations, enc_out, pace: float = 1.0,
mel_max_len: Optional[int] = None):
"""If target=None, then predicted durations are applied"""
reps = torch.round(durations.float() / pace).long()
dec_lens = reps.sum(dim=1)
max_len = dec_lens.max()
bsz, _, hid = enc_out.size()
reps_padded = torch.cat([reps, (max_len - dec_lens)[:, None]], dim=1)
pad_vec = torch.zeros(bsz, 1, hid, dtype=enc_out.dtype,
device=enc_out.device)
enc_rep = torch.cat([enc_out, pad_vec], dim=1)
enc_rep = torch.repeat_interleave(
enc_rep.view(-1, hid), reps_padded.view(-1), dim=0
).view(bsz, -1, hid)
if mel_max_len is not None:
enc_rep = enc_rep[:, :mel_max_len]
dec_lens = torch.clamp_max(dec_lens, mel_max_len)
return enc_rep, dec_lens
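# Illustration (editor's addition): with durations [[2, 1]] each of the two
# encoder frames is repeated round(d / pace) times, giving 3 output frames.
if __name__ == '__main__':
    _dur = torch.tensor([[2.0, 1.0]])   # [bsz=1, T=2]
    _enc = torch.randn(1, 2, 8)         # [bsz, T, hid]
    _out, _lens = regulate_len(_dur, _enc)
    assert _out.shape == (1, 3, 8) and _lens.item() == 3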
class FastPitchJIT(nn.Module):
__constants__ = ['energy_conditioning']
def __init__(self, n_mel_channels, n_symbols, padding_idx,
symbols_embedding_dim, in_fft_n_layers, in_fft_n_heads,
in_fft_d_head,
in_fft_conv1d_kernel_size, in_fft_conv1d_filter_size,
in_fft_output_size,
p_in_fft_dropout, p_in_fft_dropatt, p_in_fft_dropemb,
out_fft_n_layers, out_fft_n_heads, out_fft_d_head,
out_fft_conv1d_kernel_size, out_fft_conv1d_filter_size,
out_fft_output_size,
p_out_fft_dropout, p_out_fft_dropatt, p_out_fft_dropemb,
dur_predictor_kernel_size, dur_predictor_filter_size,
p_dur_predictor_dropout, dur_predictor_n_layers,
pitch_predictor_kernel_size, pitch_predictor_filter_size,
p_pitch_predictor_dropout, pitch_predictor_n_layers,
pitch_embedding_kernel_size,
energy_conditioning,
energy_predictor_kernel_size, energy_predictor_filter_size,
p_energy_predictor_dropout, energy_predictor_n_layers,
energy_embedding_kernel_size,
n_speakers, speaker_emb_weight, pitch_conditioning_formants=1):
super(FastPitchJIT, self).__init__()
self.encoder = FFTransformer(
n_layer=in_fft_n_layers, n_head=in_fft_n_heads,
d_model=symbols_embedding_dim,
d_head=in_fft_d_head,
d_inner=in_fft_conv1d_filter_size,
kernel_size=in_fft_conv1d_kernel_size,
dropout=p_in_fft_dropout,
dropatt=p_in_fft_dropatt,
dropemb=p_in_fft_dropemb,
embed_input=True,
d_embed=symbols_embedding_dim,
n_embed=n_symbols,
padding_idx=padding_idx)
if n_speakers > 1:
self.speaker_emb = nn.Embedding(n_speakers, symbols_embedding_dim)
else:
self.speaker_emb = None
self.speaker_emb_weight = speaker_emb_weight
self.duration_predictor = TemporalPredictor(
in_fft_output_size,
filter_size=dur_predictor_filter_size,
kernel_size=dur_predictor_kernel_size,
dropout=p_dur_predictor_dropout, n_layers=dur_predictor_n_layers
)
self.decoder = FFTransformer(
n_layer=out_fft_n_layers, n_head=out_fft_n_heads,
d_model=symbols_embedding_dim,
d_head=out_fft_d_head,
d_inner=out_fft_conv1d_filter_size,
kernel_size=out_fft_conv1d_kernel_size,
dropout=p_out_fft_dropout,
dropatt=p_out_fft_dropatt,
dropemb=p_out_fft_dropemb,
embed_input=False,
d_embed=symbols_embedding_dim
)
self.pitch_predictor = TemporalPredictor(
in_fft_output_size,
filter_size=pitch_predictor_filter_size,
kernel_size=pitch_predictor_kernel_size,
dropout=p_pitch_predictor_dropout, n_layers=pitch_predictor_n_layers,
n_predictions=pitch_conditioning_formants
)
self.pitch_emb = nn.Conv1d(
pitch_conditioning_formants, symbols_embedding_dim,
kernel_size=pitch_embedding_kernel_size,
padding=int((pitch_embedding_kernel_size - 1) / 2))
# Store values precomputed for training data within the model
self.register_buffer('pitch_mean', torch.zeros(1))
self.register_buffer('pitch_std', torch.zeros(1))
self.energy_conditioning = energy_conditioning
if energy_conditioning:
self.energy_predictor = TemporalPredictor(
in_fft_output_size,
filter_size=energy_predictor_filter_size,
kernel_size=energy_predictor_kernel_size,
dropout=p_energy_predictor_dropout,
n_layers=energy_predictor_n_layers,
n_predictions=1
)
self.energy_emb = nn.Conv1d(
1, symbols_embedding_dim,
kernel_size=energy_embedding_kernel_size,
padding=int((energy_embedding_kernel_size - 1) / 2))
self.proj = nn.Linear(out_fft_output_size, n_mel_channels, bias=True)
# skip self.attention (used only in training)
def infer(self, inputs, pace: float = 1.0,
dur_tgt: Optional[torch.Tensor] = None,
pitch_tgt: Optional[torch.Tensor] = None,
energy_tgt: Optional[torch.Tensor] = None,
speaker: int = 0):
if self.speaker_emb is None:
spk_emb = None
else:
speaker = (torch.ones(inputs.size(0)).long().to(inputs.device)
* speaker)
spk_emb = self.speaker_emb(speaker).unsqueeze(1)
spk_emb.mul_(self.speaker_emb_weight)
# Input FFT
enc_out, enc_mask = self.encoder(inputs, conditioning=spk_emb)
# Predict durations
log_dur_pred = self.duration_predictor(enc_out, enc_mask).squeeze(-1)
dur_pred = torch.clamp(torch.exp(log_dur_pred) - 1, 0, 100.0)
# Pitch over chars
pitch_pred = self.pitch_predictor(enc_out, enc_mask).permute(0, 2, 1)
if pitch_tgt is None:
pitch_emb = self.pitch_emb(pitch_pred).transpose(1, 2)
else:
pitch_emb = self.pitch_emb(pitch_tgt).transpose(1, 2)
enc_out = enc_out + pitch_emb
# Predict energy
if self.energy_conditioning:
if energy_tgt is None:
energy_pred = self.energy_predictor(enc_out, enc_mask).squeeze(-1)
energy_emb = self.energy_emb(energy_pred.unsqueeze(1)).transpose(1, 2)
else:
energy_pred = None
energy_emb = self.energy_emb(energy_tgt).transpose(1, 2)
enc_out = enc_out + energy_emb
else:
energy_pred = None
len_regulated, dec_lens = regulate_len(
dur_pred if dur_tgt is None else dur_tgt,
enc_out, pace, mel_max_len=None)
dec_out, dec_mask = self.decoder(len_regulated, dec_lens)
mel_out = self.proj(dec_out)
# mel_lens = dec_mask.squeeze(2).sum(axis=1).long()
mel_out = mel_out.permute(0, 2, 1) # For inference.py
return mel_out, dec_lens, dur_pred, pitch_pred, energy_pred
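# TorchScript export sketch (editor's addition; `model_kwargs` is a
# hypothetical dict holding the constructor arguments listed above):
#   model = FastPitchJIT(**model_kwargs)
#   scripted = torch.jit.script(model)
#   scripted.save('fastpitch_jit.pt')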
|
PyTorch/LanguageModeling/BERT/scripts/configs | configs | squad_config | #!/usr/bin/env bash
# Copyright (c) 2020 NVIDIA CORPORATION. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
dgxa100-80g_8gpu_fp16 ()
{
init_checkpoint="/workspace/bert/checkpoints/bert_uncased.pt"
epochs="2.0"
batch_size="32"
learning_rate="4.6e-5"
warmup_proportion="0.2"
precision="fp16"
num_gpu="8"
seed="1"
squad_dir="$BERT_PREP_WORKING_DIR/download/squad/v1.1"
vocab_file="$BERT_PREP_WORKING_DIR/download/google_pretrained_weights/uncased_L-24_H-1024_A-16/vocab.txt"
OUT_DIR="/workspace/bert/results/SQuAD"
echo $init_checkpoint $epochs $batch_size $learning_rate $warmup_proportion \
$precision $num_gpu $seed $squad_dir $vocab_file \
$OUT_DIR
}
dgxa100-80g_8gpu_tf32 ()
{
init_checkpoint="/workspace/bert/checkpoints/bert_uncased.pt"
epochs="2.0"
batch_size="32"
learning_rate="4.6e-5"
warmup_proportion="0.2"
precision="tf32"
num_gpu="8"
seed="1"
squad_dir="$BERT_PREP_WORKING_DIR/download/squad/v1.1"
vocab_file="$BERT_PREP_WORKING_DIR/download/google_pretrained_weights/uncased_L-24_H-1024_A-16/vocab.txt"
OUT_DIR="/workspace/bert/results/SQuAD"
echo $init_checkpoint $epochs $batch_size $learning_rate $warmup_proportion \
$precision $num_gpu $seed $squad_dir $vocab_file \
$OUT_DIR
}
dgx1-32g_8gpu_fp16 ()
{
init_checkpoint="/workspace/bert/checkpoints/bert_uncased.pt"
epochs="2.0"
batch_size="32"
learning_rate="4.6e-5"
warmup_proportion="0.2"
precision="fp16"
num_gpu="8"
seed="1"
squad_dir="$BERT_PREP_WORKING_DIR/download/squad/v1.1"
vocab_file="$BERT_PREP_WORKING_DIR/download/google_pretrained_weights/uncased_L-24_H-1024_A-16/vocab.txt"
OUT_DIR="/workspace/bert/results/SQuAD"
echo $init_checkpoint $epochs $batch_size $learning_rate $warmup_proportion \
$precision $num_gpu $seed $squad_dir $vocab_file \
$OUT_DIR
}
dgx1-32g_8gpu_fp32 ()
{
init_checkpoint="/workspace/bert/checkpoints/bert_uncased.pt"
epochs="2.0"
batch_size="16"
learning_rate="4.6e-5"
warmup_proportion="0.2"
precision="fp32"
num_gpu="8"
seed="1"
squad_dir="$BERT_PREP_WORKING_DIR/download/squad/v1.1"
vocab_file="$BERT_PREP_WORKING_DIR/download/google_pretrained_weights/uncased_L-24_H-1024_A-16/vocab.txt"
OUT_DIR="/workspace/bert/results/SQuAD"
echo $init_checkpoint $epochs $batch_size $learning_rate $warmup_proportion \
$precision $num_gpu $seed $squad_dir $vocab_file \
$OUT_DIR
}
|
TensorFlow/Classification/ConvNets/resnext101-32x4d/training | training | DGX1_RNxt101-32x4d_AMP_250E | #!/bin/bash
# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
WORKSPACE=${1:-"/workspace/rn50v15_tf"}
DATA_DIR=${2:-"/data"}
OTHER=${@:3}
if [[ ! -z "${BIND_TO_SOCKET}" ]]; then
BIND_TO_SOCKET="--bind-to socket"
fi
mpiexec --allow-run-as-root ${BIND_TO_SOCKET} -np 8 python3 main.py --arch=resnext101-32x4d \
--mode=train_and_evaluate --iter_unit=epoch --num_iter=250 --mixup=0.2 \
--batch_size=128 --warmup_steps=100 --cosine_lr --label_smoothing 0.1 \
--lr_init=0.256 --lr_warmup_epochs=8 --momentum=0.875 --weight_decay=6.103515625e-05 \
--amp --static_loss_scale 128 \
--data_dir=${DATA_DIR}/tfrecords --data_idx_dir=${DATA_DIR}/dali_idx \
--results_dir=${WORKSPACE}/results --weight_init=fan_in ${OTHER}
|
Tools/PyTorch/TimeSeriesPredictionPlatform/models/tft_pyt/scripts/autobench | autobench | ngc_traffic_HP_search | NGC: &NGC
hostname: ngc
instance: dgx1v.32g.8.norm
job_name: "ml-model.tft traffic HP search"
docker_image: nvcr.io/nvidian/swdl/jbaczek:tft_pyt
datasets:
/data: 78291
workspaces:
/ws: VUMFFB3uSv25FDlkXg80Vw
download_dir: /home/jbaczek/Downloads
jobs:
- steps:
- DATASET=traffic NGPU=8 DROPOUT=0.3 LR=5e-4 H_SIZE=128 N_HEADS=4 bash scripts/run_hp_search.sh
backend: *NGC
- steps:
- DATASET=traffic NGPU=8 DROPOUT=0.3 LR=5e-3 H_SIZE=128 N_HEADS=4 bash scripts/run_hp_search.sh
backend: *NGC
- steps:
- DATASET=traffic NGPU=8 DROPOUT=0.3 LR=1e-3 H_SIZE=128 N_HEADS=4 bash scripts/run_hp_search.sh
backend: *NGC
reports:
filename: traffic
types:
- xls
|
PyTorch/SpeechRecognition/QuartzNet | QuartzNet | train | # Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import copy
import os
import random
import time
import torch
import amp_C
import numpy as np
import torch.distributed as dist
from apex.optimizers import FusedLAMB, FusedNovoGrad
from contextlib import suppress as empty_context
from common import helpers
from common.dali.data_loader import DaliDataLoader
from common.dataset import AudioDataset, get_data_loader
from common.features import BaseFeatures, FilterbankFeatures
from common.helpers import (Checkpointer, greedy_wer, num_weights, print_once,
process_evaluation_epoch)
from common.optimizers import AdamW, lr_policy, Novograd
from common.tb_dllogger import flush_log, init_log, log
from common.utils import BenchmarkStats
from quartznet import config
from quartznet.model import CTCLossNM, GreedyCTCDecoder, QuartzNet
def parse_args():
parser = argparse.ArgumentParser(description='QuartzNet')
training = parser.add_argument_group('training setup')
training.add_argument('--epochs', default=400, type=int,
help='Number of epochs for the entire training; influences the lr schedule')
training.add_argument("--warmup_epochs", default=0, type=int,
help='Initial epochs of increasing learning rate')
training.add_argument("--hold_epochs", default=0, type=int,
help='Constant max learning rate epochs after warmup')
training.add_argument('--epochs_this_job', default=0, type=int,
help=('Run for a number of epochs with no effect on the lr schedule. '
'Useful for re-starting the training.'))
training.add_argument('--cudnn_benchmark', action='store_true', default=True,
help='Enable cudnn benchmark')
training.add_argument('--amp', '--fp16', action='store_true', default=False,
help='Use pytorch native mixed precision training')
training.add_argument('--seed', default=None, type=int, help='Random seed')
training.add_argument('--local_rank', '--local-rank', default=os.getenv('LOCAL_RANK', 0), type=int,
help='GPU id used for distributed training')
training.add_argument('--pre_allocate_range', default=None, type=int, nargs=2,
help='Warmup with batches of length [min, max] before training')
optim = parser.add_argument_group('optimization setup')
optim.add_argument('--gpu_batch_size', default=32, type=int,
help='Batch size for a single forward/backward pass. '
'The effective batch size is gpu_batch_size * grad_accumulation.')
optim.add_argument('--lr', default=1e-3, type=float,
help='Peak learning rate')
optim.add_argument("--min_lr", default=1e-5, type=float,
help='minimum learning rate')
optim.add_argument("--lr_policy", default='exponential', type=str,
choices=['exponential', 'legacy'], help='lr scheduler')
optim.add_argument("--lr_exp_gamma", default=0.99, type=float,
help='gamma factor for exponential lr scheduler')
optim.add_argument('--weight_decay', default=1e-3, type=float,
help='Weight decay for the optimizer')
optim.add_argument('--grad_accumulation', '--update-freq', default=1, type=int,
help='Number of accumulation steps')
optim.add_argument('--optimizer', default='novograd', type=str,
choices=['novograd', 'adamw', 'lamb98', 'fused_novograd'],
help='Optimization algorithm')
optim.add_argument('--ema', type=float, default=0.0,
help='Discount factor for exp averaging of model weights')
optim.add_argument('--multi_tensor_ema', action='store_true',
help='Use multi_tensor_apply for EMA')
io = parser.add_argument_group('feature and checkpointing setup')
io.add_argument('--dali_device', type=str, choices=['none', 'cpu', 'gpu'],
default='gpu', help='Use DALI pipeline for fast data processing')
io.add_argument('--resume', action='store_true',
help='Try to resume from last saved checkpoint.')
io.add_argument('--ckpt', default=None, type=str,
help='Path to a checkpoint for resuming training')
io.add_argument('--save_frequency', default=10, type=int,
help='Checkpoint saving frequency in epochs')
io.add_argument('--keep_milestones', default=[100, 200, 300], type=int, nargs='+',
help='Milestone checkpoints to keep from removing')
io.add_argument('--save_best_from', default=380, type=int,
help='Epoch on which to begin tracking best checkpoint (dev WER)')
io.add_argument('--eval_frequency', default=200, type=int,
help='Number of steps between evaluations on dev set')
io.add_argument('--log_frequency', default=25, type=int,
help='Number of steps between printing training stats')
io.add_argument('--prediction_frequency', default=100, type=int,
help='Number of steps between printing sample decodings')
io.add_argument('--model_config', type=str, required=True,
help='Path of the model configuration file')
io.add_argument('--train_manifests', type=str, required=True, nargs='+',
help='Paths of the training dataset manifest file')
io.add_argument('--val_manifests', type=str, required=True, nargs='+',
help='Paths of the evaluation datasets manifest files')
io.add_argument('--dataset_dir', required=True, type=str,
help='Root dir of dataset')
io.add_argument('--output_dir', type=str, required=True,
help='Directory for logs and checkpoints')
io.add_argument('--log_file', type=str, default=None,
help='Path to save the training logfile.')
io.add_argument('--benchmark_epochs_num', type=int, default=1,
help='Number of epochs accounted in final average throughput.')
io.add_argument('--override_config', type=str, action='append',
help='Overrides arbitrary config value.'
' Syntax: `--override_config nested.config.key=val`.')
return parser.parse_args()
def reduce_tensor(tensor, num_gpus):
rt = tensor.clone()
dist.all_reduce(rt, op=dist.ReduceOp.SUM)
return rt.true_divide(num_gpus)
def init_multi_tensor_ema(model, ema_model):
model_weights = list(model.state_dict().values())
ema_model_weights = list(ema_model.state_dict().values())
ema_overflow_buf = torch.cuda.IntTensor([0])
return model_weights, ema_model_weights, ema_overflow_buf
def apply_multi_tensor_ema(decay, model_weights, ema_model_weights, overflow_buf):
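    """Fused EMA update: ema <- decay * ema + (1 - decay) * model.

    apex's multi_tensor_axpby computes out = a*x + b*y across all listed
    tensors in one fused kernel (65536 is its chunk size; the trailing -1
    is the kernel's arg_to_check flag).
    """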
amp_C.multi_tensor_axpby(
65536, overflow_buf,
[ema_model_weights, model_weights, ema_model_weights],
decay, 1-decay, -1)
def apply_ema(model, ema_model, decay):
if not decay:
return
sd = getattr(model, 'module', model).state_dict()
for k, v in ema_model.state_dict().items():
v.copy_(decay * v + (1 - decay) * sd[k])
@torch.no_grad()
def evaluate(epoch, step, val_loader, val_feat_proc, labels, model,
ema_model, ctc_loss, greedy_decoder, use_amp, use_dali=False):
for model, subset in [(model, 'dev'), (ema_model, 'dev_ema')]:
if model is None:
continue
model.eval()
torch.cuda.synchronize()
start_time = time.time()
agg = {'losses': [], 'preds': [], 'txts': []}
for batch in val_loader:
if use_dali:
# with DALI, the data is already on GPU
feat, feat_lens, txt, txt_lens = batch
if val_feat_proc is not None:
feat, feat_lens = val_feat_proc(feat, feat_lens)
else:
batch = [t.cuda(non_blocking=True) for t in batch]
audio, audio_lens, txt, txt_lens = batch
feat, feat_lens = val_feat_proc(audio, audio_lens)
with torch.cuda.amp.autocast(enabled=use_amp):
log_probs, enc_lens = model(feat, feat_lens)
loss = ctc_loss(log_probs, txt, enc_lens, txt_lens)
pred = greedy_decoder(log_probs)
agg['losses'] += helpers.gather_losses([loss])
agg['preds'] += helpers.gather_predictions([pred], labels)
agg['txts'] += helpers.gather_transcripts([txt], [txt_lens], labels)
wer, loss = process_evaluation_epoch(agg)
torch.cuda.synchronize()
log(() if epoch is None else (epoch,),
step, subset, {'loss': loss, 'wer': 100.0 * wer,
'took': time.time() - start_time})
model.train()
return wer
def main():
args = parse_args()
assert(torch.cuda.is_available())
assert args.prediction_frequency % args.log_frequency == 0
torch.backends.cudnn.benchmark = args.cudnn_benchmark
# set up distributed training
multi_gpu = int(os.environ.get('WORLD_SIZE', 1)) > 1
if multi_gpu:
torch.cuda.set_device(args.local_rank)
dist.init_process_group(backend='nccl', init_method='env://')
world_size = dist.get_world_size()
print_once(f'Distributed training with {world_size} GPUs\n')
else:
world_size = 1
if args.seed is not None:
torch.manual_seed(args.seed + args.local_rank)
np.random.seed(args.seed + args.local_rank)
random.seed(args.seed + args.local_rank)
init_log(args)
cfg = config.load(args.model_config)
config.apply_config_overrides(cfg, args)
symbols = helpers.add_ctc_blank(cfg['labels'])
assert args.grad_accumulation >= 1
batch_size = args.gpu_batch_size
print_once('Setting up datasets...')
train_dataset_kw, train_features_kw = config.input(cfg, 'train')
val_dataset_kw, val_features_kw = config.input(cfg, 'val')
use_dali = args.dali_device in ('cpu', 'gpu')
if use_dali:
assert train_dataset_kw['ignore_offline_speed_perturbation'], \
"DALI doesn't support offline speed perturbation"
# pad_to_max_duration is not supported by DALI - use simple padders instead
if train_features_kw['pad_to_max_duration']:
train_feat_proc = BaseFeatures(
pad_align=train_features_kw['pad_align'],
pad_to_max_duration=True,
max_duration=train_features_kw['max_duration'],
sample_rate=train_features_kw['sample_rate'],
window_size=train_features_kw['window_size'],
window_stride=train_features_kw['window_stride'])
train_features_kw['pad_to_max_duration'] = False
else:
train_feat_proc = None
if val_features_kw['pad_to_max_duration']:
val_feat_proc = BaseFeatures(
pad_align=val_features_kw['pad_align'],
pad_to_max_duration=True,
max_duration=val_features_kw['max_duration'],
sample_rate=val_features_kw['sample_rate'],
window_size=val_features_kw['window_size'],
window_stride=val_features_kw['window_stride'])
val_features_kw['pad_to_max_duration'] = False
else:
val_feat_proc = None
train_loader = DaliDataLoader(gpu_id=args.local_rank,
dataset_path=args.dataset_dir,
config_data=train_dataset_kw,
config_features=train_features_kw,
json_names=args.train_manifests,
batch_size=batch_size,
grad_accumulation_steps=args.grad_accumulation,
pipeline_type="train",
device_type=args.dali_device,
symbols=symbols)
val_loader = DaliDataLoader(gpu_id=args.local_rank,
dataset_path=args.dataset_dir,
config_data=val_dataset_kw,
config_features=val_features_kw,
json_names=args.val_manifests,
batch_size=batch_size,
pipeline_type="val",
device_type=args.dali_device,
symbols=symbols)
else:
train_dataset_kw, train_features_kw = config.input(cfg, 'train')
train_dataset = AudioDataset(args.dataset_dir,
args.train_manifests,
symbols,
**train_dataset_kw)
train_loader = get_data_loader(train_dataset,
batch_size,
multi_gpu=multi_gpu,
shuffle=True,
num_workers=4)
train_feat_proc = FilterbankFeatures(**train_features_kw)
val_dataset_kw, val_features_kw = config.input(cfg, 'val')
val_dataset = AudioDataset(args.dataset_dir,
args.val_manifests,
symbols,
**val_dataset_kw)
val_loader = get_data_loader(val_dataset,
batch_size,
multi_gpu=multi_gpu,
shuffle=False,
num_workers=4,
drop_last=False)
val_feat_proc = FilterbankFeatures(**val_features_kw)
dur = train_dataset.duration / 3600
dur_f = train_dataset.duration_filtered / 3600
nsampl = len(train_dataset)
print_once(f'Training samples: {nsampl} ({dur:.1f}h, '
f'filtered {dur_f:.1f}h)')
if train_feat_proc is not None:
train_feat_proc.cuda()
if val_feat_proc is not None:
val_feat_proc.cuda()
steps_per_epoch = len(train_loader) // args.grad_accumulation
# set up the model
model = QuartzNet(encoder_kw=config.encoder(cfg),
decoder_kw=config.decoder(cfg, n_classes=len(symbols)))
model.cuda()
ctc_loss = CTCLossNM(n_classes=len(symbols))
greedy_decoder = GreedyCTCDecoder()
print_once(f'Model size: {num_weights(model) / 10**6:.1f}M params\n')
# optimization
kw = {'lr': args.lr, 'weight_decay': args.weight_decay}
if args.optimizer == "novograd":
optimizer = Novograd(model.parameters(), **kw)
elif args.optimizer == "adamw":
optimizer = AdamW(model.parameters(), **kw)
elif args.optimizer == 'lamb98':
optimizer = FusedLAMB(model.parameters(), betas=(0.9, 0.98), eps=1e-9,
**kw)
elif args.optimizer == 'fused_novograd':
optimizer = FusedNovoGrad(model.parameters(), betas=(0.95, 0),
bias_correction=False, reg_inside_moment=True,
grad_averaging=False, **kw)
else:
raise ValueError(f'Invalid optimizer "{args.optimizer}"')
scaler = torch.cuda.amp.GradScaler(enabled=args.amp)
adjust_lr = lambda step, epoch, optimizer: lr_policy(
step, epoch, args.lr, optimizer, steps_per_epoch=steps_per_epoch,
warmup_epochs=args.warmup_epochs, hold_epochs=args.hold_epochs,
num_epochs=args.epochs, policy=args.lr_policy, min_lr=args.min_lr,
exp_gamma=args.lr_exp_gamma)
if args.ema > 0:
ema_model = copy.deepcopy(model)
else:
ema_model = None
if multi_gpu:
model = torch.nn.parallel.DistributedDataParallel(
model, device_ids=[args.local_rank], output_device=args.local_rank)
# load checkpoint
meta = {'best_wer': 10**6, 'start_epoch': 0}
checkpointer = Checkpointer(args.output_dir, 'QuartzNet',
args.keep_milestones)
if args.resume:
args.ckpt = checkpointer.last_checkpoint() or args.ckpt
if args.ckpt is not None:
checkpointer.load(args.ckpt, model, ema_model, optimizer, scaler, meta)
start_epoch = meta['start_epoch']
best_wer = meta['best_wer']
epoch = 1
step = start_epoch * steps_per_epoch + 1
# training loop
model.train()
if args.ema > 0.0:
mt_ema_params = init_multi_tensor_ema(model, ema_model)
# pre-allocate
if args.pre_allocate_range is not None:
n_feats = train_features_kw['n_filt']
pad_align = train_features_kw['pad_align']
a, b = args.pre_allocate_range
for n_frames in range(a, b + pad_align, pad_align):
print_once(f'Pre-allocation ({batch_size}x{n_feats}x{n_frames})...')
feat = torch.randn(batch_size, n_feats, n_frames, device='cuda')
feat_lens = torch.ones(batch_size, device='cuda').fill_(n_frames)
txt = torch.randint(high=len(symbols)-1, size=(batch_size, 100),
device='cuda')
txt_lens = torch.ones(batch_size, device='cuda').fill_(100)
with torch.cuda.amp.autocast(enabled=args.amp):
log_probs, enc_lens = model(feat, feat_lens)
del feat
loss = ctc_loss(log_probs, txt, enc_lens, txt_lens)
loss.backward()
model.zero_grad()
torch.cuda.empty_cache()
bmark_stats = BenchmarkStats()
for epoch in range(start_epoch + 1, args.epochs + 1):
if multi_gpu and not use_dali:
train_loader.sampler.set_epoch(epoch)
torch.cuda.synchronize()
epoch_start_time = time.time()
epoch_utts = 0
epoch_loss = 0
accumulated_batches = 0
for batch in train_loader:
if accumulated_batches == 0:
step_loss = 0
step_utts = 0
step_start_time = time.time()
if use_dali:
# with DALI, the data is already on GPU
feat, feat_lens, txt, txt_lens = batch
if train_feat_proc is not None:
feat, feat_lens = train_feat_proc(feat, feat_lens)
else:
batch = [t.cuda(non_blocking=True) for t in batch]
audio, audio_lens, txt, txt_lens = batch
feat, feat_lens = train_feat_proc(audio, audio_lens)
# Use context manager to prevent redundant accumulation of gradients
if (multi_gpu and accumulated_batches + 1 < args.grad_accumulation):
ctx = model.no_sync()
else:
ctx = empty_context()
with ctx:
with torch.cuda.amp.autocast(enabled=args.amp):
log_probs, enc_lens = model(feat, feat_lens)
loss = ctc_loss(log_probs, txt, enc_lens, txt_lens)
loss /= args.grad_accumulation
if multi_gpu:
reduced_loss = reduce_tensor(loss.data, world_size)
else:
reduced_loss = loss
if torch.isnan(reduced_loss).any():
print_once('WARNING: loss is NaN; skipping update')
continue
else:
step_loss += reduced_loss.item()
step_utts += batch[0].size(0) * world_size
epoch_utts += batch[0].size(0) * world_size
accumulated_batches += 1
scaler.scale(loss).backward()
if accumulated_batches % args.grad_accumulation == 0:
epoch_loss += step_loss
scaler.step(optimizer)
scaler.update()
adjust_lr(step, epoch, optimizer)
optimizer.zero_grad()
if args.ema > 0.0:
apply_multi_tensor_ema(args.ema, *mt_ema_params)
if step % args.log_frequency == 0:
preds = greedy_decoder(log_probs)
wer, pred_utt, ref = greedy_wer(preds, txt, txt_lens, symbols)
if step % args.prediction_frequency == 0:
print_once(f' Decoded: {pred_utt[:90]}')
print_once(f' Reference: {ref[:90]}')
step_time = time.time() - step_start_time
log((epoch, step % steps_per_epoch or steps_per_epoch, steps_per_epoch),
step, 'train',
{'loss': step_loss,
'wer': 100.0 * wer,
'throughput': step_utts / step_time,
'took': step_time,
'lrate': optimizer.param_groups[0]['lr']})
step_start_time = time.time()
if step % args.eval_frequency == 0:
wer = evaluate(epoch, step, val_loader, val_feat_proc,
symbols, model, ema_model, ctc_loss,
greedy_decoder, args.amp, use_dali)
if wer < best_wer and epoch >= args.save_best_from:
checkpointer.save(model, ema_model, optimizer, scaler,
epoch, step, best_wer, is_best=True)
best_wer = wer
step += 1
accumulated_batches = 0
# end of step
# The DALI iterator needs to be exhausted;
# if not using DALI, simulate drop_last=True with grad accumulation
if not use_dali and step > steps_per_epoch * epoch:
break
torch.cuda.synchronize()
epoch_time = time.time() - epoch_start_time
epoch_loss /= steps_per_epoch
log((epoch,), None, 'train_avg', {'throughput': epoch_utts / epoch_time,
'took': epoch_time,
'loss': epoch_loss})
bmark_stats.update(epoch_utts, epoch_time, epoch_loss)
if epoch % args.save_frequency == 0 or epoch in args.keep_milestones:
checkpointer.save(model, ema_model, optimizer, scaler, epoch, step,
best_wer)
if 0 < args.epochs_this_job <= epoch - start_epoch:
print_once(f'Finished after {args.epochs_this_job} epochs.')
break
# end of epoch
log((), None, 'train_avg', bmark_stats.get(args.benchmark_epochs_num))
evaluate(None, step, val_loader, val_feat_proc, symbols, model,
ema_model, ctc_loss, greedy_decoder, args.amp, use_dali)
if epoch == args.epochs:
checkpointer.save(model, ema_model, optimizer, scaler, epoch, step,
best_wer)
flush_log()
if __name__ == "__main__":
main()
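# Typical multi-GPU launch (editor's sketch; dataset/config paths are
# placeholders -- every flag below is defined in parse_args above):
#   torchrun --nproc_per_node=8 train.py --amp \
#       --model_config <quartznet_config.yaml> \
#       --train_manifests <train.json> --val_manifests <dev.json> \
#       --dataset_dir <dataset_root> --output_dir <results_dir>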
|
TensorFlow/Recommendation/NCF | NCF | .gitignore | data/
|
TensorFlow/Segmentation/UNet_Medical/examples | examples | unet_TRAIN_TF-AMP_1GPU | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script launches U-Net run in TF-AMP on 1 GPU and runs 5-fold cross-validation training for 6400 iterations.
# Usage:
# bash unet_TRAIN_TF-AMP_1GPU.sh <path to dataset> <path to results directory> <batch size>
horovodrun -np 1 python main.py --data_dir $1 --model_dir $2 --log_every 100 --max_steps 6400 --batch_size $3 --exec_mode train_and_evaluate --crossvalidation_idx 0 --augment --xla --amp > $2/log_TF-AMP_1GPU_fold0.txt
horovodrun -np 1 python main.py --data_dir $1 --model_dir $2 --log_every 100 --max_steps 6400 --batch_size $3 --exec_mode train_and_evaluate --crossvalidation_idx 1 --augment --xla --amp > $2/log_TF-AMP_1GPU_fold1.txt
horovodrun -np 1 python main.py --data_dir $1 --model_dir $2 --log_every 100 --max_steps 6400 --batch_size $3 --exec_mode train_and_evaluate --crossvalidation_idx 2 --augment --xla --amp > $2/log_TF-AMP_1GPU_fold2.txt
horovodrun -np 1 python main.py --data_dir $1 --model_dir $2 --log_every 100 --max_steps 6400 --batch_size $3 --exec_mode train_and_evaluate --crossvalidation_idx 3 --augment --xla --amp > $2/log_TF-AMP_1GPU_fold3.txt
horovodrun -np 1 python main.py --data_dir $1 --model_dir $2 --log_every 100 --max_steps 6400 --batch_size $3 --exec_mode train_and_evaluate --crossvalidation_idx 4 --augment --xla --amp > $2/log_TF-AMP_1GPU_fold4.txt
python utils/parse_results.py --model_dir $2 --exec_mode convergence --env TF-AMP_1GPU |
TensorFlow2/LanguageModeling/BERT/official/utils/flags | flags | _distribution | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Flags related to distributed execution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
import tensorflow as tf
from official.utils.flags._conventions import help_wrap
def define_distribution(worker_hosts=True, task_index=True):
"""Register distributed execution flags.
Args:
worker_hosts: Create a flag for specifying comma-separated list of workers.
task_index: Create a flag for specifying index of task.
Returns:
A list of flags for core.py to marks as key flags.
"""
key_flags = []
if worker_hosts:
flags.DEFINE_string(
name='worker_hosts', default=None,
help=help_wrap(
'Comma-separated list of worker ip:port pairs for running '
'multi-worker models with DistributionStrategy. The user would '
'start the program on each host with identical value for this '
'flag.'))
if task_index:
flags.DEFINE_integer(
name='task_index', default=-1,
help=help_wrap('If multi-worker training, the task_index of this '
'worker.'))
return key_flags
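# Minimal usage sketch (editor's addition): register the flags, then parse a
# hypothetical two-worker configuration.
#   define_distribution()
#   flags.FLAGS(['prog', '--worker_hosts=10.0.0.1:2222,10.0.0.2:2222',
#                '--task_index=0'])
#   assert flags.FLAGS.task_index == 0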
|
TensorFlow/Detection/SSD/models/research/object_detection/utils | utils | np_box_list_ops | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Bounding Box List operations for Numpy BoxLists.
Example box operations that are supported:
* Areas: compute bounding box areas
* IOU: pairwise intersection-over-union scores
"""
import numpy as np
from object_detection.utils import np_box_list
from object_detection.utils import np_box_ops
class SortOrder(object):
"""Enum class for sort order.
Attributes:
ascend: ascend order.
descend: descend order.
"""
ASCEND = 1
DESCEND = 2
def area(boxlist):
"""Computes area of boxes.
Args:
boxlist: BoxList holding N boxes
Returns:
a numpy array with shape [N] representing box areas
"""
y_min, x_min, y_max, x_max = boxlist.get_coordinates()
return (y_max - y_min) * (x_max - x_min)
def intersection(boxlist1, boxlist2):
"""Compute pairwise intersection areas between boxes.
Args:
boxlist1: BoxList holding N boxes
boxlist2: BoxList holding M boxes
Returns:
a numpy array with shape [N, M] representing pairwise intersection area
"""
return np_box_ops.intersection(boxlist1.get(), boxlist2.get())
def iou(boxlist1, boxlist2):
"""Computes pairwise intersection-over-union between box collections.
Args:
boxlist1: BoxList holding N boxes
boxlist2: BoxList holding M boxes
Returns:
a numpy array with shape [N, M] representing pairwise iou scores.
"""
return np_box_ops.iou(boxlist1.get(), boxlist2.get())
def ioa(boxlist1, boxlist2):
"""Computes pairwise intersection-over-area between box collections.
Intersection-over-area (ioa) between two boxes box1 and box2 is defined as
their intersection area over box2's area. Note that ioa is not symmetric,
that is, IOA(box1, box2) != IOA(box2, box1).
Args:
boxlist1: BoxList holding N boxes
boxlist2: BoxList holding M boxes
Returns:
a numpy array with shape [N, M] representing pairwise ioa scores.
"""
return np_box_ops.ioa(boxlist1.get(), boxlist2.get())
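# Worked example of the asymmetry (editor's addition): for box1 = [0, 0, 2, 2]
# (area 4) and box2 = [0, 0, 1, 1] (area 1) the intersection is 1, so
# ioa(box1_list, box2_list) = 1 / 1 = 1.0 while
# ioa(box2_list, box1_list) = 1 / 4 = 0.25.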
def gather(boxlist, indices, fields=None):
"""Gather boxes from BoxList according to indices and return new BoxList.
By default, gather returns boxes corresponding to the input index list, as
well as all additional fields stored in the boxlist (indexing into the
first dimension). However one can optionally only gather from a
subset of fields.
Args:
boxlist: BoxList holding N boxes
indices: a 1-d numpy array of type int_
fields: (optional) list of fields to also gather from. If None (default),
all fields are gathered from. Pass an empty fields list to only gather
the box coordinates.
Returns:
subboxlist: a BoxList corresponding to the subset of the input BoxList
specified by indices
Raises:
ValueError: if specified field is not contained in boxlist or if the
indices are not of type int_
"""
if indices.size:
if np.amax(indices) >= boxlist.num_boxes() or np.amin(indices) < 0:
raise ValueError('indices are out of valid range.')
subboxlist = np_box_list.BoxList(boxlist.get()[indices, :])
if fields is None:
fields = boxlist.get_extra_fields()
for field in fields:
extra_field_data = boxlist.get_field(field)
subboxlist.add_field(field, extra_field_data[indices, ...])
return subboxlist
def sort_by_field(boxlist, field, order=SortOrder.DESCEND):
"""Sort boxes and associated fields according to a scalar field.
A common use case is reordering the boxes according to descending scores.
Args:
boxlist: BoxList holding N boxes.
field: A BoxList field for sorting and reordering the BoxList.
order: (Optional) 'descend' or 'ascend'. Default is descend.
Returns:
sorted_boxlist: A sorted BoxList with the field in the specified order.
Raises:
ValueError: if specified field does not exist or is not of single dimension.
ValueError: if the order is not either descend or ascend.
"""
if not boxlist.has_field(field):
raise ValueError('Field ' + field + ' does not exist')
if len(boxlist.get_field(field).shape) != 1:
raise ValueError('Field ' + field + ' should be single dimension.')
if order != SortOrder.DESCEND and order != SortOrder.ASCEND:
raise ValueError('Invalid sort order')
field_to_sort = boxlist.get_field(field)
sorted_indices = np.argsort(field_to_sort)
if order == SortOrder.DESCEND:
sorted_indices = sorted_indices[::-1]
return gather(boxlist, sorted_indices)
def non_max_suppression(boxlist,
max_output_size=10000,
iou_threshold=1.0,
score_threshold=-10.0):
"""Non maximum suppression.
This op greedily selects a subset of detection bounding boxes, pruning
away boxes that have high IOU (intersection over union) overlap (> thresh)
with already selected boxes. In each iteration, the detected bounding box with
highest score in the available pool is selected.
Args:
boxlist: BoxList holding N boxes. Must contain a 'scores' field
representing detection scores. All scores belong to the same class.
max_output_size: maximum number of retained boxes
iou_threshold: intersection over union threshold.
score_threshold: minimum score threshold. Remove the boxes with scores
less than this value. Default value is set to -10. A very
low threshold to pass pretty much all the boxes, unless
the user sets a different score threshold.
Returns:
a BoxList holding M boxes where M <= max_output_size
Raises:
ValueError: if 'scores' field does not exist
ValueError: if threshold is not in [0, 1]
ValueError: if max_output_size < 0
"""
if not boxlist.has_field('scores'):
raise ValueError('Field scores does not exist')
if iou_threshold < 0. or iou_threshold > 1.0:
raise ValueError('IOU threshold must be in [0, 1]')
if max_output_size < 0:
raise ValueError('max_output_size must be non-negative.')
boxlist = filter_scores_greater_than(boxlist, score_threshold)
if boxlist.num_boxes() == 0:
return boxlist
boxlist = sort_by_field(boxlist, 'scores')
# Prevent further computation if NMS is disabled.
if iou_threshold == 1.0:
if boxlist.num_boxes() > max_output_size:
selected_indices = np.arange(max_output_size)
return gather(boxlist, selected_indices)
else:
return boxlist
boxes = boxlist.get()
num_boxes = boxlist.num_boxes()
# is_index_valid is True only for the boxes that remain valid.
is_index_valid = np.full(num_boxes, 1, dtype=bool)
selected_indices = []
num_output = 0
for i in range(num_boxes):
if num_output < max_output_size:
if is_index_valid[i]:
num_output += 1
selected_indices.append(i)
is_index_valid[i] = False
valid_indices = np.where(is_index_valid)[0]
if valid_indices.size == 0:
break
intersect_over_union = np_box_ops.iou(
np.expand_dims(boxes[i, :], axis=0), boxes[valid_indices, :])
intersect_over_union = np.squeeze(intersect_over_union, axis=0)
is_index_valid[valid_indices] = np.logical_and(
is_index_valid[valid_indices],
intersect_over_union <= iou_threshold)
return gather(boxlist, np.array(selected_indices))
def multi_class_non_max_suppression(boxlist, score_thresh, iou_thresh,
max_output_size):
"""Multi-class version of non maximum suppression.
This op greedily selects a subset of detection bounding boxes, pruning
away boxes that have high IOU (intersection over union) overlap (> thresh)
with already selected boxes. It operates independently for each class for
which scores are provided (via the scores field of the input box_list),
pruning boxes with score less than a provided threshold prior to
applying NMS.
Args:
boxlist: BoxList holding N boxes. Must contain a 'scores' field
representing detection scores. This scores field is a tensor that can
be 1 dimensional (in the case of a single class) or 2-dimensional, in
which case we assume that it takes the shape [num_boxes, num_classes].
We further assume that this rank is known statically and that
scores.shape[1] is also known (i.e., the number of classes is fixed
and known at graph construction time).
score_thresh: scalar threshold for score (low scoring boxes are removed).
iou_thresh: scalar threshold for IOU (boxes that have high IOU overlap
with previously selected boxes are removed).
max_output_size: maximum number of retained boxes per class.
Returns:
a BoxList holding M boxes with a rank-1 scores field representing
corresponding scores for each box with scores sorted in decreasing order
and a rank-1 classes field representing a class label for each box.
Raises:
ValueError: if iou_thresh is not in [0, 1] or if input boxlist does not have
a valid scores field.
"""
if not 0 <= iou_thresh <= 1.0:
raise ValueError('thresh must be between 0 and 1')
if not isinstance(boxlist, np_box_list.BoxList):
raise ValueError('boxlist must be a BoxList')
if not boxlist.has_field('scores'):
raise ValueError('input boxlist must have \'scores\' field')
scores = boxlist.get_field('scores')
if len(scores.shape) == 1:
scores = np.reshape(scores, [-1, 1])
elif len(scores.shape) == 2:
if scores.shape[1] is None:
raise ValueError('scores field must have statically defined second '
'dimension')
else:
raise ValueError('scores field must be of rank 1 or 2')
num_boxes = boxlist.num_boxes()
num_scores = scores.shape[0]
num_classes = scores.shape[1]
if num_boxes != num_scores:
raise ValueError('Incorrect scores field length: %d (expected %d).' % (num_scores, num_boxes))
selected_boxes_list = []
for class_idx in range(num_classes):
boxlist_and_class_scores = np_box_list.BoxList(boxlist.get())
class_scores = np.reshape(scores[0:num_scores, class_idx], [-1])
boxlist_and_class_scores.add_field('scores', class_scores)
boxlist_filt = filter_scores_greater_than(boxlist_and_class_scores,
score_thresh)
nms_result = non_max_suppression(boxlist_filt,
max_output_size=max_output_size,
iou_threshold=iou_thresh,
score_threshold=score_thresh)
nms_result.add_field(
'classes', np.zeros_like(nms_result.get_field('scores')) + class_idx)
selected_boxes_list.append(nms_result)
selected_boxes = concatenate(selected_boxes_list)
sorted_boxes = sort_by_field(selected_boxes, 'scores')
return sorted_boxes
def scale(boxlist, y_scale, x_scale):
"""Scale box coordinates in x and y dimensions.
Args:
boxlist: BoxList holding N boxes
y_scale: float
x_scale: float
Returns:
boxlist: BoxList holding N boxes
"""
y_min, x_min, y_max, x_max = np.array_split(boxlist.get(), 4, axis=1)
y_min = y_scale * y_min
y_max = y_scale * y_max
x_min = x_scale * x_min
x_max = x_scale * x_max
scaled_boxlist = np_box_list.BoxList(np.hstack([y_min, x_min, y_max, x_max]))
fields = boxlist.get_extra_fields()
for field in fields:
extra_field_data = boxlist.get_field(field)
scaled_boxlist.add_field(field, extra_field_data)
return scaled_boxlist
def clip_to_window(boxlist, window):
"""Clip bounding boxes to a window.
This op clips input bounding boxes (represented by bounding box
corners) to a window, optionally filtering out boxes that do not
overlap at all with the window.
Args:
boxlist: BoxList holding M_in boxes
window: a numpy array of shape [4] representing the
[y_min, x_min, y_max, x_max] window to which the op
should clip boxes.
Returns:
a BoxList holding M_out boxes where M_out <= M_in
"""
y_min, x_min, y_max, x_max = np.array_split(boxlist.get(), 4, axis=1)
win_y_min = window[0]
win_x_min = window[1]
win_y_max = window[2]
win_x_max = window[3]
y_min_clipped = np.fmax(np.fmin(y_min, win_y_max), win_y_min)
y_max_clipped = np.fmax(np.fmin(y_max, win_y_max), win_y_min)
x_min_clipped = np.fmax(np.fmin(x_min, win_x_max), win_x_min)
x_max_clipped = np.fmax(np.fmin(x_max, win_x_max), win_x_min)
clipped = np_box_list.BoxList(
np.hstack([y_min_clipped, x_min_clipped, y_max_clipped, x_max_clipped]))
clipped = _copy_extra_fields(clipped, boxlist)
areas = area(clipped)
nonzero_area_indices = np.reshape(np.nonzero(np.greater(areas, 0.0)),
[-1]).astype(np.int32)
return gather(clipped, nonzero_area_indices)
def prune_non_overlapping_boxes(boxlist1, boxlist2, minoverlap=0.0):
"""Prunes the boxes in boxlist1 that overlap less than thresh with boxlist2.
For each box in boxlist1, we want its IOA to be more than minoverlap with
at least one of the boxes in boxlist2. If it does not, we remove it.
Args:
boxlist1: BoxList holding N boxes.
boxlist2: BoxList holding M boxes.
minoverlap: Minimum required overlap between boxes, to count them as
overlapping.
Returns:
A pruned boxlist with size [N', 4].
"""
intersection_over_area = ioa(boxlist2, boxlist1) # [M, N] tensor
intersection_over_area = np.amax(intersection_over_area, axis=0) # [N] tensor
keep_bool = np.greater_equal(intersection_over_area, np.array(minoverlap))
keep_inds = np.nonzero(keep_bool)[0]
new_boxlist1 = gather(boxlist1, keep_inds)
return new_boxlist1
def prune_outside_window(boxlist, window):
"""Prunes bounding boxes that fall outside a given window.
This function prunes bounding boxes that even partially fall outside the given
window. See also ClipToWindow which only prunes bounding boxes that fall
completely outside the window, and clips any bounding boxes that partially
overflow.
Args:
boxlist: a BoxList holding M_in boxes.
window: a numpy array of size 4, representing [ymin, xmin, ymax, xmax]
of the window.
Returns:
pruned_corners: a tensor with shape [M_out, 4] where M_out <= M_in.
valid_indices: a tensor with shape [M_out] indexing the valid bounding boxes
in the input tensor.
"""
y_min, x_min, y_max, x_max = np.array_split(boxlist.get(), 4, axis=1)
win_y_min = window[0]
win_x_min = window[1]
win_y_max = window[2]
win_x_max = window[3]
coordinate_violations = np.hstack([np.less(y_min, win_y_min),
np.less(x_min, win_x_min),
np.greater(y_max, win_y_max),
np.greater(x_max, win_x_max)])
valid_indices = np.reshape(
np.where(np.logical_not(np.max(coordinate_violations, axis=1))), [-1])
return gather(boxlist, valid_indices), valid_indices
def concatenate(boxlists, fields=None):
"""Concatenate list of BoxLists.
This op concatenates a list of input BoxLists into a larger BoxList. It also
handles concatenation of BoxList fields as long as the field tensor shapes
are equal except for the first dimension.
Args:
boxlists: list of BoxList objects
fields: optional list of fields to also concatenate. By default, all
fields from the first BoxList in the list are included in the
concatenation.
Returns:
a BoxList with number of boxes equal to
sum([boxlist.num_boxes() for boxlist in BoxList])
Raises:
ValueError: if boxlists is invalid (i.e., is not a list, is empty, or
contains non BoxList objects), or if requested fields are not contained in
all boxlists
"""
if not isinstance(boxlists, list):
raise ValueError('boxlists should be a list')
if not boxlists:
raise ValueError('boxlists should have nonzero length')
for boxlist in boxlists:
if not isinstance(boxlist, np_box_list.BoxList):
raise ValueError('all elements of boxlists should be BoxList objects')
concatenated = np_box_list.BoxList(
np.vstack([boxlist.get() for boxlist in boxlists]))
if fields is None:
fields = boxlists[0].get_extra_fields()
for field in fields:
first_field_shape = boxlists[0].get_field(field).shape
first_field_shape = first_field_shape[1:]
for boxlist in boxlists:
if not boxlist.has_field(field):
raise ValueError('boxlist must contain all requested fields')
field_shape = boxlist.get_field(field).shape
field_shape = field_shape[1:]
if field_shape != first_field_shape:
raise ValueError('field %s must have same shape for all boxlists '
'except for the 0th dimension.' % field)
concatenated_field = np.concatenate(
[boxlist.get_field(field) for boxlist in boxlists], axis=0)
concatenated.add_field(field, concatenated_field)
return concatenated
def filter_scores_greater_than(boxlist, thresh):
"""Filter to keep only boxes with score exceeding a given threshold.
This op keeps the collection of boxes whose corresponding scores are
greater than the input threshold.
Args:
boxlist: BoxList holding N boxes. Must contain a 'scores' field
representing detection scores.
thresh: scalar threshold
Returns:
a BoxList holding M boxes where M <= N
Raises:
ValueError: if boxlist not a BoxList object or if it does not
have a scores field
"""
if not isinstance(boxlist, np_box_list.BoxList):
raise ValueError('boxlist must be a BoxList')
if not boxlist.has_field('scores'):
raise ValueError('input boxlist must have \'scores\' field')
scores = boxlist.get_field('scores')
if len(scores.shape) > 2:
raise ValueError('Scores should have rank 1 or 2')
if len(scores.shape) == 2 and scores.shape[1] != 1:
raise ValueError('Scores should have rank 1 or have shape '
'consistent with [None, 1]')
high_score_indices = np.reshape(np.where(np.greater(scores, thresh)),
[-1]).astype(np.int32)
return gather(boxlist, high_score_indices)
def change_coordinate_frame(boxlist, window):
"""Change coordinate frame of the boxlist to be relative to window's frame.
Given a window of the form [ymin, xmin, ymax, xmax],
changes bounding box coordinates from boxlist to be relative to this window
(e.g., the min corner maps to (0,0) and the max corner maps to (1,1)).
An example use case is data augmentation: where we are given groundtruth
boxes (boxlist) and would like to randomly crop the image to some
window (window). In this case we need to change the coordinate frame of
each groundtruth box to be relative to this new window.
Args:
boxlist: A BoxList object holding N boxes.
window: a size 4 1-D numpy array.
Returns:
Returns a BoxList object with N boxes.
"""
win_height = window[2] - window[0]
win_width = window[3] - window[1]
boxlist_new = scale(
np_box_list.BoxList(boxlist.get() -
[window[0], window[1], window[0], window[1]]),
1.0 / win_height, 1.0 / win_width)
_copy_extra_fields(boxlist_new, boxlist)
return boxlist_new
def _copy_extra_fields(boxlist_to_copy_to, boxlist_to_copy_from):
"""Copies the extra fields of boxlist_to_copy_from to boxlist_to_copy_to.
Args:
boxlist_to_copy_to: BoxList to which extra fields are copied.
boxlist_to_copy_from: BoxList from which fields are copied.
Returns:
boxlist_to_copy_to with extra fields.
"""
for field in boxlist_to_copy_from.get_extra_fields():
boxlist_to_copy_to.add_field(field, boxlist_to_copy_from.get_field(field))
return boxlist_to_copy_to
def _update_valid_indices_by_removing_high_iou_boxes(
selected_indices, is_index_valid, intersect_over_union, threshold):
max_iou = np.max(intersect_over_union[:, selected_indices], axis=1)
return np.logical_and(is_index_valid, max_iou <= threshold)
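if __name__ == '__main__':
    # Editor's sketch: two heavily overlapping boxes plus one disjoint box;
    # greedy NMS at IOU 0.5 keeps the higher-scoring box of the pair and the
    # disjoint box.
    _boxes = np_box_list.BoxList(np.array(
        [[0., 0., 1., 1.], [0., 0., 1., 0.9], [0., 2., 1., 3.]],
        dtype=np.float32))
    _boxes.add_field('scores', np.array([0.9, 0.8, 0.7], dtype=np.float32))
    _kept = non_max_suppression(_boxes, max_output_size=10,
                                iou_threshold=0.5, score_threshold=0.0)
    assert _kept.num_boxes() == 2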
|
PyTorch/Segmentation/MaskRCNN/pytorch/maskrcnn_benchmark/modeling/rpn | rpn | anchor_generator | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
import math
import numpy as np
import torch
from torch import nn
from maskrcnn_benchmark.structures.bounding_box import BoxList
class BufferList(nn.Module):
"""
Similar to nn.ParameterList, but for buffers
"""
def __init__(self, buffers=None):
super(BufferList, self).__init__()
if buffers is not None:
self.extend(buffers)
def extend(self, buffers):
offset = len(self)
for i, buffer in enumerate(buffers):
self.register_buffer(str(offset + i), buffer)
return self
def __len__(self):
return len(self._buffers)
def __iter__(self):
return iter(self._buffers.values())
class AnchorGenerator(nn.Module):
"""
For a set of image sizes and feature maps, computes a set
of anchors
"""
def __init__(
self,
sizes=(128, 256, 512),
aspect_ratios=(0.5, 1.0, 2.0),
anchor_strides=(8, 16, 32),
straddle_thresh=0,
):
super(AnchorGenerator, self).__init__()
if len(anchor_strides) == 1:
anchor_stride = anchor_strides[0]
cell_anchors = [
generate_anchors(anchor_stride, sizes, aspect_ratios).float()
]
else:
if len(anchor_strides) != len(sizes):
raise RuntimeError("FPN should have #anchor_strides == #sizes")
cell_anchors = [
generate_anchors(anchor_stride, (size,), aspect_ratios).float()
for anchor_stride, size in zip(anchor_strides, sizes)
]
self.strides = anchor_strides
self.cell_anchors = BufferList(cell_anchors)
self.straddle_thresh = straddle_thresh
def num_anchors_per_location(self):
return [len(cell_anchors) for cell_anchors in self.cell_anchors]
def grid_anchors(self, grid_sizes):
anchors = []
for size, stride, base_anchors in zip(
grid_sizes, self.strides, self.cell_anchors
):
grid_height, grid_width = size
device = base_anchors.device
shifts_x = torch.arange(
0, grid_width * stride, step=stride, dtype=torch.float32, device=device
)
shifts_y = torch.arange(
0, grid_height * stride, step=stride, dtype=torch.float32, device=device
)
shift_y, shift_x = torch.meshgrid(shifts_y, shifts_x)
shift_x = shift_x.reshape(-1)
shift_y = shift_y.reshape(-1)
shifts = torch.stack((shift_x, shift_y, shift_x, shift_y), dim=1)
anchors.append(
(shifts.view(-1, 1, 4) + base_anchors.view(1, -1, 4)).reshape(-1, 4)
)
return anchors
def add_visibility_to(self, boxlist):
image_width, image_height = boxlist.size
anchors = boxlist.bbox
if self.straddle_thresh >= 0:
inds_inside = (
(anchors[..., 0] >= -self.straddle_thresh)
& (anchors[..., 1] >= -self.straddle_thresh)
& (anchors[..., 2] < image_width + self.straddle_thresh)
& (anchors[..., 3] < image_height + self.straddle_thresh)
)
else:
device = anchors.device
inds_inside = torch.ones(anchors.shape[0], dtype=torch.bool, device=device)
boxlist.add_field("visibility", inds_inside)
def forward(self, image_list, feature_maps):
grid_sizes = [feature_map.shape[-2:] for feature_map in feature_maps]
anchors_over_all_feature_maps = self.grid_anchors(grid_sizes)
anchors = []
for i, (image_height, image_width) in enumerate(image_list.image_sizes):
anchors_in_image = []
for anchors_per_feature_map in anchors_over_all_feature_maps:
boxlist = BoxList(
anchors_per_feature_map, (image_width, image_height), mode="xyxy"
)
self.add_visibility_to(boxlist)
anchors_in_image.append(boxlist)
anchors.append(anchors_in_image)
return anchors
def make_anchor_generator(config):
anchor_sizes = config.MODEL.RPN.ANCHOR_SIZES
aspect_ratios = config.MODEL.RPN.ASPECT_RATIOS
anchor_stride = config.MODEL.RPN.ANCHOR_STRIDE
straddle_thresh = config.MODEL.RPN.STRADDLE_THRESH
if config.MODEL.RPN.USE_FPN:
assert len(anchor_stride) == len(
anchor_sizes
), "FPN should have len(ANCHOR_STRIDE) == len(ANCHOR_SIZES)"
else:
assert len(anchor_stride) == 1, "Non-FPN should have a single ANCHOR_STRIDE"
anchor_generator = AnchorGenerator(
anchor_sizes, aspect_ratios, anchor_stride, straddle_thresh
)
return anchor_generator
# Copyright (c) 2017-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
#
# Based on:
# --------------------------------------------------------
# Faster R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick and Sean Bell
# --------------------------------------------------------
# Verify that we compute the same anchors as Shaoqing's matlab implementation:
#
# >> load output/rpn_cachedir/faster_rcnn_VOC2007_ZF_stage1_rpn/anchors.mat
# >> anchors
#
# anchors =
#
# -83 -39 100 56
# -175 -87 192 104
# -359 -183 376 200
# -55 -55 72 72
# -119 -119 136 136
# -247 -247 264 264
# -35 -79 52 96
# -79 -167 96 184
# -167 -343 184 360
# array([[ -83., -39., 100., 56.],
# [-175., -87., 192., 104.],
# [-359., -183., 376., 200.],
# [ -55., -55., 72., 72.],
# [-119., -119., 136., 136.],
# [-247., -247., 264., 264.],
# [ -35., -79., 52., 96.],
# [ -79., -167., 96., 184.],
# [-167., -343., 184., 360.]])
def generate_anchors(
stride=16, sizes=(32, 64, 128, 256, 512), aspect_ratios=(0.5, 1, 2)
):
"""Generates a matrix of anchor boxes in (x1, y1, x2, y2) format. Anchors
are centered on stride / 2, have (approximate) sqrt areas of the specified
sizes, and aspect ratios as given.
"""
return _generate_anchors(
stride,
        np.array(sizes, dtype=float) / stride,
        np.array(aspect_ratios, dtype=float),
)
def _generate_anchors(base_size, scales, aspect_ratios):
"""Generate anchor (reference) windows by enumerating aspect ratios X
scales wrt a reference (0, 0, base_size - 1, base_size - 1) window.
"""
    anchor = np.array([1, 1, base_size, base_size], dtype=float) - 1
anchors = _ratio_enum(anchor, aspect_ratios)
anchors = np.vstack(
[_scale_enum(anchors[i, :], scales) for i in range(anchors.shape[0])]
)
return torch.from_numpy(anchors)
def _whctrs(anchor):
"""Return width, height, x center, and y center for an anchor (window)."""
w = anchor[2] - anchor[0] + 1
h = anchor[3] - anchor[1] + 1
x_ctr = anchor[0] + 0.5 * (w - 1)
y_ctr = anchor[1] + 0.5 * (h - 1)
return w, h, x_ctr, y_ctr
def _mkanchors(ws, hs, x_ctr, y_ctr):
"""Given a vector of widths (ws) and heights (hs) around a center
(x_ctr, y_ctr), output a set of anchors (windows).
"""
ws = ws[:, np.newaxis]
hs = hs[:, np.newaxis]
anchors = np.hstack(
(
x_ctr - 0.5 * (ws - 1),
y_ctr - 0.5 * (hs - 1),
x_ctr + 0.5 * (ws - 1),
y_ctr + 0.5 * (hs - 1),
)
)
return anchors
def _ratio_enum(anchor, ratios):
"""Enumerate a set of anchors for each aspect ratio wrt an anchor."""
w, h, x_ctr, y_ctr = _whctrs(anchor)
size = w * h
size_ratios = size / ratios
ws = np.round(np.sqrt(size_ratios))
hs = np.round(ws * ratios)
anchors = _mkanchors(ws, hs, x_ctr, y_ctr)
return anchors
def _scale_enum(anchor, scales):
"""Enumerate a set of anchors for each scale wrt an anchor."""
w, h, x_ctr, y_ctr = _whctrs(anchor)
ws = w * scales
hs = h * scales
anchors = _mkanchors(ws, hs, x_ctr, y_ctr)
return anchors
|
PyTorch/Translation/Transformer/fairseq | fairseq | ddp_trainer | # Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
#
#-------------------------------------------------------------------------
#
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Train a network across multiple GPUs.
"""
import math
from collections import defaultdict
from itertools import chain
import torch
import torch.nn.functional as F
from torch.cuda import amp
from apex.parallel import DistributedDataParallel as DDP
from fairseq import distributed_utils, optim, utils
from fairseq.optim import lr_scheduler
from fairseq.meters import TimeMeter, AverageMeter
from fairseq.criterions import CRITERION_REGISTRY
import dllogger as DLLogger
class DDPTrainer():
"""Main class for data parallel training.
This class supports data parallel training, where multiple workers each
have a full model replica and gradients are accumulated synchronously via
torch.distributed.all_reduce.
"""
def __init__(self, args, model):
if not torch.cuda.is_available():
raise NotImplementedError('Training on CPU is not supported')
self.args = args
self.model = model.cuda()
self.criterion = CRITERION_REGISTRY[args.criterion](args).cuda()
self.optimizer = optim.build_optimizer(self.args, self.model.parameters())
self.lr_scheduler = lr_scheduler.build_lr_scheduler(self.args, self.optimizer)
self.scaler = amp.GradScaler(enabled=self.args.amp, init_scale=2**15)
if self.args.distributed_world_size > 1:
self.model = DDP(model)
self._buffered_stats = defaultdict(lambda: [])
self._num_updates = 0
self._optim_history = None
self.throughput_meter = TimeMeter()
self.avg_loss_meter = AverageMeter()
def save_checkpoint(self, filename, extra_state):
"""Save all training state in a checkpoint file."""
if distributed_utils.is_master(self.args): # only save one checkpoint
utils.save_state(
filename, self.args, self.get_model(), self.criterion, self.optimizer,
self.lr_scheduler, self._num_updates, self._optim_history, extra_state,
)
def load_checkpoint(self, filename, load_optim=True):
"""Load all training state from a checkpoint file."""
extra_state, optim_history, last_optim_state = \
utils.load_model_state(filename, self.get_model())
if last_optim_state is not None:
# rebuild optimizer after loading model, since params may have changed
#self.optimizer = optim.build_optimizer(self.args, self.model.parameters())
self.lr_scheduler = lr_scheduler.build_lr_scheduler(self.args, self.optimizer)
if load_optim:
self._optim_history = optim_history
# only reload optimizer and lr_scheduler if they match
last_optim = self._optim_history[-1]
if last_optim['criterion_name'] == self.criterion.__class__.__name__:
self.lr_scheduler.load_state_dict(last_optim['lr_scheduler_state'])
if last_optim['optimizer_name'] == self.optimizer.__class__.__name__:
self.optimizer.load_state_dict(last_optim_state)
self._num_updates = last_optim['num_updates']
return extra_state
def train_step(self, sample, update_params=True, last_step=False):
"""Do forward, backward and parameter update."""
# Set seed based on args.seed and the update number so that we get
# reproducible results when resuming from checkpoints
seed = self.args.seed + self.get_num_updates()
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
self.model.train()
if isinstance(self.model, DDP):
if last_step:
self.model.disable_allreduce()
else:
self.model.enable_allreduce()
# forward and backward pass
sample = self._prepare_sample(sample)
loss, oom_fwd = self._forward(sample)
        # If this is the last batch, the forward pass is skipped on some workers.
        # A batch with sample_size 0 is not accounted for in the weighted loss.
logging_output = {
'ntokens': sample['ntokens'] if sample is not None else 0,
'nsentences': sample['target'].size(0) if sample is not None else 0,
'loss': utils.item(loss.data) if loss is not None else 0,
}
sample_size = sample['ntokens'] if sample is not None else 0
oom_bwd = self._backward(loss)
# buffer stats and logging outputs
self._buffered_stats['sample_sizes'].append(sample_size)
self._buffered_stats['logging_outputs'].append(logging_output)
self._buffered_stats['ooms_fwd'].append(oom_fwd)
self._buffered_stats['ooms_bwd'].append(oom_bwd)
# update parameters
if update_params and not last_step:
# gather logging outputs from all replicas
sample_sizes = self._buffered_stats['sample_sizes']
logging_outputs = self._buffered_stats['logging_outputs']
ooms_fwd = self._buffered_stats['ooms_fwd']
ooms_bwd = self._buffered_stats['ooms_bwd']
if self.args.distributed_world_size > 1:
sample_sizes, logging_outputs, ooms_fwd, ooms_bwd = map(
lambda l: list(chain.from_iterable(l)),
zip(*distributed_utils.all_gather_list(
(sample_sizes, logging_outputs, ooms_fwd, ooms_bwd)
))
)
ooms_fwd = sum(ooms_fwd)
ooms_bwd = sum(ooms_bwd)
ooms = ooms_fwd + ooms_bwd # this is always <= distributed_world_size
if ooms == self.args.distributed_world_size:
print('| WARNING: OOM in all workers, skipping batch')
self.zero_grad()
return
# aggregate stats and logging outputs
grad_denom = sum(sample_sizes)
for p in self.model.parameters():
if p.requires_grad and p.grad is not None:
p.grad /= grad_denom
self._opt()
# Handle logging
ntokens = sum(log.get('ntokens', 0) for log in logging_outputs)
self.throughput_meter.update(ntokens)
info_log_data = {
'tokens/s': self.throughput_meter.avg,
'tokens': ntokens,
'loss': sum(log.get('loss', 0) for log in logging_outputs) / ntokens / math.log(2)
}
self.avg_loss_meter.update(info_log_data['loss'])
debug_log_data = {
'batch_size': sum(log.get('nsentences', 0) for log in logging_outputs),
'lr': self.get_lr(),
'grad_denom': grad_denom,
'updates': 1
}
DLLogger.log(step=self._num_updates, data=info_log_data, verbosity=0)
DLLogger.log(step=self._num_updates, data=debug_log_data, verbosity=1)
self.clear_buffered_stats()
def _forward(self, sample):
loss = None
oom = 0
try:
if sample is not None:
with amp.autocast(enabled=self.args.amp):
# calculate loss and sample size
logits, _ = self.model(**sample['net_input'])
target = sample['target']
probs = F.log_softmax(logits, dim=-1, dtype=torch.float32)
loss = self.criterion(probs, target)
except RuntimeError as e:
if 'out of memory' in str(e):
                print('| WARNING: ran out of memory in worker {}, skipping batch'.format(
                    self.args.distributed_rank), flush=True)
oom = 1
loss = None
else:
raise e
return loss, oom
def _backward(self, loss):
oom = 0
if loss is not None:
try:
self.scaler.scale(loss).backward()
except RuntimeError as e:
if 'out of memory' in str(e):
                    print('| WARNING: ran out of memory in worker {}, skipping batch'.format(
                        self.args.distributed_rank), flush=True)
oom = 1
self.zero_grad()
else:
raise e
return oom
def _opt(self):
# take an optimization step
self.scaler.step(self.optimizer.optimizer)
self.scaler.update()
self.zero_grad()
self._num_updates += 1
# update learning rate
self.lr_scheduler.step_update(self._num_updates)
def valid_step(self, sample):
"""Do forward pass in evaluation mode."""
self.model.eval()
# forward pass
sample = self._prepare_sample(sample)
with torch.no_grad():
loss, oom_fwd = self._forward(sample)
logging_output = {
'ntokens': sample['ntokens'] if sample is not None else 0,
'nsentences': sample['target'].size(0) if sample is not None else 0,
}
loss = loss.item() if loss is not None else 0
assert not oom_fwd, 'Ran out of memory during validation'
# gather logging outputs from all GPUs
if self.args.distributed_world_size > 1:
losses, logging_outputs = zip(*distributed_utils.all_gather_list(
(loss, logging_output)
))
else:
losses = [loss]
logging_outputs = [logging_output]
weight = sum(log.get('ntokens', 0) for log in logging_outputs)
scaled_loss = sum(losses) / weight / math.log(2)
return scaled_loss
def dummy_train_step(self, dummy_batch):
"""Dummy training step for warming caching allocator."""
self.train_step(dummy_batch, update_params=False)
self.zero_grad()
self.clear_buffered_stats()
def zero_grad(self):
self.optimizer.zero_grad()
def clear_buffered_stats(self):
self._buffered_stats.clear()
def lr_step(self, epoch, val_loss=None):
"""Adjust the learning rate based on the validation loss."""
return self.lr_scheduler.step(epoch, val_loss)
def lr_step_update(self, num_updates):
"""Update the learning rate after each update."""
return self.lr_scheduler.step_update(num_updates)
def get_lr(self):
"""Get the current learning rate."""
return self.optimizer.get_lr()
def get_throughput_meter(self):
"""Get the throughput meter"""
return self.throughput_meter
def get_model(self):
"""Get the model replica."""
return self.model.module if isinstance(self.model, DDP) else self.model
def get_num_updates(self):
"""Get the number of parameters updates."""
return self._num_updates
def _prepare_sample(self, sample):
if not sample:
return None
return utils.move_to_cuda(sample)
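# Minimal driver sketch (illustrative only; `args`, `model`, `dataloader` and
# `update_freq` are assumed to be provided by the training entry point):
#
#   trainer = DDPTrainer(args, model)
#   trainer.dummy_train_step(dummy_batch)  # warm up the caching allocator
#   for i, sample in enumerate(dataloader):
#       trainer.train_step(sample, update_params=((i + 1) % update_freq == 0))
#   val_loss = trainer.valid_step(valid_sample)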
|
PyTorch/Recommendation/DLRM/notebooks | notebooks | README | <!-- #region -->
# DLRM Jupyter demo notebooks
This folder contains the demo notebooks for DLRM. The most convenient way to use these notebooks is via a docker container, which provides a self-contained, isolated and reproducible environment for all experiments. Refer to the [Quick Start Guide section](../README.md) of the Readme documentation for a comprehensive guide.
First, clone the repository:
```
git clone https://github.com/NVIDIA/DeepLearningExamples
cd DeepLearningExamples/PyTorch/Recommendation/DLRM
```
## Notebook list
### 1. Pytorch_DLRM_pyt_train_and_inference.ipynb: training and inference demo
To execute this notebook, first build the DLRM container:
```
docker build . -t nvidia_dlrm_pyt
```
Make a directory for storing DLRM data and start a docker container with:
```
mkdir -p data
docker run --runtime=nvidia -it --rm --ipc=host -v ${PWD}/data:/data nvidia_dlrm_pyt bash
```
Within the docker interactive bash session, start Jupyter with
```
export PYTHONPATH=/workspace/dlrm
jupyter notebook --ip 0.0.0.0 --port 8888
```
Then open the Jupyter GUI interface on your host machine at http://localhost:8888. Within the container, this demo notebook is located at `/workspace/dlrm/notebooks`.
<!-- #endregion -->
### 2. DLRM_Triton_inference_demo.ipynb: inference demo with the NVIDIA Triton Inference server.
To execute this notebook, first build the following inference container:
```
docker build -t dlrm-inference . -f triton/Dockerfile
```
Start in interactive docker session with:
```
docker run -it --rm --gpus device=0 --shm-size=1g --ulimit memlock=-1 --ulimit stack=67108864 --net=host -v <PATH_TO_SAVED_MODEL>:/models -v <PATH_TO_EXPORT_MODEL>:/repository dlrm-inference bash
```
where:
- PATH_TO_SAVED_MODEL: directory containing the trained DLRM models.
- PATH_TO_EXPORT_MODEL: directory which will contain the converted model to be used with the NVIDIA Triton inference server.
Within the docker interactive bash session, start Jupyter with
```
export PYTHONPATH=/workspace/dlrm
jupyter notebook --ip 0.0.0.0 --port 8888
```
Then open the Jupyter GUI interface on your host machine at http://localhost:8888. Within the container, this demo notebook is located at `/workspace/dlrm/notebooks`.
|
PyTorch/Classification/ConvNets/efficientnet/quantization | quantization | DGX1V-32G_efficientnet-quant-b4_FP32 | python ./multiproc.py \
--nproc_per_node 8 \
./quant_main.py /imagenet \
--arch efficientnet-quant-b4 \
--epochs 2 \
-j5 -p 500 \
--data-backend pytorch \
--optimizer rmsprop \
-b 32 \
--lr 4.09e-06 \
--momentum 0.9 \
--weight-decay 9.714e-04 \
--lr-schedule linear \
--rmsprop-alpha 0.853 \
--rmsprop-eps 0.00422 \
--pretrained-from-file "${1}"
|
PyTorch/SpeechSynthesis/Tacotron2/exports | exports | export_waveglow_trt_config | # *****************************************************************************
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the NVIDIA CORPORATION nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# *****************************************************************************
import os
import argparse
def parse_args(parser):
"""
Parse commandline arguments.
"""
parser.add_argument("--trtis_model_name",
type=str,
default='waveglow',
help="exports to appropriate directory for TRTIS")
parser.add_argument("--trtis_model_version",
type=int,
default=1,
help="exports to appropriate directory for TRTIS")
parser.add_argument('--fp16', action='store_true',
help='inference with mixed precision')
return parser
def main():
parser = argparse.ArgumentParser(
description='PyTorch WaveGlow TRTIS config exporter')
parser = parse_args(parser)
args = parser.parse_args()
# prepare repository
model_folder = os.path.join('./trtis_repo', args.trtis_model_name)
version_folder = os.path.join(model_folder, str(args.trtis_model_version))
if not os.path.exists(version_folder):
os.makedirs(version_folder)
# build the config for TRTIS
config_filename = os.path.join(model_folder, "config.pbtxt")
config_template = r"""
name: "{model_name}"
platform: "tensorrt_plan"
default_model_filename: "waveglow_fp16.engine"
max_batch_size: 1
input {{
name: "mel"
data_type: {fp_type}
dims: [80, -1, 1]
}}
input {{
name: "z"
data_type: {fp_type}
dims: [8, -1, 1]
}}
output {{
name: "audio"
data_type: {fp_type}
dims: [-1]
}}
"""
config_values = {
"model_name": args.trtis_model_name,
"fp_type": "TYPE_FP16" if args.fp16 else "TYPE_FP32"
}
with open(model_folder + "/config.pbtxt", "w") as file:
final_config_str = config_template.format_map(config_values)
file.write(final_config_str)
if __name__ == '__main__':
main()
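# Example invocation (illustrative; adjust the script path to your checkout):
#
#   python export_waveglow_trt_config.py --trtis_model_name waveglow --fp16
#
# This writes ./trtis_repo/waveglow/config.pbtxt with TYPE_FP16 inputs/outputs.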
|
Kaldi/SpeechRecognition/scripts/docker | docker | run_client | #!/bin/bash
set -e
results_dir=/data/results
if [ -d "$results_dir" ]
then
rm -rf $results_dir
fi
mkdir $results_dir
kaldi-asr-parallel-client $@
echo "Computing WER..."
/workspace/scripts/compute_wer.sh
rm -rf $results_dir
|
Tools/PyTorch/TimeSeriesPredictionPlatform/models/tft_pyt/triton/runner | runner | finalizer | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import pathlib
import shutil
from typing import Dict, List
import yaml
# method from PEP-366 to support relative import in executed modules
if __name__ == "__main__" and __package__ is None:
__package__ = pathlib.Path(__file__).parent.name
from .experiment import ExperimentResult
from .logger import LOGGER
from .stages import ResultsType
from .summary import load_results, save_summary
from .task import Task
class Finalizer(abc.ABC):
@abc.abstractmethod
def exec(self, workspace: pathlib.Path, task: Task, results: List[ExperimentResult]):
pass
class ExperimentFinalizer(Finalizer):
"""
Public runner finalizer object.
"""
def exec(self, workspace: pathlib.Path, task: Task, results: List[ExperimentResult]):
results_path = workspace / task.results_dir
self._generate_summary(results_path, results)
self._finalize_task(results_path, task)
def _finalize_task(self, results_path: pathlib.Path, task: Task) -> None:
"""
Finalize task information
Args:
task: Task object
Returns:
None
"""
task.end()
file_path = results_path / task.filename
LOGGER.debug(f"Saving task details to file {file_path}")
task.to_file(file_path)
LOGGER.debug("Done")
LOGGER.info(f"Task details and results stored in {results_path}")
def _generate_summary(self, results_path: pathlib.Path, experiment_results: List[ExperimentResult]):
"""
Generate summary for results collected in all experiments
Args:
results_path: Path where results should be stored
experiment_results: Results collected from experiments
Returns:
"""
performance_offline_results = list()
performance_online_results = list()
results_mapping = {
ResultsType.TRITON_PERFORMANCE_OFFLINE: performance_offline_results,
ResultsType.TRITON_PERFORMANCE_ONLINE: performance_online_results,
}
self._collect_summary_results(experiment_results, results_mapping)
self._prepare_final_results(results_path, results_mapping)
def _collect_summary_results(self, experiment_results: List[ExperimentResult], results_mapping: Dict):
for experiment_result in experiment_results:
experiment = experiment_result.experiment
for result_type, result_path in experiment_result.results.items():
if not result_path.is_file() and not result_path.is_dir():
raise FileNotFoundError(f"Expected file {result_path} not found")
LOGGER.debug(f"Found {result_type} in {result_path} file.")
if result_type not in results_mapping:
LOGGER.debug(f"Results {result_type} for {experiment.experiment_id} are ignored in final summary.")
                    continue
LOGGER.debug(f"Collecting {result_type} results from {result_path} for summary")
result = load_results(
results_path=result_path,
parameters=experiment.parameters,
result_type=result_type,
)
results_mapping[result_type].extend(result)
LOGGER.debug(f"Done.")
def _prepare_final_results(self, results_path: pathlib.Path, results_mapping: Dict) -> None:
"""
Prepare summary files for offline and online performance
Args:
results_path: Path where results should be stored
results_mapping: Mapping with results type and collected results for given stage
Returns:
None
"""
for results_type, results in results_mapping.items():
save_summary(
result_type=results_type,
results=results,
summary_dir=results_path,
)
|
TensorFlow2/Recommendation/DLRM_and_DCNv2/nn | nn | lr_scheduler | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# author: Tomasz Grel ([email protected])
import tensorflow as tf
class LearningRateScheduler:
"""
LR Scheduler combining Polynomial Decay with Warmup at the beginning.
TF-based cond operations necessary for performance in graph mode.
"""
def __init__(self, optimizers, base_lr, warmup_steps, decay_start_step, decay_steps):
self.optimizers = optimizers
self.warmup_steps = tf.constant(warmup_steps, dtype=tf.int32)
self.decay_start_step = tf.constant(decay_start_step, dtype=tf.int32)
self.decay_steps = tf.constant(decay_steps)
self.decay_end_step = decay_start_step + decay_steps
self.poly_power = 2
self.base_lr = base_lr
with tf.device('/CPU:0'):
self.step = tf.Variable(0)
@tf.function
def __call__(self):
with tf.device('/CPU:0'):
# used for the warmup stage
warmup_step = tf.cast(1 / self.warmup_steps, tf.float32)
lr_factor_warmup = 1 - tf.cast(self.warmup_steps - self.step, tf.float32) * warmup_step
lr_factor_warmup = tf.cast(lr_factor_warmup, tf.float32)
# used for the constant stage
lr_factor_constant = tf.cast(1., tf.float32)
# used for the decay stage
lr_factor_decay = (self.decay_end_step - self.step) / self.decay_steps
lr_factor_decay = tf.math.pow(lr_factor_decay, self.poly_power)
lr_factor_decay = tf.cast(lr_factor_decay, tf.float32)
poly_schedule = tf.cond(self.step < self.decay_start_step, lambda: lr_factor_constant,
lambda: lr_factor_decay)
lr_factor = tf.cond(self.step < self.warmup_steps, lambda: lr_factor_warmup,
lambda: poly_schedule)
lr = self.base_lr * lr_factor
for optimizer in self.optimizers:
optimizer.lr.assign(lr)
self.step.assign(self.step + 1)
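# Illustrative usage (a sketch; the values below are made up): the LR warms up
# linearly to `base_lr` over `warmup_steps`, stays constant until
# `decay_start_step`, then follows a quadratic polynomial decay to zero over
# `decay_steps`. Call the scheduler once per training step.
#
#   optimizer = tf.keras.optimizers.SGD(learning_rate=0.0)  # LR is overwritten
#   scheduler = LearningRateScheduler([optimizer], base_lr=24.0,
#                                     warmup_steps=8000,
#                                     decay_start_step=48000,
#                                     decay_steps=24000)
#   for _ in range(num_steps):
#       scheduler()
#       ...  # forward/backward and optimizer.apply_gradients(...)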
|
TensorFlow/Segmentation/UNet_Medical/examples | examples | unet_INFER_BENCHMARK_TF-TRT | # Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script launches U-Net inference benchmarking in FP32 on 1 GPU with a user-provided batch size
# Usage ./unet_INFER_BENCHMARK_TF-TRT.sh <path to this repository> <path to dataset> <path to results directory> <batch size>
python $1/main.py --data_dir $2 --model_dir $3 --batch_size $4 --benchmark --exec_mode predict --augment --warmup_steps 200 --log_every 100 --max_steps 300 --xla
|
PyTorch/SpeechSynthesis/Tacotron2/trtis_cpp/src/trt/plugins/taco2AttentionPlugin | taco2AttentionPlugin | CMakeLists | #
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
file(GLOB SRCS *.cpp *.cu)
set(PLUGIN_SOURCES ${PLUGIN_SOURCES} ${SRCS})
set(PLUGIN_SOURCES ${PLUGIN_SOURCES} PARENT_SCOPE)
|
Tools/DGLPyTorch/SyntheticGraphGeneration/syngen/utils/types | types | __init__ | # Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# flake8: noqa
from .array_type import NDArray
from .column_type import ColumnType
from .dataframe_type import DataFrameType
from .metadata import MetaData
from .data_source_input_type import DataSourceInputType
|
PyTorch/Forecasting/TFT/triton/runner/maintainer | maintainer | maintainer_factory | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
if __name__ == "__main__" and __package__ is None:
__package__ = pathlib.Path(__file__).parent.name
from .docker.maintainer import DockerMaintainer
class MaintainerFactory:
@staticmethod
def create_docker_maintainer():
return DockerMaintainer()
|
PyTorch/Forecasting/TFT/triton/runner | runner | requirements | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
tqdm>=4.44.1
docker==5.0.0
colorama==0.4.4
pytz==2021.1
coloredlogs==15.0.1
py-cpuinfo==8.0.0
psutil==5.8.0
retrying>=1.3.3 |
TensorFlow2/Classification/ConvNets/utils | utils | tf_utils | import tensorflow as tf
import numpy as np
from tensorflow.python.profiler.model_analyzer import profile
from tensorflow.python.profiler.option_builder import ProfileOptionBuilder
def get_num_params(model, readable_format=True):
"""Return number of parameters and flops."""
nparams = np.sum([
np.prod(v.get_shape().as_list())
for v in model.trainable_weights
])
if readable_format:
nparams = float(nparams) * 1e-6
return nparams
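# Illustrative usage (a sketch, assuming a standard Keras application model):
#
#   model = tf.keras.applications.ResNet50()
#   get_num_params(model)                            # trainable params, in millions
#   get_num_flops(model, input_shape=(224, 224, 3))  # multiply-accumulates, in G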
def get_num_flops(model, input_shape, readable_format=True):
if hasattr(model,'model'):
model = model.model
forward_pass = tf.function(model.call, input_signature=[tf.TensorSpec(shape=(1,) + input_shape)])
graph_info = profile(forward_pass.get_concrete_function().graph,
options=ProfileOptionBuilder.float_operation())
    # The //2 is necessary since `profile` counts a multiply-accumulate
    # as two flops; here we report the total number of multiply-accumulate ops.
flops = graph_info.total_float_ops // 2
if readable_format:
flops = float(flops) * 1e-9
return flops |
PyTorch/Segmentation/MaskRCNN/pytorch/maskrcnn_benchmark/modeling/roi_heads/mask_head | mask_head | inference | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import numpy as np
import torch
from torch import nn
import torch.nn.functional as F
from maskrcnn_benchmark.structures.bounding_box import BoxList
# TODO check if want to return a single BoxList or a composite
# object
class MaskPostProcessor(nn.Module):
"""
From the results of the CNN, post process the masks
by taking the mask corresponding to the class with max
probability (which are of fixed size and directly output
by the CNN) and return the masks in the mask field of the BoxList.
If a masker object is passed, it will additionally
project the masks in the image according to the locations in boxes,
"""
def __init__(self, masker=None):
super(MaskPostProcessor, self).__init__()
self.masker = masker
def forward(self, x, boxes):
"""
Arguments:
x (Tensor): the mask logits
boxes (list[BoxList]): bounding boxes that are used as
                reference, one for each image
Returns:
results (list[BoxList]): one BoxList for each image, containing
the extra field mask
"""
mask_prob = x.sigmoid()
        # select masks corresponding to the predicted classes
num_masks = x.shape[0]
labels = [bbox.get_field("labels") for bbox in boxes]
labels = torch.cat(labels)
index = torch.arange(num_masks, device=labels.device)
mask_prob = mask_prob[index, labels][:, None]
boxes_per_image = [len(box) for box in boxes]
mask_prob = mask_prob.split(boxes_per_image, dim=0)
if self.masker:
mask_prob = self.masker(mask_prob, boxes)
results = []
for prob, box in zip(mask_prob, boxes):
bbox = BoxList(box.bbox, box.size, mode="xyxy")
for field in box.fields():
bbox.add_field(field, box.get_field(field))
bbox.add_field("mask", prob)
results.append(bbox)
return results
class MaskPostProcessorCOCOFormat(MaskPostProcessor):
"""
From the results of the CNN, post process the results
so that the masks are pasted in the image, and
additionally convert the results to COCO format.
"""
def forward(self, x, boxes):
import pycocotools.mask as mask_util
import numpy as np
results = super(MaskPostProcessorCOCOFormat, self).forward(x, boxes)
for result in results:
masks = result.get_field("mask").cpu()
rles = [
mask_util.encode(np.array(mask[0, :, :, np.newaxis], order="F"))[0]
for mask in masks
]
for rle in rles:
rle["counts"] = rle["counts"].decode("utf-8")
result.add_field("mask", rles)
return results
# the next two functions should be merged inside Masker
# but are kept here for the moment while we need them
# temporarily for paste_mask_in_image
def expand_boxes(boxes, scale):
w_half = (boxes[:, 2] - boxes[:, 0]) * .5
h_half = (boxes[:, 3] - boxes[:, 1]) * .5
x_c = (boxes[:, 2] + boxes[:, 0]) * .5
y_c = (boxes[:, 3] + boxes[:, 1]) * .5
w_half *= scale
h_half *= scale
boxes_exp = torch.zeros_like(boxes)
boxes_exp[:, 0] = x_c - w_half
boxes_exp[:, 2] = x_c + w_half
boxes_exp[:, 1] = y_c - h_half
boxes_exp[:, 3] = y_c + h_half
return boxes_exp
def expand_masks(mask, padding):
N = mask.shape[0]
M = mask.shape[-1]
pad2 = 2 * padding
scale = float(M + pad2) / M
padded_mask = mask.new_zeros((N, 1, M + pad2, M + pad2))
padded_mask[:, :, padding:-padding, padding:-padding] = mask
return padded_mask, scale
def paste_mask_in_image(mask, box, im_h, im_w, thresh=0.5, padding=1):
# Need to work on the CPU, where fp16 isn't supported - cast to float to avoid this
mask = mask.float()
box = box.float()
padded_mask, scale = expand_masks(mask[None], padding=padding)
mask = padded_mask[0, 0]
box = expand_boxes(box[None], scale)[0]
box = box.to(dtype=torch.int32)
TO_REMOVE = 1
w = int(box[2] - box[0] + TO_REMOVE)
h = int(box[3] - box[1] + TO_REMOVE)
w = max(w, 1)
h = max(h, 1)
# Set shape to [batchxCxHxW]
mask = mask.expand((1, 1, -1, -1))
# Resize mask
mask = mask.to(torch.float32)
mask = F.interpolate(mask, size=(h, w), mode='bilinear', align_corners=False)
mask = mask[0][0]
if thresh >= 0:
mask = mask > thresh
else:
# for visualization and debugging, we also
# allow it to return an unmodified mask
mask = (mask * 255).to(torch.uint8)
im_mask = torch.zeros((im_h, im_w), dtype=torch.uint8)
x_0 = max(box[0], 0)
x_1 = min(box[2] + 1, im_w)
y_0 = max(box[1], 0)
y_1 = min(box[3] + 1, im_h)
im_mask[y_0:y_1, x_0:x_1] = mask[
(y_0 - box[1]) : (y_1 - box[1]), (x_0 - box[0]) : (x_1 - box[0])
]
return im_mask
class Masker(object):
"""
Projects a set of masks in an image on the locations
specified by the bounding boxes
"""
def __init__(self, threshold=0.5, padding=1):
self.threshold = threshold
self.padding = padding
def forward_single_image(self, masks, boxes):
boxes = boxes.convert("xyxy")
im_w, im_h = boxes.size
res = [
paste_mask_in_image(mask[0], box, im_h, im_w, self.threshold, self.padding)
for mask, box in zip(masks, boxes.bbox)
]
if len(res) > 0:
res = torch.stack(res, dim=0)[:, None]
else:
res = masks.new_empty((0, 1, masks.shape[-2], masks.shape[-1]))
return res
def __call__(self, masks, boxes):
if isinstance(boxes, BoxList):
boxes = [boxes]
# Make some sanity check
assert len(boxes) == len(masks), "Masks and boxes should have the same length."
# TODO: Is this JIT compatible?
# If not we should make it compatible.
results = []
for mask, box in zip(masks, boxes):
assert mask.shape[0] == len(box), "Number of objects should be the same."
result = self.forward_single_image(mask, box)
results.append(result)
return results
def make_roi_mask_post_processor(cfg):
if cfg.MODEL.ROI_MASK_HEAD.POSTPROCESS_MASKS:
mask_threshold = cfg.MODEL.ROI_MASK_HEAD.POSTPROCESS_MASKS_THRESHOLD
masker = Masker(threshold=mask_threshold, padding=1)
else:
masker = None
mask_post_processor = MaskPostProcessor(masker)
return mask_post_processor
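# Small illustrative example (values are hypothetical): pasting a single 28x28
# mask prediction into a 480x640 image canvas.
#
#   mask = torch.rand(28, 28)                    # sigmoid output for one detection
#   box = torch.tensor([100., 50., 300., 250.])  # xyxy box in image coordinates
#   im_mask = paste_mask_in_image(mask, box, im_h=480, im_w=640)
#   # im_mask: torch.uint8 tensor of shape (480, 640), binarized at thresh=0.5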
|
PyTorch/Classification/GPUNet/triton/125ms-D/runner | runner | __main__ | # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import pathlib
from typing import List
if __name__ == "__main__" and __package__ is None:
__package__ = pathlib.Path(__file__).parent.name
from ...runner.config import Config
from ...runner.executor import Executor
from ...runner.finalizer import ExperimentFinalizer
from ...runner.maintainer import DockerMaintainer
from ...runner.preparer import ExperimentPreparer
from ...runner.runner_proxy import RunnerProxy
from .pipeline_impl import pipeline
class ExperimentRunner(RunnerProxy):
"""
Experiment Runner proxy for runner wrapper
"""
maintainer_cls = DockerMaintainer
executor_cls = Executor
preparer_cls = ExperimentPreparer
finalizer_cls = ExperimentFinalizer
def execute(config_path: str, devices: List[str]):
if len(devices) == 0:
devices = ["0"]
config = Config.from_file(config_path)
runner = ExperimentRunner(config=config, pipeline=pipeline, devices=devices)
runner.start()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--config-path", type=str, required=True, help="Path to configuration file with details.")
    parser.add_argument(
        "--devices", type=str, nargs="*", required=False, help="List of GPU device ids on which to run the experiments."
    )
args = parser.parse_args()
config_path = args.config_path
devices = args.devices
execute(config_path, devices) |
TensorFlow/LanguageModeling/BERT | BERT | predicting_movie_reviews_with_bert_on_tf_hub | #!/usr/bin/env python
# coding: utf-8
# In[ ]:
# Copyright 2019 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# #Predicting Movie Review Sentiment with BERT on TF Hub
# If you’ve been following Natural Language Processing over the past year, you’ve probably heard of BERT: Bidirectional Encoder Representations from Transformers. It’s a neural network architecture designed by Google researchers that’s totally transformed what’s state-of-the-art for NLP tasks, like text classification, translation, summarization, and question answering.
#
# Now that BERT's been added to [TF Hub](https://www.tensorflow.org/hub) as a loadable module, it's easy(ish) to add into existing Tensorflow text pipelines. In an existing pipeline, BERT can replace text embedding layers like ELMO and GloVE. Alternatively, [finetuning](http://wiki.fast.ai/index.php/Fine_tuning) BERT can provide both an accuracy boost and faster training time in many cases.
#
# Here, we'll train a model to predict whether an IMDB movie review is positive or negative using BERT in Tensorflow with tf hub. Some code was adapted from [this colab notebook](https://colab.sandbox.google.com/github/tensorflow/tpu/blob/master/tools/colab/bert_finetuning_with_cloud_tpus.ipynb). Let's get started!
# In[ ]:
from sklearn.model_selection import train_test_split
import pandas as pd
import tensorflow as tf
import tensorflow_hub as hub
from datetime import datetime
# In addition to the standard libraries we imported above, we'll need to install BERT's python package.
# In[38]:
get_ipython().system('pip install bert-tensorflow')
# In[ ]:
import bert
from bert import run_classifier
from bert import optimization
from bert import tokenization
# Below, we'll set an output directory location to store our model output and checkpoints. This can be a local directory, in which case you'd set OUTPUT_DIR to the name of the directory you'd like to create. If you're running this code in Google's hosted Colab, the directory won't persist after the Colab session ends.
#
# Alternatively, if you're a GCP user, you can store output in a GCP bucket. To do that, set a directory name in OUTPUT_DIR and the name of the GCP bucket in the BUCKET field.
#
# Set DO_DELETE to rewrite the OUTPUT_DIR if it exists. Otherwise, Tensorflow will load existing model checkpoints from that directory (if they exist).
# In[40]:
# Set the output directory for saving model file
# Optionally, set a GCP bucket location
OUTPUT_DIR = 'OUTPUT_DIR_NAME'#@param {type:"string"}
#@markdown Whether or not to clear/delete the directory and create a new one
DO_DELETE = False #@param {type:"boolean"}
#@markdown Set USE_BUCKET and BUCKET if you want to (optionally) store model output on GCP bucket.
USE_BUCKET = True #@param {type:"boolean"}
BUCKET = 'BUCKET_NAME' #@param {type:"string"}
if USE_BUCKET:
OUTPUT_DIR = 'gs://{}/{}'.format(BUCKET, OUTPUT_DIR)
from google.colab import auth
auth.authenticate_user()
if DO_DELETE:
try:
tf.gfile.DeleteRecursively(OUTPUT_DIR)
except:
# Doesn't matter if the directory didn't exist
pass
tf.gfile.MakeDirs(OUTPUT_DIR)
print('***** Model output directory: {} *****'.format(OUTPUT_DIR))
# #Data
# First, let's download the dataset, hosted by Stanford. The code below, which downloads, extracts, and imports the IMDB Large Movie Review Dataset, is borrowed from [this Tensorflow tutorial](https://www.tensorflow.org/hub/tutorials/text_classification_with_tf_hub).
# In[ ]:
from tensorflow import keras
import os
import re
# Load all files from a directory in a DataFrame.
def load_directory_data(directory):
data = {}
data["sentence"] = []
data["sentiment"] = []
for file_path in os.listdir(directory):
with tf.gfile.GFile(os.path.join(directory, file_path), "r") as f:
data["sentence"].append(f.read())
data["sentiment"].append(re.match("\d+_(\d+)\.txt", file_path).group(1))
return pd.DataFrame.from_dict(data)
# Merge positive and negative examples, add a polarity column and shuffle.
def load_dataset(directory):
pos_df = load_directory_data(os.path.join(directory, "pos"))
neg_df = load_directory_data(os.path.join(directory, "neg"))
pos_df["polarity"] = 1
neg_df["polarity"] = 0
return pd.concat([pos_df, neg_df]).sample(frac=1).reset_index(drop=True)
# Download and process the dataset files.
def download_and_load_datasets(force_download=False):
dataset = tf.keras.utils.get_file(
fname="aclImdb.tar.gz",
origin="http://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz",
extract=True)
train_df = load_dataset(os.path.join(os.path.dirname(dataset),
"aclImdb", "train"))
test_df = load_dataset(os.path.join(os.path.dirname(dataset),
"aclImdb", "test"))
return train_df, test_df
# In[ ]:
train, test = download_and_load_datasets()
# To keep training fast, we'll take a sample of 5000 train and test examples, respectively.
# In[ ]:
train = train.sample(5000)
test = test.sample(5000)
# In[44]:
train.columns
# For us, our input data is the 'sentence' column and our label is the 'polarity' column (0, 1 for negative and positive, respectively)
# In[ ]:
DATA_COLUMN = 'sentence'
LABEL_COLUMN = 'polarity'
# label_list is the list of labels, i.e. True, False or 0, 1 or 'dog', 'cat'
label_list = [0, 1]
# #Data Preprocessing
# We'll need to transform our data into a format BERT understands. This involves two steps. First, we create `InputExample`'s using the constructor provided in the BERT library.
#
# - `text_a` is the text we want to classify, which in this case, is the `Request` field in our Dataframe.
# - `text_b` is used if we're training a model to understand the relationship between sentences (i.e. is `text_b` a translation of `text_a`? Is `text_b` an answer to the question asked by `text_a`?). This doesn't apply to our task, so we can leave `text_b` blank.
# - `label` is the label for our example, i.e. True, False
# In[ ]:
# Use the InputExample class from BERT's run_classifier code to create examples from the data
train_InputExamples = train.apply(lambda x: bert.run_classifier.InputExample(guid=None, # Globally unique ID for bookkeeping, unused in this example
text_a = x[DATA_COLUMN],
text_b = None,
label = x[LABEL_COLUMN]), axis = 1)
test_InputExamples = test.apply(lambda x: bert.run_classifier.InputExample(guid=None,
text_a = x[DATA_COLUMN],
text_b = None,
label = x[LABEL_COLUMN]), axis = 1)
# Next, we need to preprocess our data so that it matches the data BERT was trained on. For this, we'll need to do a couple of things (but don't worry--this is also included in the Python library):
#
#
# 1. Lowercase our text (if we're using a BERT lowercase model)
# 2. Tokenize it (i.e. "sally says hi" -> ["sally", "says", "hi"])
# 3. Break words into WordPieces (i.e. "calling" -> ["call", "##ing"])
# 4. Map our words to indexes using a vocab file that BERT provides
# 5. Add special "CLS" and "SEP" tokens (see the [readme](https://github.com/google-research/bert))
# 6. Append "index" and "segment" tokens to each input (see the [BERT paper](https://arxiv.org/pdf/1810.04805.pdf))
#
# Happily, we don't have to worry about most of these details.
#
#
#
# To start, we'll need to load a vocabulary file and lowercasing information directly from the BERT tf hub module:
# In[47]:
# This is a path to an uncased (all lowercase) version of BERT
BERT_MODEL_HUB = "https://tfhub.dev/google/bert_uncased_L-12_H-768_A-12/1"
def create_tokenizer_from_hub_module():
"""Get the vocab file and casing info from the Hub module."""
with tf.Graph().as_default():
bert_module = hub.Module(BERT_MODEL_HUB)
tokenization_info = bert_module(signature="tokenization_info", as_dict=True)
with tf.Session() as sess:
vocab_file, do_lower_case = sess.run([tokenization_info["vocab_file"],
tokenization_info["do_lower_case"]])
return bert.tokenization.FullTokenizer(
vocab_file=vocab_file, do_lower_case=do_lower_case)
tokenizer = create_tokenizer_from_hub_module()
# Great--we just learned that the BERT model we're using expects lowercase data (that's what's stored in tokenization_info["do_lower_case"]) and we also loaded BERT's vocab file. We also created a tokenizer, which breaks words into word pieces:
# In[48]:
tokenizer.tokenize("This here's an example of using the BERT tokenizer")
# Using our tokenizer, we'll call `run_classifier.convert_examples_to_features` on our InputExamples to convert them into features BERT understands.
# In[49]:
# We'll set sequences to be at most 128 tokens long.
MAX_SEQ_LENGTH = 128
# Convert our train and test features to InputFeatures that BERT understands.
train_features = bert.run_classifier.convert_examples_to_features(train_InputExamples, label_list, MAX_SEQ_LENGTH, tokenizer)
test_features = bert.run_classifier.convert_examples_to_features(test_InputExamples, label_list, MAX_SEQ_LENGTH, tokenizer)
# #Creating a model
#
# Now that we've prepared our data, let's focus on building a model. `create_model` does just this below. First, it loads the BERT tf hub module again (this time to extract the computation graph). Next, it creates a single new layer that will be trained to adapt BERT to our sentiment task (i.e. classifying whether a movie review is positive or negative). This strategy of using a mostly trained model is called [fine-tuning](http://wiki.fast.ai/index.php/Fine_tuning).
# In[ ]:
def create_model(is_predicting, input_ids, input_mask, segment_ids, labels,
num_labels):
"""Creates a classification model."""
bert_module = hub.Module(
BERT_MODEL_HUB,
trainable=True)
bert_inputs = dict(
input_ids=input_ids,
input_mask=input_mask,
segment_ids=segment_ids)
bert_outputs = bert_module(
inputs=bert_inputs,
signature="tokens",
as_dict=True)
# Use "pooled_output" for classification tasks on an entire sentence.
# Use "sequence_outputs" for token-level output.
output_layer = bert_outputs["pooled_output"]
hidden_size = output_layer.shape[-1].value
# Create our own layer to tune for politeness data.
output_weights = tf.get_variable(
"output_weights", [num_labels, hidden_size],
initializer=tf.truncated_normal_initializer(stddev=0.02))
output_bias = tf.get_variable(
"output_bias", [num_labels], initializer=tf.zeros_initializer())
with tf.variable_scope("loss"):
# Dropout helps prevent overfitting
output_layer = tf.nn.dropout(output_layer, keep_prob=0.9)
logits = tf.matmul(output_layer, output_weights, transpose_b=True)
logits = tf.nn.bias_add(logits, output_bias)
log_probs = tf.nn.log_softmax(logits, axis=-1)
# Convert labels into one-hot encoding
one_hot_labels = tf.one_hot(labels, depth=num_labels, dtype=tf.float32)
predicted_labels = tf.squeeze(tf.argmax(log_probs, axis=-1, output_type=tf.int32))
  # If we're predicting, we want the predicted labels and the probabilities.
if is_predicting:
return (predicted_labels, log_probs)
# If we're train/eval, compute loss between predicted and actual label
per_example_loss = -tf.reduce_sum(one_hot_labels * log_probs, axis=-1)
loss = tf.reduce_mean(per_example_loss)
return (loss, predicted_labels, log_probs)
# Next we'll wrap our model function in a `model_fn_builder` function that adapts our model to work for training, evaluation, and prediction.
# In[ ]:
# model_fn_builder actually creates our model function
# using the passed parameters for num_labels, learning_rate, etc.
def model_fn_builder(num_labels, learning_rate, num_train_steps,
num_warmup_steps):
"""Returns `model_fn` closure for TPUEstimator."""
def model_fn(features, labels, mode, params): # pylint: disable=unused-argument
"""The `model_fn` for TPUEstimator."""
input_ids = features["input_ids"]
input_mask = features["input_mask"]
segment_ids = features["segment_ids"]
label_ids = features["label_ids"]
is_predicting = (mode == tf.estimator.ModeKeys.PREDICT)
# TRAIN and EVAL
if not is_predicting:
(loss, predicted_labels, log_probs) = create_model(
is_predicting, input_ids, input_mask, segment_ids, label_ids, num_labels)
train_op = bert.optimization.create_optimizer(
loss, learning_rate, num_train_steps, num_warmup_steps, use_tpu=False)
# Calculate evaluation metrics.
def metric_fn(label_ids, predicted_labels):
accuracy = tf.metrics.accuracy(label_ids, predicted_labels)
f1_score = tf.contrib.metrics.f1_score(
label_ids,
predicted_labels)
auc = tf.metrics.auc(
label_ids,
predicted_labels)
recall = tf.metrics.recall(
label_ids,
predicted_labels)
precision = tf.metrics.precision(
label_ids,
predicted_labels)
true_pos = tf.metrics.true_positives(
label_ids,
predicted_labels)
true_neg = tf.metrics.true_negatives(
label_ids,
predicted_labels)
false_pos = tf.metrics.false_positives(
label_ids,
predicted_labels)
false_neg = tf.metrics.false_negatives(
label_ids,
predicted_labels)
return {
"eval_accuracy": accuracy,
"f1_score": f1_score,
"auc": auc,
"precision": precision,
"recall": recall,
"true_positives": true_pos,
"true_negatives": true_neg,
"false_positives": false_pos,
"false_negatives": false_neg
}
eval_metrics = metric_fn(label_ids, predicted_labels)
if mode == tf.estimator.ModeKeys.TRAIN:
return tf.estimator.EstimatorSpec(mode=mode,
loss=loss,
train_op=train_op)
else:
return tf.estimator.EstimatorSpec(mode=mode,
loss=loss,
eval_metric_ops=eval_metrics)
else:
(predicted_labels, log_probs) = create_model(
is_predicting, input_ids, input_mask, segment_ids, label_ids, num_labels)
predictions = {
'probabilities': log_probs,
'labels': predicted_labels
}
return tf.estimator.EstimatorSpec(mode, predictions=predictions)
# Return the actual model function in the closure
return model_fn
# In[ ]:
# Compute train and warmup steps from batch size
# These hyperparameters are copied from this colab notebook (https://colab.sandbox.google.com/github/tensorflow/tpu/blob/master/tools/colab/bert_finetuning_with_cloud_tpus.ipynb)
BATCH_SIZE = 32
LEARNING_RATE = 2e-5
NUM_TRAIN_EPOCHS = 3.0
# Warmup is a period of time where the learning rate
# is small and gradually increases, which usually helps training.
WARMUP_PROPORTION = 0.1
# Model configs
SAVE_CHECKPOINTS_STEPS = 500
SAVE_SUMMARY_STEPS = 100
# In[ ]:
# Compute # train and warmup steps from batch size
num_train_steps = int(len(train_features) / BATCH_SIZE * NUM_TRAIN_EPOCHS)
num_warmup_steps = int(num_train_steps * WARMUP_PROPORTION)
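# Worked example (using the 5,000-example training sample drawn above):
#   num_train_steps  = int(5000 / 32 * 3.0) = int(468.75) = 468
#   num_warmup_steps = int(468 * 0.1)       = 46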
# In[ ]:
# Specify output directory and number of checkpoint steps to save
run_config = tf.estimator.RunConfig(
model_dir=OUTPUT_DIR,
save_summary_steps=SAVE_SUMMARY_STEPS,
save_checkpoints_steps=SAVE_CHECKPOINTS_STEPS)
# In[55]:
model_fn = model_fn_builder(
num_labels=len(label_list),
learning_rate=LEARNING_RATE,
num_train_steps=num_train_steps,
num_warmup_steps=num_warmup_steps)
estimator = tf.estimator.Estimator(
model_fn=model_fn,
config=run_config,
params={"batch_size": BATCH_SIZE})
# Next we create an input builder function that takes our training feature set (`train_features`) and produces a generator. This is a pretty standard design pattern for working with Tensorflow [Estimators](https://www.tensorflow.org/guide/estimators).
# In[ ]:
# Create an input function for training. drop_remainder = True for using TPUs.
train_input_fn = bert.run_classifier.input_fn_builder(
features=train_features,
seq_length=MAX_SEQ_LENGTH,
is_training=True,
drop_remainder=False)
# Now we train our model! For me, using a Colab notebook running on Google's GPUs, my training time was about 14 minutes.
# In[57]:
print(f'Beginning Training!')
current_time = datetime.now()
estimator.train(input_fn=train_input_fn, max_steps=num_train_steps)
print("Training took time ", datetime.now() - current_time)
# Now let's use our test data to see how well our model did:
# In[ ]:
test_input_fn = run_classifier.input_fn_builder(
features=test_features,
seq_length=MAX_SEQ_LENGTH,
is_training=False,
drop_remainder=False)
# In[59]:
estimator.evaluate(input_fn=test_input_fn, steps=None)
# Now let's write code to make predictions on new sentences:
# In[ ]:
def getPrediction(in_sentences):
labels = ["Negative", "Positive"]
input_examples = [run_classifier.InputExample(guid="", text_a = x, text_b = None, label = 0) for x in in_sentences] # here, "" is just a dummy label
input_features = run_classifier.convert_examples_to_features(input_examples, label_list, MAX_SEQ_LENGTH, tokenizer)
predict_input_fn = run_classifier.input_fn_builder(features=input_features, seq_length=MAX_SEQ_LENGTH, is_training=False, drop_remainder=False)
predictions = estimator.predict(predict_input_fn)
return [(sentence, prediction['probabilities'], labels[prediction['labels']]) for sentence, prediction in zip(in_sentences, predictions)]
# In[ ]:
pred_sentences = [
"That movie was absolutely awful",
"The acting was a bit lacking",
"The film was creative and surprising",
"Absolutely fantastic!"
]
# In[72]:
predictions = getPrediction(pred_sentences)
# Voila! We have a sentiment classifier!
# In[73]:
predictions
|
PyTorch/SpeechSynthesis/Tacotron2/trtis_cpp/src/trt/util | util | taco2Utils | /*
* Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the NVIDIA CORPORATION nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef TT2I_TACO2UTILS_H
#define TT2I_TACO2UTILS_H
#include "NvInfer.h"
#include <stdexcept>
#include <string>
#include <vector>
namespace taco2
{
class Taco2Utils
{
public:
/**
   * @brief Compute the number of blocks of size `blockSize` needed to cover
   * `num` items.
*
* @param num The number of items to cover (must be >= 0).
* @param blockSize The number of items handled by each block (must be > 0).
*
   * @return The number of blocks required to cover all items.
*/
static int roundUpBlocks(int num, int blockSize);
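  // Illustrative example (not part of the original header): covering 1000
  // items with blocks of 256 threads requires roundUpBlocks(1000, 256) == 4
  // blocks, since 4 * 256 = 1024 >= 1000.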
/**
* @brief Create a string representation of a Dims object.
*
* @param dim The object to create a string representation of.
*
   * @return The string representation.
*/
static std::string dimsToString(const nvinfer1::Dims& dim);
/**
* @brief Get the total volume of a set of Dimensions (number of elements).
*
* @param dims The dimensions.
*
* @return The volume/total number of elements.
*/
static size_t getDimensionsSize(const nvinfer1::Dims& dims);
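  // Illustrative example (not part of the original header): for a Dims with
  // nbDims = 3 and d = {2, 80, 100}, getDimensionsSize() returns
  // 2 * 80 * 100 = 16000 elements.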
/**
   * @brief Get a Dims object with all 1's removed down to minLength.
   * If the length of dims is less than minLength, then just dims will be
   * returned. Leading 1's will be inserted to reach minLength.
*
* @param dims The dimensions to compact.
*
* @return The compacted dimensions.
*/
static nvinfer1::Dims
getCompactedDims(const nvinfer1::Dims& dims, const int minLength = 1);
/**
* @brief Convert a set of floats on the GPU to a set of halves on the GPU.
*
* @param floats The floats.
* @param halves The halves (output).
* @param num The number of floats (must be an even number).
*/
static void
floatsToHalves(const float* floats, float* halves, const size_t num);
/**
* @brief Convert a set of weights to a vector.
*
* @param weights The weights.
*
* @return The vector.
*/
static std::vector<float> toFloatVector(const nvinfer1::Weights& weights);
/**
* @brief Copy data from the host to the device.
*
* @tparam T The data type.
* @param dst The destination address on the device.
* @param src The source address on the host.
* @param count The number of elements to copy.
*/
template <typename T>
static void
copyHostToDevice(T* const dst, const T* const src, const size_t count)
{
copy(dst, src, count, cudaMemcpyHostToDevice);
}
/**
* @brief Copy data from the host to the device asynchronously.
*
* @tparam T The data type.
* @param dst The destination address on the device.
* @param src The source address on the host.
* @param count The number of elements to copy.
* @param stream The stream to operate on.
*/
template <typename T>
static void copyHostToDeviceAsync(
T* const dst, const T* const src, const size_t count, cudaStream_t stream)
{
copyAsync(dst, src, count, cudaMemcpyHostToDevice, stream);
}
/**
* @brief Copy data from the device to the host.
*
* @tparam T The data type.
* @param dst The destination address on the host.
* @param src The source address on the device.
* @param count The number of elements to copy.
*/
template <typename T>
static void
copyDeviceToHost(T* const dst, const T* const src, const size_t count)
{
copy(dst, src, count, cudaMemcpyDeviceToHost);
}
/**
* @brief Copy data from the device to the host asynchronously.
*
* @tparam T The data type.
* @param dst The destination address on the host.
* @param src The source address on the device.
* @param count The number of elements to copy.
* @param stream The stream to operate on.
*/
template <typename T>
static void copyDeviceToHostAsync(
T* const dst, const T* const src, const size_t count, cudaStream_t stream)
{
copyAsync(dst, src, count, cudaMemcpyDeviceToHost, stream);
}
/**
* @brief Copy data from the device to the device asynchronously.
*
* @tparam T The data type.
* @param dst The destination address on the device.
* @param src The source address on the device.
* @param count The number of elements to copy.
* @param stream The stream to operate on.
*/
template <typename T>
static void copyDeviceToDeviceAsync(
T* const dst, const T* const src, const size_t count, cudaStream_t stream)
{
copyAsync(dst, src, count, cudaMemcpyDeviceToDevice, stream);
}
/**
* @brief Copy data from the device to the device.
*
* @tparam T The data type.
* @param dst The destination address on the device.
* @param src The source address on the device.
* @param count The number of elements to copy.
*/
template <typename T>
static void
copyDeviceToDevice(T* const dst, const T* const src, const size_t count)
{
copy(dst, src, count, cudaMemcpyDeviceToDevice);
}
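
  // Minimal usage sketch (illustrative only; device allocation is assumed to
  // have been done elsewhere with cudaMalloc):
  //
  //   std::vector<float> hostData(1024, 0.0f);
  //   float* deviceData; // assume: cudaMalloc(&deviceData, 1024 * sizeof(float))
  //   Taco2Utils::copyHostToDevice(deviceData, hostData.data(), hostData.size());
  //   Taco2Utils::copyDeviceToHost(hostData.data(), deviceData, hostData.size());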
private:
/**
* @brief Convert cuda errors into exceptions. Will throw an exception
* unless `err == cudaSuccess`.
*
* @param err The error.
* @param msg The message to attach to the exception.
*/
static void check(const cudaError_t err, const std::string& msg = "")
{
if (err != cudaSuccess) {
throw std::runtime_error(
"Encountered error: " + std::to_string(static_cast<int>(err)) + ": "
+ msg);
}
}
/**
* @brief Perform checked asynchronous memcpy.
*
* @tparam T The data type.
* @param dst The destination address.
* @param src The source address.
* @param count The number of elements to copy.
* @param kind The direction of the copy.
   * @param stream The stream to operate on.
*/
template <typename T>
static void copyAsync(
T* const dst,
const T* const src,
const size_t count,
const enum cudaMemcpyKind kind,
cudaStream_t stream)
{
check(
cudaMemcpyAsync(dst, src, sizeof(T) * count, kind, stream),
"CheckedCopy::copyAsync(dst, src, count, kind, stream)");
}
/**
* @brief Perform a synchronous memcpy.
*
* @tparam T The data type.
* @param dst The destination address.
* @param src The source address.
* @param count The number of elements to copy.
* @param kind The direction of the copy.
*/
template <typename T>
static void copy(
T* const dst,
const T* const src,
const size_t count,
const enum cudaMemcpyKind kind)
{
check(
cudaMemcpy(dst, src, sizeof(T) * count, kind),
"CheckedCopy::copy(dst, src, count, kind)");
}
};
} // namespace taco2
#endif
|
TensorFlow/Detection/SSD/models/research/object_detection/samples/configs | configs | faster_rcnn_resnet50_coco | # Faster R-CNN with Resnet-50 (v1), configuration for MSCOCO Dataset.
# Users should configure the fine_tune_checkpoint field in the train config as
# well as the label_map_path and input_path fields in the train_input_reader and
# eval_input_reader. Search for "PATH_TO_BE_CONFIGURED" to find the fields that
# should be configured.
model {
faster_rcnn {
num_classes: 90
image_resizer {
keep_aspect_ratio_resizer {
min_dimension: 600
max_dimension: 1024
}
}
feature_extractor {
type: 'faster_rcnn_resnet50'
first_stage_features_stride: 16
}
first_stage_anchor_generator {
grid_anchor_generator {
scales: [0.25, 0.5, 1.0, 2.0]
aspect_ratios: [0.5, 1.0, 2.0]
height_stride: 16
width_stride: 16
}
}
first_stage_box_predictor_conv_hyperparams {
op: CONV
regularizer {
l2_regularizer {
weight: 0.0
}
}
initializer {
truncated_normal_initializer {
stddev: 0.01
}
}
}
first_stage_nms_score_threshold: 0.0
first_stage_nms_iou_threshold: 0.7
first_stage_max_proposals: 300
first_stage_localization_loss_weight: 2.0
first_stage_objectness_loss_weight: 1.0
initial_crop_size: 14
maxpool_kernel_size: 2
maxpool_stride: 2
second_stage_box_predictor {
mask_rcnn_box_predictor {
use_dropout: false
dropout_keep_probability: 1.0
fc_hyperparams {
op: FC
regularizer {
l2_regularizer {
weight: 0.0
}
}
initializer {
variance_scaling_initializer {
factor: 1.0
uniform: true
mode: FAN_AVG
}
}
}
}
}
second_stage_post_processing {
batch_non_max_suppression {
score_threshold: 0.0
iou_threshold: 0.6
max_detections_per_class: 100
max_total_detections: 300
}
score_converter: SOFTMAX
}
second_stage_localization_loss_weight: 2.0
second_stage_classification_loss_weight: 1.0
}
}
train_config: {
batch_size: 1
optimizer {
momentum_optimizer: {
learning_rate: {
manual_step_learning_rate {
initial_learning_rate: 0.0003
schedule {
step: 900000
learning_rate: .00003
}
schedule {
step: 1200000
learning_rate: .000003
}
}
}
momentum_optimizer_value: 0.9
}
use_moving_average: false
}
gradient_clipping_by_norm: 10.0
fine_tune_checkpoint: "PATH_TO_BE_CONFIGURED/model.ckpt"
from_detection_checkpoint: true
  # Note: The below line limits the training process to 200K steps, which we
  # empirically found to be sufficient. This effectively bypasses the
  # learning rate schedule (the learning rate will never decay). Remove the
  # below line to train indefinitely.
num_steps: 200000
data_augmentation_options {
random_horizontal_flip {
}
}
}
train_input_reader: {
tf_record_input_reader {
input_path: "PATH_TO_BE_CONFIGURED/mscoco_train.record-?????-of-00100"
}
label_map_path: "PATH_TO_BE_CONFIGURED/mscoco_label_map.pbtxt"
}
eval_config: {
num_examples: 8000
# Note: The below line limits the evaluation process to 10 evaluations.
# Remove the below line to evaluate indefinitely.
max_evals: 10
}
eval_input_reader: {
tf_record_input_reader {
input_path: "PATH_TO_BE_CONFIGURED/mscoco_val.record-?????-of-00010"
}
label_map_path: "PATH_TO_BE_CONFIGURED/mscoco_label_map.pbtxt"
shuffle: false
num_readers: 1
}
|
PyTorch/Segmentation/MaskRCNN/pytorch/maskrcnn_benchmark/data/datasets | datasets | __init__ | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from .coco import COCODataset
from .voc import PascalVOCDataset
from .concat_dataset import ConcatDataset
__all__ = ["COCODataset", "ConcatDataset", "PascalVOCDataset"]
|
Tools/PyTorch/TimeSeriesPredictionPlatform/callbacks | callbacks | callbacks | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Callback(object):
"""
Base class for building new callbacks.
"""
def __init__(self):
pass
class CallbackContainer(object):
"""
Base class for callbacks storage.
"""
def __init__(self):
pass
|
TensorFlow2/Segmentation/nnUNet/runtime | runtime | checkpoint | # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pathlib import Path
from time import time
import tensorflow as tf
from models.nn_unet import NNUnet
from runtime.utils import rank_zero_only
class CheckpointManager:
def __init__(self, ckpt_dir, strategy, variables, step_counter=None, resume_training=False):
self.dir = Path(ckpt_dir)
self.strategy = strategy
self.vars = variables
self.ckpt = tf.train.Checkpoint(**variables)
self.creation_time = time()
self.latest_save_time = time()
if "last" in strategy:
self.last_manager = tf.train.CheckpointManager(
self.ckpt, self.dir, max_to_keep=1, checkpoint_name="ckpt-last", step_counter=step_counter
)
if resume_training:
self.ckpt.restore(self.last_manager.latest_checkpoint)
if "best" in strategy:
self.best_manager = tf.train.CheckpointManager(
self.ckpt, self.dir / "best", max_to_keep=1, checkpoint_name="ckpt-best", step_counter=step_counter
)
self.best_metric = None
@rank_zero_only
def update(self, metric_value=None):
if "last" in self.strategy:
self.last_manager.save()
if (
metric_value is not None
and "best" in self.strategy
and (self.best_metric is None or self.best_metric < metric_value)
):
self.latest_save_time = time()
if self.best_metric is not None:
print(
f"({int(self.latest_save_time - self.creation_time)}s)",
f"New best metric value achieved ({float(metric_value):.4f} > {float(self.best_metric):.4f}).",
)
print("Saving new checkpoint.")
self.best_metric = metric_value
self.best_manager.save()
def load_best(self):
self.ckpt.restore(self.best_manager.latest_checkpoint)
return self.best_metric, int(self.latest_save_time - self.creation_time)
def load_model(args):
if args.saved_model_dir is not None:
print(f"Loading SavedModel from {str(args.saved_model_dir)}")
model = tf.saved_model.load(str(args.saved_model_dir))
model = NNUnet(args, loaded_model=model)
else:
if not (Path(args.ckpt_dir).is_dir() and (Path(args.ckpt_dir) / "checkpoint").exists()):
raise ValueError(f"Could not find checkpoint directory {args.ckpt_dir}")
model = NNUnet(args)
checkpoint = tf.train.Checkpoint(model=model)
checkpoint.restore(tf.train.latest_checkpoint(args.ckpt_dir)).expect_partial()
return model
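

# Illustrative usage inside a training loop (the strategy list, tracked
# variables, and metric name are assumptions, not values from this module):
#
#   manager = CheckpointManager(
#       args.ckpt_dir, strategy=["last", "best"],
#       variables={"model": model, "optimizer": optimizer})
#   for epoch in range(args.epochs):
#       train_epoch(...)
#       manager.update(metric_value=mean_dice)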
|
PyTorch/Segmentation/MaskRCNN/pytorch/maskrcnn_benchmark/solver | solver | lr_scheduler | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
from bisect import bisect_right
import torch
# FIXME ideally this would be achieved with a CombinedLRScheduler,
# separating MultiStepLR with WarmupLR
# but the current LRScheduler design doesn't allow it
class WarmupMultiStepLR(torch.optim.lr_scheduler._LRScheduler):
def __init__(
self,
optimizer,
milestones,
gamma=0.1,
warmup_factor=1.0 / 3,
warmup_iters=500,
warmup_method="linear",
last_epoch=-1,
):
if not list(milestones) == sorted(milestones):
            raise ValueError(
                "Milestones should be a list of"
                " increasing integers. Got {}".format(milestones)
            )
        # adding mlperf-consistent warmup routine
        if warmup_method not in ("constant", "linear", "mlperf_linear"):
            raise ValueError(
                "Only 'constant', 'linear' or 'mlperf_linear' warmup_method"
                " accepted, got {}".format(warmup_method)
            )
self.milestones = milestones
self.gamma = gamma
self.warmup_factor = warmup_factor
self.warmup_iters = warmup_iters
self.warmup_method = warmup_method
super(WarmupMultiStepLR, self).__init__(optimizer, last_epoch)
def get_lr(self):
warmup_factor = 1
if self.last_epoch < self.warmup_iters:
if self.warmup_method == "constant":
warmup_factor = self.warmup_factor
elif self.warmup_method == "linear":
alpha = self.last_epoch / self.warmup_iters
warmup_factor = self.warmup_factor * (1 - alpha) + alpha
#define mlperf warmup routine
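            # mlperf_linear warmup subtracts a linearly decaying offset
            # (delta) from each base learning rate instead of scaling the
            # rate by a warmup factor, matching the MLPerf reference behavior.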
elif self.warmup_method == "mlperf_linear":
# alpha = self.last_epoch / self.warmup_iters
# warmup_factor = self.warmup_factor * (1 - alpha) + alpha
delta = (self.warmup_iters - self.last_epoch) * self.warmup_factor
return [
(base_lr - delta)
* self.gamma ** bisect_right(self.milestones, self.last_epoch)
for base_lr in self.base_lrs
]
return [
base_lr
* warmup_factor
* self.gamma ** bisect_right(self.milestones, self.last_epoch)
for base_lr in self.base_lrs
]
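

# Minimal usage sketch (illustrative only; the optimizer, milestones, and
# warmup settings are assumptions, not values taken from this repository):
#
#   optimizer = torch.optim.SGD(model.parameters(), lr=0.02, momentum=0.9)
#   scheduler = WarmupMultiStepLR(
#       optimizer, milestones=[60000, 80000], gamma=0.1,
#       warmup_factor=1.0 / 3, warmup_iters=500, warmup_method="linear")
#   for iteration in range(max_iter):
#       train_one_iteration(...)
#       scheduler.step()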
|
TensorFlow/Segmentation/UNet_3D_Medical/model | model | layers | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" High level definition of layers for model construction """
import tensorflow as tf
def _normalization(inputs, name, mode):
""" Choose a normalization layer
:param inputs: Input node from the graph
:param name: Name of layer
:param mode: Estimator's execution mode
:return: Normalized output
"""
training = mode == tf.estimator.ModeKeys.TRAIN
if name == 'instancenorm':
gamma_initializer = tf.constant_initializer(1.0)
return tf.contrib.layers.instance_norm(
inputs,
center=True,
scale=True,
epsilon=1e-6,
param_initializers={'gamma': gamma_initializer},
reuse=None,
variables_collections=None,
outputs_collections=None,
trainable=True,
data_format='NHWC',
scope=None)
if name == 'groupnorm':
return tf.contrib.layers.group_norm(inputs=inputs,
groups=16,
channels_axis=-1,
reduction_axes=(-4, -3, -2),
activation_fn=None,
trainable=True)
if name == 'batchnorm':
return tf.keras.layers.BatchNormalization(axis=-1,
trainable=True,
virtual_batch_size=None)(inputs, training=training)
if name == 'none':
return inputs
raise ValueError('Invalid normalization layer')
def _activation(out, activation):
""" Choose an activation layer
:param out: Input node from the graph
:param activation: Name of layer
:return: Activation output
"""
if activation == 'relu':
return tf.nn.relu(out)
if activation == 'leaky_relu':
return tf.nn.leaky_relu(out, alpha=0.01)
if activation == 'sigmoid':
return tf.nn.sigmoid(out)
if activation == 'softmax':
return tf.nn.softmax(out, axis=-1)
if activation == 'none':
return out
raise ValueError("Unknown activation {}".format(activation))
def convolution(inputs, # pylint: disable=R0913
out_channels,
kernel_size=3,
stride=1,
mode=tf.estimator.ModeKeys.TRAIN,
normalization='batchnorm',
activation='leaky_relu',
transpose=False):
""" Create a convolution layer
:param inputs: Input node from graph
:param out_channels: Output number of channels
:param kernel_size: Size of the kernel
:param stride: Stride of the kernel
:param mode: Estimator's execution mode
:param normalization: Name of the normalization layer
:param activation: Name of the activation layer
:param transpose: Select between regular and transposed convolution
:return: Convolution output
"""
if transpose:
conv = tf.keras.layers.Conv3DTranspose
else:
conv = tf.keras.layers.Conv3D
regularizer = None # tf.keras.regularizers.l2(1e-5)
use_bias = normalization == "none"
inputs = conv(filters=out_channels,
kernel_size=kernel_size,
strides=stride,
activation=None,
padding='same',
data_format='channels_last',
kernel_initializer=tf.compat.v1.glorot_uniform_initializer(),
kernel_regularizer=regularizer,
bias_initializer=tf.zeros_initializer(),
bias_regularizer=regularizer,
use_bias=use_bias)(inputs)
inputs = _normalization(inputs, normalization, mode)
return _activation(inputs, activation)
def upsample_block(inputs, skip_connection, out_channels, normalization, mode):
""" Create a block for upsampling
:param inputs: Input node from the graph
    :param skip_connection: Skip-connection tensor from the encoder path,
        concatenated with the upsampled features
    :param out_channels: Number of output channels
    :param normalization: Name of the normalization layer
:param mode: Estimator's execution mode
:return: Output from the upsample block
"""
inputs = convolution(inputs, kernel_size=2, out_channels=out_channels, stride=2,
normalization='none', activation='none', transpose=True)
inputs = tf.keras.layers.Concatenate(axis=-1)([inputs, skip_connection])
inputs = convolution(inputs, out_channels=out_channels, normalization=normalization, mode=mode)
inputs = convolution(inputs, out_channels=out_channels, normalization=normalization, mode=mode)
return inputs
def input_block(inputs, out_channels, normalization, mode):
""" Create the input block
:param inputs: Input node from the graph
:param out_channels: Number of output channels
:param normalization: Name of the normalization layer
:param mode: Estimator's execution mode
:return: Output from the input block
"""
inputs = convolution(inputs, out_channels=out_channels, normalization=normalization, mode=mode)
inputs = convolution(inputs, out_channels=out_channels, normalization=normalization, mode=mode)
return inputs
def downsample_block(inputs, out_channels, normalization, mode):
""" Create a downsample block
:param inputs: Input node from the graph
:param out_channels: Number of output channels
:param normalization: Name of the normalization layer
:param mode: Estimator's execution mode
:return: Output from the downsample block
"""
inputs = convolution(inputs, out_channels=out_channels, normalization=normalization, mode=mode, stride=2)
return convolution(inputs, out_channels=out_channels, normalization=normalization, mode=mode)
def output_layer(inputs, out_channels, activation):
""" Create the output layer
:param inputs: Input node from the graph
:param out_channels: Number of output channels
:param activation: Name of the activation layer
:return: Output from the output block
"""
return convolution(inputs, out_channels=out_channels, kernel_size=3, normalization='none', activation=activation)
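

# Minimal composition sketch (illustrative only; the channel counts,
# normalization choice, and number of output classes are assumptions, not
# values from the actual UNet-3D model definition):
def _example_tiny_unet(inputs, mode=tf.estimator.ModeKeys.TRAIN):
    """ Compose the blocks above into a one-level encoder/decoder """
    skip = input_block(inputs, out_channels=16, normalization='instancenorm', mode=mode)
    down = downsample_block(skip, out_channels=32, normalization='instancenorm', mode=mode)
    up = upsample_block(down, skip, out_channels=16, normalization='instancenorm', mode=mode)
    return output_layer(up, out_channels=4, activation='softmax')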
|
PyTorch/Classification/ConvNets/se-resnext101-32x4d/training/AMP | AMP | DGXA100_se-resnext101-32x4d_AMP_90E | python ./multiproc.py --nproc_per_node 8 ./launch.py --model se-resnext101-32x4d --precision AMP --mode convergence --platform DGXA100 /imagenet --epochs 90 --mixup 0.0 --workspace ${1:-./} --raport-file raport.json
|
PyTorch/LanguageModeling/BERT/triton/deployment_toolkit/model_analyzer | model_analyzer | model_analyzer | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import subprocess
from subprocess import CalledProcessError
from .exceptions import ModelAnalyzerException
SERVER_OUTPUT_TIMEOUT_SECS = 5
LOGGER = logging.getLogger(__name__)
class ModelAnalyzerMode:
PROFILE = "profile"
ANALYZE = "analyze"
REPORT = "report"
class ModelAnalyzerReportMode:
OFFLINE = "offline"
ONLINE = "online"
class ModelAnalyzer:
"""
    Concrete implementation of the Model Analyzer interface that runs the
    analyzer locally as a subprocess.
"""
_analyzer_path = "model-analyzer"
def __init__(self, config):
"""
Parameters
----------
config : AnalyzerConfig
the config object containing arguments for this server instance
"""
self._analyzer_process = None
self._analyzer_config = config
self._log = None
def run(self, mode: str, verbose: bool = False, quiet: bool = False, report_mode: str = None):
"""
Starts the model analyzer locally
"""
if self._analyzer_path:
cmd = [self._analyzer_path]
if verbose:
cmd += ["--verbose"]
if quiet:
cmd += ["--quiet"]
if report_mode:
cmd += ["-m"]
cmd += [report_mode]
cmd += [mode]
cmd += self._analyzer_config.to_cli_string().split()
LOGGER.debug(f"Model Analyze command: {cmd}")
try:
subprocess.run(cmd, check=True, start_new_session=True)
except CalledProcessError as e:
raise ModelAnalyzerException(
f"Running {self._analyzer_path} with {e.cmd} failed with"
f" exit status {e.returncode} : {e.output}"
)
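

# Illustrative usage (the config object is assumed to implement the
# AnalyzerConfig interface referenced in __init__):
#
#   analyzer = ModelAnalyzer(config)
#   analyzer.run(mode=ModelAnalyzerMode.PROFILE, verbose=True)
#   analyzer.run(mode=ModelAnalyzerMode.REPORT,
#                report_mode=ModelAnalyzerReportMode.OFFLINE)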
|
PyTorch/Classification/ConvNets/efficientnet/inference/AMP | AMP | DGXA100_efficientnet-b0_AMP |
python ./multiproc.py --nproc_per_node 8 ./launch.py --model efficientnet-b0 --precision AMP --mode benchmark_inference --platform DGXA100 /imagenet -b 1 --workspace ${1:-./} --raport-file raport_1.json
python ./multiproc.py --nproc_per_node 8 ./launch.py --model efficientnet-b0 --precision AMP --mode benchmark_inference --platform DGXA100 /imagenet -b 2 --workspace ${1:-./} --raport-file raport_2.json
python ./multiproc.py --nproc_per_node 8 ./launch.py --model efficientnet-b0 --precision AMP --mode benchmark_inference --platform DGXA100 /imagenet -b 4 --workspace ${1:-./} --raport-file raport_4.json
python ./multiproc.py --nproc_per_node 8 ./launch.py --model efficientnet-b0 --precision AMP --mode benchmark_inference --platform DGXA100 /imagenet -b 8 --workspace ${1:-./} --raport-file raport_8.json
python ./multiproc.py --nproc_per_node 8 ./launch.py --model efficientnet-b0 --precision AMP --mode benchmark_inference --platform DGXA100 /imagenet -b 16 --workspace ${1:-./} --raport-file raport_16.json
python ./multiproc.py --nproc_per_node 8 ./launch.py --model efficientnet-b0 --precision AMP --mode benchmark_inference --platform DGXA100 /imagenet -b 32 --workspace ${1:-./} --raport-file raport_32.json
python ./multiproc.py --nproc_per_node 8 ./launch.py --model efficientnet-b0 --precision AMP --mode benchmark_inference --platform DGXA100 /imagenet -b 64 --workspace ${1:-./} --raport-file raport_64.json
python ./multiproc.py --nproc_per_node 8 ./launch.py --model efficientnet-b0 --precision AMP --mode benchmark_inference --platform DGXA100 /imagenet -b 128 --workspace ${1:-./} --raport-file raport_128.json
python ./multiproc.py --nproc_per_node 8 ./launch.py --model efficientnet-b0 --precision AMP --mode benchmark_inference --platform DGXA100 /imagenet -b 256 --workspace ${1:-./} --raport-file raport_256.json
|
Tools/PyTorch/TimeSeriesPredictionPlatform/conf/inference | inference | triton | # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
_target_: inference.inference_triton.run_inference_triton
config:
checkpoint: ???
batch_size: 64
evaluator:
_target_: evaluators.triton_evaluator.TritonEvaluator |
TensorFlow/LanguageModeling/BERT/scripts | scripts | finetune_train_benchmark | #!/bin/bash
# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
bert_model=${1:-"large"}
use_xla=${2:-"true"}
num_gpu=${3:-"8"}
task=${4:-"squad"}
if [ "$bert_model" = "large" ] ; then
export BERT_DIR=data/download/nvidia_pretrained/bert_tf_pretraining_large_lamb
else
export BERT_DIR=data/download/nvidia_pretrained/bert_tf_squad11_base_128
fi
echo "BERT directory set as " $BERT_DIR
init_checkpoint="$BERT_DIR/bert_model.ckpt"
learning_rate=5e-6
#Edit to save logs & checkpoints in a different directory
RESULTS_DIR=/results
if [ ! -d "$RESULTS_DIR" ] ; then
echo "Error! $RESULTS_DIR directory missing."
exit -1
fi
echo "Results directory set as " $RESULTS_DIR
if [ "$use_xla" = "true" ] ; then
use_xla_tag="--use_xla"
else
use_xla_tag="--nouse_xla"
fi
if [ $num_gpu -gt 1 ] ; then
mpi_command="mpirun -np $num_gpu -H localhost:$num_gpu \
--allow-run-as-root -bind-to none -map-by slot \
-x NCCL_DEBUG=INFO \
-x LD_LIBRARY_PATH \
-x PATH -mca pml ob1 -mca btl ^openib"
use_hvd="--horovod"
else
mpi_command=""
use_hvd=""
fi
LOGFILE="${RESULTS_DIR}/${task}_training_benchmark_bert_${bert_model}_gpu_${num_gpu}.log"
if [ "$task" = "squad" ] ; then
export SQUAD_DIR=data/download/squad/v1.1
epochs="2.0"
echo "Squad directory set as " $SQUAD_DIR
echo "Training performance benchmarking for BERT $bert_model from $BERT_DIR" >> $LOGFILE
echo "Precision Sequence Length Batch size Performance(sent/sec)" >> $LOGFILE
for seq_len in 128 384; do
if [ "$seq_len" = "128" ] ; then
doc_stride=64
else
doc_stride=128
fi
for batch_size in 1 2 4; do
for use_fp16 in "--amp" "--noamp"; do
res_dir=${RESULTS_DIR}/bert_${bert_model}_gpu_${num_gpu}_sl_${seq_len}_prec_${use_fp16}_bs_${batch_size}
mkdir -p $res_dir
tmp_file="${res_dir}/${task}_training_benchmark.log"
$mpi_command python run_squad.py \
--vocab_file=$BERT_DIR/vocab.txt \
--bert_config_file=$BERT_DIR/bert_config.json \
--init_checkpoint=$init_checkpoint \
--do_train=True \
--train_file=$SQUAD_DIR/train-v1.1.json \
--train_batch_size=$batch_size \
--learning_rate=$learning_rate \
--num_train_epochs=$epochs \
--max_seq_length=$seq_len \
--doc_stride=$doc_stride \
--output_dir=$res_dir \
"$use_hvd" \
"$use_fp16" \
$use_xla_tag |& tee $tmp_file
perf=`cat $tmp_file | grep -F 'Throughput Average (sentences/sec) =' | head -1 | awk -F'= ' '{print $2}' | awk -F' sen' '{print $1}'`
echo "$use_fp16 $seq_len $batch_size $perf" >> $LOGFILE
done
done
done
else
echo "Benchmarking for " $task "currently not supported. Sorry!"
fi |
PyTorch/Translation/Transformer/fairseq/optim | optim | sgd | # Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
import torch.optim
from . import FairseqOptimizer, register_optimizer
@register_optimizer('sgd')
class SGD(FairseqOptimizer):
def __init__(self, args, params):
super().__init__(args, params)
self._optimizer = torch.optim.SGD(params, **self.optimizer_config)
@property
def optimizer_config(self):
"""
Return a kwarg dictionary that will be used to override optimizer
args stored in checkpoints. This allows us to load a checkpoint and
resume training using a different set of optimizer args, e.g., with a
different learning rate.
"""
return {
'lr': self.args.lr[0],
'momentum': self.args.momentum,
'weight_decay': self.args.weight_decay,
}
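

# Illustrative instantiation (args is assumed to carry lr, momentum, and
# weight_decay fields, as read by optimizer_config above):
#
#   optimizer = SGD(args, model.parameters())
#   optimizer.optimizer_config  # -> {'lr': ..., 'momentum': ..., 'weight_decay': ...}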
|
TensorFlow2/Recommendation/SIM | SIM | setup | # Copyright (c) 2022 NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import find_packages, setup
setup(name="sim",
package_dir={'sim': 'sim'},
version="1.0.0",
description="Reimplementation of Search-based User Interest Modeling",
packages=find_packages()
)
|
PyTorch/SpeechSynthesis/Tacotron2/filelists | filelists | ljs_audio_text_train_subset_1250_filelist | LJSpeech-1.1/wavs/LJ040-0100.wav|she would sometimes take Lee with her, apparently leaving him alone in the car while she transacted her business.
LJSpeech-1.1/wavs/LJ011-0248.wav|Howard, strange to say, making no attempt to detain him; probably because Mullay promised to return a few days later, and to bring more money.
LJSpeech-1.1/wavs/LJ016-0442.wav|made a determined effort to burn himself to death by throwing himself bodily on to the fire in the condemned ward.
LJSpeech-1.1/wavs/LJ026-0036.wav|and then a balance must be struck and the doubtful form placed in the kingdom with which it has, on the whole, most points in common.
LJSpeech-1.1/wavs/LJ042-0176.wav|One offers oppression, the other poverty. Both offer imperialistic injustice, tinted with two brands of slavery, end quote.
LJSpeech-1.1/wavs/LJ003-0323.wav|Drunkenness, if it ever occurred, should be visited with severe punishment;
LJSpeech-1.1/wavs/LJ045-0161.wav|He was upset over the fact that I would not answer him.
LJSpeech-1.1/wavs/LJ028-0187.wav|Cyrus decided that Babylon must be taken.
LJSpeech-1.1/wavs/LJ037-0178.wav|or one used Remington-Peters cartridge case, which may have been in the revolver before the shooting,
LJSpeech-1.1/wavs/LJ010-0164.wav|Oxford, who was only nineteen at the time his offense was committed, had been born at Birmingham,
LJSpeech-1.1/wavs/LJ019-0178.wav|and abandoned because of the expense. As to the entire reconstruction of Newgate, nothing had been done as yet.
LJSpeech-1.1/wavs/LJ050-0117.wav|particularly those arising from organized groups, within their special jurisdiction.
LJSpeech-1.1/wavs/LJ033-0128.wav|that the bag Oswald carried contained the assassination weapon and has concluded that Frazier and Randle are mistaken as to the length of the bag.
LJSpeech-1.1/wavs/LJ007-0179.wav|defeats the ends of justice, and disgraces the profession of a Christian country.
LJSpeech-1.1/wavs/LJ033-0067.wav|She pointed to the blanket which was on the floor very close to where Ruth Paine was standing.
LJSpeech-1.1/wavs/LJ004-0139.wav|"In the morning the stench and heat were so oppressive that he and every one else on waking rushed unclothed into the yard;"
LJSpeech-1.1/wavs/LJ009-0208.wav|erected on the cart, about four feet high at the head, and gradually sloping towards the horse, giving a full view of the body,
LJSpeech-1.1/wavs/LJ012-0144.wav|and passed it on to Solomons by his daughter, a widow named Abrahams.
LJSpeech-1.1/wavs/LJ001-0020.wav|the "lower-case" being in fact invented in the early Middle Ages.
LJSpeech-1.1/wavs/LJ014-0227.wav|One of these was Mobbs, who lived in the Minories,
LJSpeech-1.1/wavs/LJ040-0146.wav|He noted that Lee liked to give the impression that he did not care for other people but preferred to keep to himself,
LJSpeech-1.1/wavs/LJ001-0149.wav|From the time when books first took their present shape till the end of the sixteenth century, or indeed later,
LJSpeech-1.1/wavs/LJ002-0143.wav|The commissioners who presided were, quote, little otherwise than self-elected
LJSpeech-1.1/wavs/LJ014-0217.wav|Dwyer managed to overpower his assailant, and got to his feet; but Cannon butted at him with his head, and again threw him to the ground,
LJSpeech-1.1/wavs/LJ005-0250.wav|The prisoners were crowded together in the jail, contrary to the requirements of the four George the fourth
LJSpeech-1.1/wavs/LJ042-0049.wav|I never believed I would find more material advantages at this stage of development in the Soviet Union than I might of had in the U.S.
LJSpeech-1.1/wavs/LJ014-0198.wav|Marley at his trial was undefended, and the sheriffs offered him counsel; but he declined. The witnesses against him all spoke the truth, he said;
LJSpeech-1.1/wavs/LJ034-0093.wav|Brennan also testified that Lee Harvey Oswald,
LJSpeech-1.1/wavs/LJ016-0237.wav|With Calcraft's method there were undoubtedly many failures, and it was a common custom for him to go below the gallows
LJSpeech-1.1/wavs/LJ015-0156.wav|Down at Weybridge, where he had a country place, his name was long remembered with gratitude by the poor.
LJSpeech-1.1/wavs/LJ018-0047.wav|He adhered to this almost to the very last. His case had been warmly espoused by the Society for the Protection of Germans in this country,
LJSpeech-1.1/wavs/LJ013-0020.wav|he acted in a manner which excited the suspicions of the crew.
LJSpeech-1.1/wavs/LJ002-0041.wav|Two other wards were appropriated to the master's side debtors; they were each twenty-three feet by fourteen and a half,
LJSpeech-1.1/wavs/LJ008-0227.wav|slipshod and slovenly, in crushed bonnet and dirty shawl, the gown fastened by a single hook,
LJSpeech-1.1/wavs/LJ007-0029.wav|The condition of the capitally-convicted prisoners after sentence was still very disgraceful. The side they occupied, still known as the press-yard,
LJSpeech-1.1/wavs/LJ018-0358.wav|Christina Edmunds had resort to strychnia, the same lethal drug that Palmer used;
LJSpeech-1.1/wavs/LJ007-0198.wav|The windows were to be glazed and painted to prevent prisoners from looking out;
LJSpeech-1.1/wavs/LJ043-0032.wav|After about a two-week separation, Marina Oswald returned to her husband.
LJSpeech-1.1/wavs/LJ035-0071.wav|At a given signal, they reenacted the event. Baker's movements were timed with a stopwatch.
LJSpeech-1.1/wavs/LJ009-0092.wav|his legs give way, he utters a faint groan, and sinks on the floor.
LJSpeech-1.1/wavs/LJ019-0310.wav|which had long been admitted as indispensable, and had never as yet been properly obtained.
LJSpeech-1.1/wavs/LJ038-0071.wav|When he entered the homicide and robbery bureau office, he saw two detectives standing there with Sgt. Gerald L. Hill,
LJSpeech-1.1/wavs/LJ014-0291.wav|he showed symptoms of delirium tremens, and admitted that he had been addicted to the excessive use of stimulants.
LJSpeech-1.1/wavs/LJ014-0283.wav|The jury found him guilty of the latter only, with a point of law reserved. This was fully argued before three judges,
LJSpeech-1.1/wavs/LJ021-0096.wav|under the able and energetic leadership of General Johnson.
LJSpeech-1.1/wavs/LJ045-0075.wav|She was, quote, sorry that I had not married him (the Russian boyfriend) instead, that it would have been much easier for me, end quote.
LJSpeech-1.1/wavs/LJ022-0203.wav|For that we can be thankful to the God who watches over America.
LJSpeech-1.1/wavs/LJ029-0073.wav|that the President would arrive and depart from Dallas' Love Field; that a motorcade through the downtown area of Dallas to the luncheon site should be arranged;
LJSpeech-1.1/wavs/LJ040-0187.wav|According to Sokolow, this indicated a, quote, present intellectual functioning in the upper range of bright normal intelligence, end quote.
LJSpeech-1.1/wavs/LJ016-0101.wav|One of the three, shamming ill, remained all day in his ward, where he employed himself unraveling the rope from the sleeping-mats.
LJSpeech-1.1/wavs/LJ015-0086.wav|He kept open house at Kilburn Priory;
LJSpeech-1.1/wavs/LJ028-0427.wav|The enormous amount of debris which buried the palaces and temples and walls of Nebuchadnezzar's city, in places to the depth of a hundred feet,
LJSpeech-1.1/wavs/LJ048-0248.wav|President Kennedy was scheduled to speak across the street from his hotel in Fort Worth at eight:thirty a.m.
LJSpeech-1.1/wavs/LJ021-0095.wav|We are now prepared to move into this second phase, on the basis of our experience in the first phase
LJSpeech-1.1/wavs/LJ030-0081.wav|They were instructed to watch particularly for thrown objects, sudden actions in the crowd, and any movements toward the Presidential car.
LJSpeech-1.1/wavs/LJ032-0176.wav|Moreover, the bus transfer which he obtained as he left the bus was still in the pocket when he was arrested.
LJSpeech-1.1/wavs/LJ044-0129.wav|and often it is advisable for some people to remain in the background, not underground, end quote.
LJSpeech-1.1/wavs/LJ018-0177.wav|But as there was no independent corroboration of the informer's evidence, according to the custom of the British law,
LJSpeech-1.1/wavs/LJ049-0113.wav|This point was ably made in the nineteen oh two debate by Senator George F. Hoar, the sponsor of the Senate bill, quote,
LJSpeech-1.1/wavs/LJ050-0141.wav|As a beginning step to improve liaison with local law enforcement officials, the Secret Service on August twenty-six, nineteen sixty-four,
LJSpeech-1.1/wavs/LJ013-0156.wav|a scion of the ducal house of Bedford, by his confidential valet and personal attendant.
LJSpeech-1.1/wavs/LJ032-0222.wav|Moreover, Shaneyfelt testified that in his opinion the photographs were not composites of two different photographs
LJSpeech-1.1/wavs/LJ004-0052.wav|which Howard had eulogized some forty years before.
LJSpeech-1.1/wavs/LJ006-0017.wav|with those who made the selection of the first inspectors, and the two gentlemen appointed were probably the most fitted in England to be so employed.
LJSpeech-1.1/wavs/LJ049-0046.wav|Even so, analysis of the motion picture films taken by amateur photographer Zapruder
LJSpeech-1.1/wavs/LJ017-0124.wav|He frequently declared before and during the trial that it would be impossible to find him guilty.
LJSpeech-1.1/wavs/LJ048-0150.wav|while the Secret Service representatives in Dallas
LJSpeech-1.1/wavs/LJ017-0082.wav|He fixed upon a sporting friend, Mr. John Parsons Cook, who had been in luck at Shrewsbury races, both as a winner and a backer,
LJSpeech-1.1/wavs/LJ041-0095.wav|Oswald read a good deal, said Powers, but, quote, he would never be reading any of the shoot-em-up westerns or anything like that.
LJSpeech-1.1/wavs/LJ002-0089.wav|eight. The female felons were deprived of part of the space which the architect had intended for them.
LJSpeech-1.1/wavs/LJ050-0264.wav|The Commission recommends that the present arrangements
LJSpeech-1.1/wavs/LJ039-0177.wav|was greater than from the second to the third shot and required a movement in the basic firing position of the marksmen.
LJSpeech-1.1/wavs/LJ047-0016.wav|The FBI opened a file on Oswald in October nineteen fifty-nine, when news reports appeared of his defection to the Soviet Union.
LJSpeech-1.1/wavs/LJ028-0036.wav|But in those very early days Babylon was little more than a shrine, surrounded with mud huts and date palms.
LJSpeech-1.1/wavs/LJ013-0173.wav|The researches of the police soon laid bare other suspicious facts.
LJSpeech-1.1/wavs/LJ014-0138.wav|Mrs. Manning became still more violent, shouting, "No, no, I will not stand it! You ought to be ashamed of yourselves!"
LJSpeech-1.1/wavs/LJ028-0165.wav|There is, however, a second inner wall, of less thickness than the first, but very little inferior to it in strength.
LJSpeech-1.1/wavs/LJ006-0048.wav|To these were still added an average of about fifty expecting the last penalty of the law; a certain number of transports awaiting removal to the colonies;
LJSpeech-1.1/wavs/LJ032-0133.wav|Lieutenant Day of the Dallas Police Department had "lifted" a palmprint from the underside of the gun barrel
LJSpeech-1.1/wavs/LJ038-0093.wav|Frequently, however, he was confronted with evidence which he could not explain, and he resorted to statements which are known to be lies.
LJSpeech-1.1/wavs/LJ018-0228.wav|Five or six years later, William Roupell minutely described how he had effected the fraud.
LJSpeech-1.1/wavs/LJ046-0084.wav|for the President soon after the assassination, quote,
LJSpeech-1.1/wavs/LJ033-0109.wav|the Commission has carefully considered the testimony of these two witnesses with regard to the length of the bag.
LJSpeech-1.1/wavs/LJ013-0158.wav|One morning in May his lordship was found dead in his bed with his throat cut.
LJSpeech-1.1/wavs/LJ036-0111.wav|Whaley's memory of the lineup is inaccurate. There were four men altogether, not six men, in the lineup with Oswald.
LJSpeech-1.1/wavs/LJ044-0082.wav|His attempt to express himself through his Fair Play for Cuba activities, however,
LJSpeech-1.1/wavs/LJ036-0208.wav|white male, approximately thirty, slender build, height five foot ten inches, weight one hundred sixty-five pounds, end quote.
LJSpeech-1.1/wavs/LJ038-0255.wav|Firearms identification.
LJSpeech-1.1/wavs/LJ031-0111.wav|The elliptical wound in the Governor's back, located slightly to the left of the Governor's right armpit approximately five-eighths inch (a centimeter and a half)
LJSpeech-1.1/wavs/LJ006-0246.wav|On another occasion a young man, who was being violently teased, seized a knife and stabbed his tormentor in the back.
LJSpeech-1.1/wavs/LJ027-0167.wav|Then the gills gradually dry up, as the lungs develop, and they now breathe wholly by lungs, but still retain the tail.
LJSpeech-1.1/wavs/LJ033-0187.wav|However, the complete identity of characteristics between the paper and tape in the bag found on the sixth floor
LJSpeech-1.1/wavs/LJ009-0284.wav|It was stated in evidence before the Commission on Capital Punishment in eighteen sixty-four,
LJSpeech-1.1/wavs/LJ009-0249.wav|When Charles White was executed in eighteen twenty-three for arson, he arranged a handkerchief
LJSpeech-1.1/wavs/LJ015-0149.wav|peas at ten shillings a quart, five-guinea pines, and early asparagus were to be found on his table.
LJSpeech-1.1/wavs/LJ019-0330.wav|Dietaries were drawn up for adoption on the recommendation of a committee of experts.
LJSpeech-1.1/wavs/LJ012-0118.wav|It was a large gold brooch set in pearls, but a portion of the mounting had melted with the heat.
LJSpeech-1.1/wavs/LJ008-0071.wav|In the few years which elapsed between the establishment of the gallows at Newgate
LJSpeech-1.1/wavs/LJ015-0253.wav|he handed over to Pierce a sum of three thousand pounds, his own, whether rightly or wrongly acquired never came out,
LJSpeech-1.1/wavs/LJ045-0102.wav|things apparently went quite smoothly from the time Oswald returned from Mexico until the weekend of November sixteen to seventeen, nineteen sixty-three.
LJSpeech-1.1/wavs/LJ009-0256.wav|Still he resisted.
LJSpeech-1.1/wavs/LJ050-0055.wav|that the PRS files can no longer be limited largely to persons communicating actual threats to the President.
LJSpeech-1.1/wavs/LJ034-0037.wav|Someone sitting on the box facing the window would have his palm in this position if he placed his hand alongside his right hip.
LJSpeech-1.1/wavs/LJ020-0081.wav|and knead for ten minutes, carefully at first, lest the liquids should be wasted, and more boldly when they are absorbed by the paste.
LJSpeech-1.1/wavs/LJ009-0077.wav|The ordinary of Newgate is an orthodox, unaffected, Church of England divine,
LJSpeech-1.1/wavs/LJ008-0107.wav|in his canonicals, and with his head as stiffly erect as a sheriff's coachman.
LJSpeech-1.1/wavs/LJ043-0013.wav|Part of the problem resulted from the fact that, as Jeanne De Mohrenschildt testified,
LJSpeech-1.1/wavs/LJ037-0225.wav|five foot eight inches, black hair, slender, wearing a white jacket, white shirt and dark slacks, end quote,
LJSpeech-1.1/wavs/LJ012-0294.wav|without hesitation brought in a verdict of willful murder.
LJSpeech-1.1/wavs/LJ042-0192.wav|are preferred rather than loud and useless manifestations of protest, end quote, Oswald went on to note, quote,
LJSpeech-1.1/wavs/LJ016-0078.wav|but had to come down again covered with soot and filth just as the officers entered the ward.
LJSpeech-1.1/wavs/LJ028-0174.wav|Other ancient descriptions of the walls have been left us by Ctesias of the fifth century B.C., and by Strabo of the beginning of the Christian era,
LJSpeech-1.1/wavs/LJ019-0002.wav|The time at length approached when a radical and complete change was to come over the old city jail.
LJSpeech-1.1/wavs/LJ032-0271.wav|(two) Oswald's palmprint was on the rifle in a position which shows that he had handled it while it was disassembled,
LJSpeech-1.1/wavs/LJ018-0325.wav|But extra precautions and close supervision have so far proved effectual, and the prisoners are still in custody after a lapse of ten years.
LJSpeech-1.1/wavs/LJ048-0259.wav|However, Chief Rowley did not condone the action of the off-duty agents, particularly since it violated a regulation of the Secret Service,
LJSpeech-1.1/wavs/LJ009-0099.wav|Meanwhile the clergyman, still bent into the form of a sleeping dog,
LJSpeech-1.1/wavs/LJ034-0180.wav|The man was dressed in a light-colored, open-neck shirt which could have been either a sports shirt or a T-shirt,
LJSpeech-1.1/wavs/LJ024-0057.wav|Why then should we leave the fulfillment of this public policy to chance
LJSpeech-1.1/wavs/LJ018-0260.wav|Mr. Justice Byles, in passing sentence, commented severely upon the commission of such crimes by a man in Roupell's position in life,
LJSpeech-1.1/wavs/LJ007-0095.wav|Prisoners indeed were known to boast that they had saved their necks by feigning insanity.
LJSpeech-1.1/wavs/LJ005-0117.wav|Numbers of the jails were still unprovided with chaplains, and the prisoners never heard Divine service.
LJSpeech-1.1/wavs/LJ006-0168.wav|to taking the descriptions of newly-arrived prisoners.
LJSpeech-1.1/wavs/LJ011-0117.wav|devoted its efforts first to a mitigation of the forgery statute, but could not immediately accomplish much.
LJSpeech-1.1/wavs/LJ007-0223.wav|The prison officials appear to be on the side of the inspectors, to the great dissatisfaction of the corporation, who claimed the full allegiance and support of its servants.
LJSpeech-1.1/wavs/LJ009-0176.wav|Seven other crimes, however, were still capital by law, and so continued till the passing of the Criminal Consolidation Acts of eighteen sixty-one.
LJSpeech-1.1/wavs/LJ034-0119.wav|Approximately seven or eight minutes later
LJSpeech-1.1/wavs/LJ014-0226.wav|Only a few have vied with Cannon in fiendish cruelty and brutality.
LJSpeech-1.1/wavs/LJ045-0074.wav|In the letter Marina Oswald stated that her husband had changed a great deal and that she was very lonely in the United States.
LJSpeech-1.1/wavs/LJ012-0044.wav|When his trade was busiest he set up a second establishment, at the head of which, although he was married,
LJSpeech-1.1/wavs/LJ027-0012.wav|All have the same ultimate substance
LJSpeech-1.1/wavs/LJ028-0254.wav|The people, enjoying the greater freedom which Cyrus permitted them, were contented, and life in Babylon went on about as before.
LJSpeech-1.1/wavs/LJ002-0326.wav|The poor debtors were not supplied with beds. Those who could pay the price might hire them from each other,
LJSpeech-1.1/wavs/LJ014-0259.wav|Watts led two lives.
LJSpeech-1.1/wavs/LJ035-0067.wav|from the sixth floor by the time Baker and Truly arrived, Commission counsel asked Baker and Truly to repeat their movements from the time of the shot
LJSpeech-1.1/wavs/LJ010-0146.wav|Attacks upon the sovereign, as I have said, became more common after the accession of the young Queen Victoria in eighteen thirty-eight.
LJSpeech-1.1/wavs/LJ007-0084.wav|The inspectors in the following year, on examining the facts, found that some of these poor creatures had been in confinement for long periods:
LJSpeech-1.1/wavs/LJ049-0204.wav|While in accordance with its mandate
LJSpeech-1.1/wavs/LJ011-0035.wav|Every endeavor was used, however, to obtain a commutation of sentence. His case was twice argued before the judges on points of law,
LJSpeech-1.1/wavs/LJ021-0001.wav|The Fireside Chats of Franklin Delano Roosevelt, by Franklin D Roosevelt, Section six.
LJSpeech-1.1/wavs/LJ008-0148.wav|One night he was missing
LJSpeech-1.1/wavs/LJ011-0237.wav|The jewelers were always a favorite prey of the London thieves.
LJSpeech-1.1/wavs/LJ017-0272.wav|"Ah!" he remarked, "they will have to wait for us then till eight."
LJSpeech-1.1/wavs/LJ049-0067.wav|the radio net in use in motorcades is elaborate and permits a number of different means of communication with various local points.
LJSpeech-1.1/wavs/LJ032-0171.wav|and that this was the same shirt which Oswald wore on the morning of the assassination.
LJSpeech-1.1/wavs/LJ048-0132.wav|which would bring to bear the judgment and experience of members of the White House detail other than the advance agent.
LJSpeech-1.1/wavs/LJ006-0025.wav|France had sent Misseurs Beaumont and De Tocqueville, who subsequently published several interesting works on the subject.
LJSpeech-1.1/wavs/LJ043-0176.wav|If the attack had succeeded and Oswald had been caught, the pictures showing him with his rifle
LJSpeech-1.1/wavs/LJ044-0191.wav|Now there appeared to be no chance to get to Cuba, where he had thought he might find his communist ideal. The U.S. Government would not permit travel there
LJSpeech-1.1/wavs/LJ038-0011.wav|A police car made a U-turn, and as the sirens grew fainter,
LJSpeech-1.1/wavs/LJ002-0244.wav|but its business was much reduced by the extension of the Courts of Conscience.
LJSpeech-1.1/wavs/LJ031-0209.wav|X-rays and photographs were taken preliminarily and the pathological examination began at about eight p.m.
LJSpeech-1.1/wavs/LJ042-0032.wav|and of his initial commitment to that country can best be understood, however, in the context
LJSpeech-1.1/wavs/LJ009-0132.wav|Although this misapplication of religious services still went on,
LJSpeech-1.1/wavs/LJ034-0048.wav|The freshness of prints developed in this manner cannot be estimated,
LJSpeech-1.1/wavs/LJ043-0023.wav|and helped to move the personal effects of Marina Oswald and the baby.
LJSpeech-1.1/wavs/LJ015-0216.wav|This was an important step, and they might easily be robbed some day when Burgess was the guard, provided only that they could be opened.
LJSpeech-1.1/wavs/LJ006-0180.wav|the interior of the jail was more like a bear-garden or the noisy purlieus of a public-house than a prison.
LJSpeech-1.1/wavs/LJ016-0342.wav|The first private execution under the new law took place within the precincts of Maidstone Jail.
LJSpeech-1.1/wavs/LJ025-0170.wav|for it is only the green parts of the plant which, under the influence of sunlight, have the marvelous power of decomposing carbonic acid,
LJSpeech-1.1/wavs/LJ047-0076.wav|In New Orleans. In the middle of May of nineteen sixty-three, Agent Hosty checked Oswald's last known residence and found that he had moved.
LJSpeech-1.1/wavs/LJ005-0011.wav|were first made use of about eighteen twenty-seven. That the need for prison reform was imperative may be gathered from the few out of many instances I have adduced,
LJSpeech-1.1/wavs/LJ033-0142.wav|because the cartons stacked around the southeast corner would shield him.
LJSpeech-1.1/wavs/LJ018-0005.wav|the public mind was greatly agitated by the affair for several months. The story of the murder must be pretty familiar to most of my readers.
LJSpeech-1.1/wavs/LJ049-0183.wav|regarding such threats and that its Protective Research Section is not adequately staffed or equipped
LJSpeech-1.1/wavs/LJ036-0031.wav|and requested a transfer which she might use if she got through the traffic.
LJSpeech-1.1/wavs/LJ011-0285.wav|The door of his place of durance stood open, and Mr. Gee began to consider whether he might not escape.
LJSpeech-1.1/wavs/LJ041-0114.wav|three months prior to his regularly scheduled separation date, ostensibly to care for his mother who had been injured in an accident at her work.
LJSpeech-1.1/wavs/LJ012-0134.wav|Presently the proper person arrived from the consignees, but found the gold-dust gone.
LJSpeech-1.1/wavs/LJ011-0005.wav|A lady in the country, who had thirteen thousand pounds in the stocks, desired her London agent to sell them out.
LJSpeech-1.1/wavs/LJ028-0087.wav|Such was the appearance of the builder of the walls of Babylon.
LJSpeech-1.1/wavs/LJ016-0329.wav|a bill was introduced by Mr. Hibbert, M.P., and accepted by the Government, providing for the future carrying out of executions within prisons.
LJSpeech-1.1/wavs/LJ034-0017.wav|could look southwesterly down Elm Street over the top of the "Rolling Readers" cartons.
LJSpeech-1.1/wavs/LJ044-0086.wav|executive director of the Information Council of the Americas, who also appeared on the program.
LJSpeech-1.1/wavs/LJ038-0100.wav|On November twenty-three, Fritz confronted Oswald with the evidence that he had purchased a rifle under the fictitious name of "Hidell."
LJSpeech-1.1/wavs/LJ049-0019.wav|The last Presidential vehicle with any protection against small-arms fire left the White House in nineteen fifty-three.
LJSpeech-1.1/wavs/LJ021-0125.wav|it was natural that the workers should seek and obtain a statutory declaration of their constitutional right
LJSpeech-1.1/wavs/LJ019-0294.wav|The prison buildings were in many places out of repair; other houses often overlooked them.
LJSpeech-1.1/wavs/LJ009-0211.wav|and on the right the ripping chisel, with which the murders had been committed, were exposed to view.
LJSpeech-1.1/wavs/LJ044-0172.wav|and left for Irving with Marina Oswald and June and most of the Oswalds' effects three days later.
LJSpeech-1.1/wavs/LJ047-0129.wav|FBI informants in the New Orleans area, familiar with pro-Castro or Communist Party activity there,
LJSpeech-1.1/wavs/LJ024-0139.wav|has been tipped out of balance by the courts in direct contradiction of the high purposes of the framers of the Constitution.
LJSpeech-1.1/wavs/LJ005-0106.wav|Jails, of which the old prison at Reading was a specimen, were still left intact.
LJSpeech-1.1/wavs/LJ042-0247.wav|In August of nineteen sixty-three, he gave the New Orleans police as a reason for refusing to permit his family to learn English,
LJSpeech-1.1/wavs/LJ047-0092.wav|On August nine, nineteen sixty-three,
LJSpeech-1.1/wavs/LJ026-0166.wav|back to starch usable as food and the comparison of the green plant and the animal would be complete.
LJSpeech-1.1/wavs/LJ033-0019.wav|According to the testimony of Frazier, Marina Oswald, and Ruth Paine, it appears that Oswald never returned to Irving in midweek
LJSpeech-1.1/wavs/LJ042-0172.wav|must have as its nucleus the traditional ideological best of both systems, and yet be utterly opposed to both systems.
LJSpeech-1.1/wavs/LJ027-0018.wav|All are forced to make concession after concession to their surroundings, and in these concessions all progress in life consists.
LJSpeech-1.1/wavs/LJ041-0187.wav|and he wanted to be on the winning side so that ten thousand years from now people would look in the history books and say, "Well, this man was ahead of his time."
LJSpeech-1.1/wavs/LJ048-0286.wav|Nor is this goal served when agents remain out until early morning hours, and lose the opportunity to get a reasonable amount of sleep.
LJSpeech-1.1/wavs/LJ018-0037.wav|In searching the prisoner's box, Mr. Briggs' watch was found wrapped up in a piece of leather,
LJSpeech-1.1/wavs/LJ009-0044.wav|His features have no felonious cast;
LJSpeech-1.1/wavs/LJ045-0100.wav|She thought that he might not have become involved in the assassination if people had been kinder to him.
LJSpeech-1.1/wavs/LJ035-0149.wav|She ran inside and up the front stairs into the large open office reserved for clerical employees.
LJSpeech-1.1/wavs/LJ028-0188.wav|In five thirty-eight the city fell, and for a time it became the home of the Persian King.
LJSpeech-1.1/wavs/LJ003-0320.wav|which recommended restrictions upon the number of visitors admitted.
LJSpeech-1.1/wavs/LJ013-0241.wav|The policeman insisted on searching the premises, at which Good displayed some uneasiness.
LJSpeech-1.1/wavs/LJ018-0194.wav|Cummings was repeatedly "run in" for the offense of coining and uttering bad money, whether coin or notes.
LJSpeech-1.1/wavs/LJ046-0135.wav|PRS received items in eight thousand, seven hundred nine cases.
LJSpeech-1.1/wavs/LJ046-0143.wav|These instructions to PRS personnel appear to be the only instance where an effort was made to reduce the criteria to writing.
LJSpeech-1.1/wavs/LJ048-0103.wav|and with the concurrence of the Dallas police, was entirely appropriate, in view of the known desires of the President.
LJSpeech-1.1/wavs/LJ038-0279.wav|I think is going overboard in the other direction.
LJSpeech-1.1/wavs/LJ044-0117.wav|that there were people who understood his activity, end quote.
LJSpeech-1.1/wavs/LJ028-0485.wav|The outer and inner defenses of Babylon were so strong and so high that no enemy could hope to take them,
LJSpeech-1.1/wavs/LJ031-0174.wav|After the President was pronounced dead,
LJSpeech-1.1/wavs/LJ026-0020.wav|If chlorophyll is present, the carbon dioxide of the air serves as a source of carbon,
LJSpeech-1.1/wavs/LJ027-0136.wav|Illustrations quoted from the works of Romanes and Le Conte will make this principle clear.
LJSpeech-1.1/wavs/LJ002-0113.wav|in an age when insolvent acts and bankruptcy courts do so much to relieve the impecunious,
LJSpeech-1.1/wavs/LJ004-0113.wav|It was further ordered that male prisoners should be kept perfectly distinct from the females.
LJSpeech-1.1/wavs/LJ044-0115.wav|he felt that this was a great man that he had received the letter from, end quote.
LJSpeech-1.1/wavs/LJ039-0012.wav|The Commission first learned of this incident when Robert Oswald related it to FBI agents on February nineteen, nineteen sixty-four,
LJSpeech-1.1/wavs/LJ014-0164.wav|as the wickedness and levity of the immense crowd collected at the execution this morning could be imagined by no man,
LJSpeech-1.1/wavs/LJ050-0018.wav|and to keep the Secretary fully informed regarding all significant developments relating to Presidential protection.
LJSpeech-1.1/wavs/LJ012-0131.wav|The letter informed him of the marks and sizes of the cases containing the precious metal,
LJSpeech-1.1/wavs/LJ016-0308.wav|yet the witnesses were not unanimous.
LJSpeech-1.1/wavs/LJ028-0332.wav|Once more, however, he waited till the interval appointed had gone by, and then leading the troops to the place where the four thousand were,
LJSpeech-1.1/wavs/LJ006-0251.wav|but the presence and authority of the governor himself became indispensable.
LJSpeech-1.1/wavs/LJ006-0016.wav|These considerations no doubt had weight
LJSpeech-1.1/wavs/LJ031-0093.wav|Answer: No, sir. Before -- well, in trying to treat an acutely injured patient, you have to establish an airway, adequate ventilation
LJSpeech-1.1/wavs/LJ042-0163.wav|After, however, two years and a lot of growing up, I decided to return to the USA.
LJSpeech-1.1/wavs/LJ031-0220.wav|During the autopsy examination, Federal agents brought the surgeons three pieces of bone recovered from Elm Street and the Presidential automobile.
LJSpeech-1.1/wavs/LJ030-0050.wav|The Presidential limousine.
LJSpeech-1.1/wavs/LJ012-0010.wav|both having been recognized by the clergyman who had performed the ceremony, and the assault had been committed to secure the money
LJSpeech-1.1/wavs/LJ004-0213.wav|Compared with those highly meritorious institutions Newgate still showed but badly.
LJSpeech-1.1/wavs/LJ010-0061.wav|That some thirty or more needy men should hope to revolutionize England is a sufficient proof of the absurdity of their attempt.
LJSpeech-1.1/wavs/LJ022-0195.wav|But it is more than the recovery of the material basis of our individual lives.
LJSpeech-1.1/wavs/LJ039-0102.wav|After familiarization with live ammunition in the twenty-two rifle and the twenty-two pistol,
LJSpeech-1.1/wavs/LJ020-0073.wav|Sift the flour, salt and sugar into a bowl,
LJSpeech-1.1/wavs/LJ040-0038.wav|Such ideas of grandeur were apparently accompanied by notions of oppression.
LJSpeech-1.1/wavs/LJ019-0049.wav|the principles of which were debated by disputants of widely opposite opinions with an earnestness that sometimes bordered upon acrimony.
LJSpeech-1.1/wavs/LJ050-0012.wav|through an Assistant Secretary whose duties also include the direct supervision of the Bureau of the Mint
LJSpeech-1.1/wavs/LJ007-0117.wav|where the upper ward was exclusively appropriated to their use. They also had their meals sent in, and, with the food, wine almost ad libitum.
LJSpeech-1.1/wavs/LJ004-0169.wav|On the dirty bedstead lay a wretched being in the throes of severe illness.
LJSpeech-1.1/wavs/LJ019-0127.wav|or the still more costly process of walling in the whole farm, would have greatly added to the charges of these establishments.
LJSpeech-1.1/wavs/LJ014-0141.wav|and stretching out her hand, she gathered up a quantity of the rue which, following ancient custom dating from the days of the jail fever,
LJSpeech-1.1/wavs/LJ037-0041.wav|The man appeared to step back as the policeman, quote, calmly opened the car door, end quote, and very slowly got out and walked toward the front of the car.
LJSpeech-1.1/wavs/LJ012-0023.wav|He was taken up when still in his teens for stealing a pocketbook, and was sentenced to transportation, but did not get beyond the hulks at Chatham.
LJSpeech-1.1/wavs/LJ032-0115.wav|A few minutes after the rifle was discovered on the sixth floor of the Depository Building
LJSpeech-1.1/wavs/LJ047-0007.wav|It had interviewed him twice shortly after his return to the United States, again a year later at his request
LJSpeech-1.1/wavs/LJ006-0049.wav|an occasional prisoner or two committed by the Houses of Parliament, the Courts of King's Bench, Common Pleas,
LJSpeech-1.1/wavs/LJ028-0065.wav|Eleven years later, in five eighty-six, he destroyed the sacred Hebrew city,
LJSpeech-1.1/wavs/LJ049-0076.wav|The Commission's review of the provisions for Presidential protection at the time of President Kennedy's trip to Dallas demonstrates the need for substantial improvements.
LJSpeech-1.1/wavs/LJ003-0091.wav|Constantly associated with these convicted felons were numbers of juveniles, infants of tender years.
LJSpeech-1.1/wavs/LJ050-0030.wav|The Commission also recommends
LJSpeech-1.1/wavs/LJ013-0122.wav|Stealing plate was about this period the crime of a more aristocratic thief.
LJSpeech-1.1/wavs/LJ046-0013.wav|Prompted by these dismaying statistics, the Commission has inquired into the problems and methods of Presidential protection in effect
LJSpeech-1.1/wavs/LJ035-0134.wav|that they were watching the parade from the top step of the building entrance when Gloria Calverly, who works in the Depository Building,
LJSpeech-1.1/wavs/LJ016-0232.wav|and he owned a pet pony which would follow him about like a dog.
LJSpeech-1.1/wavs/LJ020-0023.wav|If too stiff, warm water, a spoonful at a time until you can handle the paste easily. The danger is in getting it too stiff. Now.
LJSpeech-1.1/wavs/LJ005-0046.wav|The good it tried to do took active shape in the establishment of temporary refuges -- at Hoxton for males, and in the Hackney Road for females
LJSpeech-1.1/wavs/LJ010-0019.wav|As time passed,
LJSpeech-1.1/wavs/LJ049-0130.wav|The Secret Service must rely in large part
LJSpeech-1.1/wavs/LJ024-0023.wav|ever since a similar proposal passed the House of Representatives in eighteen sixty-nine.
LJSpeech-1.1/wavs/LJ018-0315.wav|to whom it was said one hundred pounds apiece had been given down as the price of their infidelity.
LJSpeech-1.1/wavs/LJ029-0037.wav|Advance preparations for President Kennedy's visit to Dallas were primarily the responsibility of two Secret Service agents:
LJSpeech-1.1/wavs/LJ049-0218.wav|between the Secret Service and the President and his family is contemplated.
LJSpeech-1.1/wavs/LJ003-0155.wav|Tailoring and shoemaking was permitted, but it was deemed unsafe to allow a carpenter or blacksmith to have his tools.
LJSpeech-1.1/wavs/LJ013-0113.wav|Robberies as daring in conception as they were boldly executed were common enough.
LJSpeech-1.1/wavs/LJ045-0047.wav|and I told him that
LJSpeech-1.1/wavs/LJ006-0065.wav|were associated together, "of every variety of age, habit, and delinquency, without employment, oversight, or control."
LJSpeech-1.1/wavs/LJ003-0316.wav|It should be peremptorily forbidden to the keeper or any officer to make a pecuniary profit out of the supplies of food, fuel, or other necessaries.
LJSpeech-1.1/wavs/LJ021-0004.wav|Tonight I continue that report, though, because of the shortness of time, I must defer a number of subjects to a later date.
LJSpeech-1.1/wavs/LJ031-0022.wav|Charles R. Baxter, Robert N. McClelland, Ronald C. Jones; the chief neurologist, Dr. William Kemp Clark;
LJSpeech-1.1/wavs/LJ007-0030.wav|consisted of two dozen rooms and fifteen cells. In these various chambers, until just before the inspectors made their report,
LJSpeech-1.1/wavs/LJ021-0137.wav|Step by step we have created all the government agencies necessary to insure, as a general rule, industrial peace,
LJSpeech-1.1/wavs/LJ033-0081.wav|she looked out the breakfast-room window and saw Oswald cross the street and walk toward the driveway where her brother parked his car near the carport.
LJSpeech-1.1/wavs/LJ003-0218.wav|The chapel was filled with a curious but callous congregation, who came to stare at the miserable people thus publicly exposed.
LJSpeech-1.1/wavs/LJ028-0317.wav|Introduced into their assembly, he began to bewail his misfortunes, telling them that
LJSpeech-1.1/wavs/LJ047-0014.wav|the Office of Naval Intelligence, the FBI and the CIA. The information known to the FBI is summarized below.
LJSpeech-1.1/wavs/LJ002-0067.wav|but really kept for the few who had funds sufficient to gain them admission to these more comfortable quarters.
LJSpeech-1.1/wavs/LJ003-0101.wav|must have had a tendency to turn them into the world hardened and accomplished in the ways of vice and crime. End quote.
LJSpeech-1.1/wavs/LJ036-0048.wav|She boarded the Marsalis bus at St. Paul and Elm Streets to return home. She testified further, quote,
LJSpeech-1.1/wavs/LJ022-0129.wav|in making this the most efficient and the cleanest example of public enterprise the world has ever seen.
LJSpeech-1.1/wavs/LJ038-0121.wav|or to answer any questions concerning the card.
LJSpeech-1.1/wavs/LJ031-0095.wav|Before this was accomplished the President's cardiac activity had ceased and closed cardiac massage was instituted, which made it impossible to inspect his back.
LJSpeech-1.1/wavs/LJ007-0131.wav|Enough has probably been extracted from this most damnatory report to give a complete picture of the disgraceful state in which Newgate still remained in eighteen thirty-five.
LJSpeech-1.1/wavs/LJ001-0067.wav|In the Low Countries and Cologne, which were very fertile of printed books, Gothic was the favorite.
LJSpeech-1.1/wavs/LJ011-0061.wav|Let this monster give his name; I am ready to fight him. I am still determined to put myself in the place of Mr. Fauntleroy.
LJSpeech-1.1/wavs/LJ019-0381.wav|in another there was half-heartedness, even apathy and an almost complete contempt for the provisions of the act.
LJSpeech-1.1/wavs/LJ012-0170.wav|According to his statement, when sentenced to death, he had been driven to horse-stealing by the execration which had pursued him after the murder.
LJSpeech-1.1/wavs/LJ005-0090.wav|the first by daily services, the latter by the appointment of schoolmasters and instruction in reading and writing.
LJSpeech-1.1/wavs/LJ049-0127.wav|agencies other than the Secret Service have become involved in phases of the overall problem of protecting our national leaders.
LJSpeech-1.1/wavs/LJ004-0100.wav|An infirmary, consisting of two distinct rooms, one for males and one for females, should be provided for the separate accommodation of the sick.
LJSpeech-1.1/wavs/LJ003-0148.wav|and spent in providing coals, candles, plates, knives, and forks; while all the occupants of this part of the prison
LJSpeech-1.1/wavs/LJ005-0073.wav|To its efforts, and their effect upon Parliament and the public mind, we must attribute the new Jail Acts of four George the fourth
LJSpeech-1.1/wavs/LJ003-0166.wav|association at one time forbidden by custom, but which greed and rapacity long made the rule.
LJSpeech-1.1/wavs/LJ028-0076.wav|However, several decades ago, an Oriental appeared at the Berlin Museum,
LJSpeech-1.1/wavs/LJ012-0253.wav|A further discovery was made in an osier bed near Cold Harbor Lane, Camberwell,
LJSpeech-1.1/wavs/LJ024-0053.wav|Fundamentally, if in the future, America cannot trust the Congress it elects to refrain from abuse of our Constitutional usages
LJSpeech-1.1/wavs/LJ032-0069.wav|The person having access to the box then takes the notice to the window and is given the package.
LJSpeech-1.1/wavs/LJ037-0082.wav|On the evening of November twenty-two,
LJSpeech-1.1/wavs/LJ040-0085.wav|John Pic, however, did not think her position was worse than that of many other people.
LJSpeech-1.1/wavs/LJ028-0099.wav|the first-born son of Nabopolassar, King of Babylon, am I.
LJSpeech-1.1/wavs/LJ004-0170.wav|The only ventilation of this pit, this "dark, cheerless, damp, unwholesome cavern -- a dungeon in its worst sense"
LJSpeech-1.1/wavs/LJ022-0110.wav|The key men for the major responsibilities of this great task already have been selected.
LJSpeech-1.1/wavs/LJ024-0116.wav|When the time comes for action,
LJSpeech-1.1/wavs/LJ040-0161.wav|Dr. Hartogs recommended that Oswald be placed on probation on condition that he seek help and guidance through a child guidance clinic.
LJSpeech-1.1/wavs/LJ032-0266.wav|Paul M. Stombaugh, of the FBI Laboratory,
LJSpeech-1.1/wavs/LJ006-0086.wav|his place is assigned among the most depraved, the most experienced, and the most incorrigible offenders in the middle yard.
LJSpeech-1.1/wavs/LJ038-0228.wav|and into downtown Dallas through the Triple Underpass.
LJSpeech-1.1/wavs/LJ028-0319.wav|"And now," he went on to say, "my coming to you, Babylonians,
LJSpeech-1.1/wavs/LJ023-0054.wav|I hope that you have re-read the Constitution of the United States in these past few weeks.
LJSpeech-1.1/wavs/LJ028-0108.wav|Fortunately in several of his long inscriptions, recently discovered in the Babylonian mounds, Nebuchadnezzar speaks of the building of the walls.
LJSpeech-1.1/wavs/LJ042-0134.wav|The psychological effects of that change must have been highly unsettling. It should be remembered
LJSpeech-1.1/wavs/LJ032-0083.wav|Experts on questioned documents from the Treasury Department and the FBI testified that the Hidell cards were counterfeit photographic reproductions
LJSpeech-1.1/wavs/LJ036-0216.wav|Tippit got out and started to walk around the front of the car
LJSpeech-1.1/wavs/LJ002-0281.wav|The demands for fees were excessive in Giltspur Street.
LJSpeech-1.1/wavs/LJ034-0169.wav|the same corner where Brennan was sitting on a concrete wall.
LJSpeech-1.1/wavs/LJ009-0004.wav|would be astonished to observe the peculiar tenderness, I was going to add respect,
LJSpeech-1.1/wavs/LJ004-0094.wav|This act set forth that "whereas the malignant fever commonly called the jail distemper
LJSpeech-1.1/wavs/LJ034-0122.wav|As will be discussed fully below, the Commission has concluded that this suspect was Lee Harvey Oswald.
LJSpeech-1.1/wavs/LJ033-0179.wav|since the original bag had been discolored during various laboratory examinations and could not be used for valid identification by witnesses.
LJSpeech-1.1/wavs/LJ022-0094.wav|in whose field the project falls, and also to notify another agency which I am creating -- a Progress Division.
LJSpeech-1.1/wavs/LJ003-0334.wav|It made this too the excuse for begging the most important issue of the whole question.
LJSpeech-1.1/wavs/LJ004-0034.wav|Moreover, the laws applied more particularly to county jurisdictions.
LJSpeech-1.1/wavs/LJ048-0254.wav|advised, in the course of the Secret Service investigation of these events, that each agent reported for duty on time,
LJSpeech-1.1/wavs/LJ025-0038.wav|carbon, hydrogen and oxygen.
LJSpeech-1.1/wavs/LJ036-0217.wav|As Tippit reached the left front wheel the man pulled out a revolver and fired several shots.
LJSpeech-1.1/wavs/LJ043-0100.wav|It is not possible to tell whether Oswald did this to provide an excuse for his eventual discharge,
LJSpeech-1.1/wavs/LJ005-0222.wav|and as the window-frames would not shut tight, the prisoners complained much of the cold, especially at night.
LJSpeech-1.1/wavs/LJ032-0040.wav|were written the words "A. Hidell, P.O. Box two nine one five Dallas, Texas."
LJSpeech-1.1/wavs/LJ015-0011.wav|Maltby, who had bolted, was pursued and arrested, to end his life miserably by committing suicide in a Newgate cell.
LJSpeech-1.1/wavs/LJ032-0153.wav|A palmprint could not be placed on this portion of the rifle, when assembled, because the wooden foregrip covers the barrel at this point.
LJSpeech-1.1/wavs/LJ029-0092.wav|On November eight, when Lawson was briefed on the itinerary for the trip to Dallas,
LJSpeech-1.1/wavs/LJ004-0132.wav|the old evils of indiscriminate association still continued unchecked.
LJSpeech-1.1/wavs/LJ039-0067.wav|which is ordinarily required when a marksman must raise his rifle as a target moves farther away.
LJSpeech-1.1/wavs/LJ044-0235.wav|If there was no conspiracy which would help him escape, the possibility of which has been considered in chapter six,
LJSpeech-1.1/wavs/LJ028-0144.wav|fifty royal cubits in width, and two hundred in height.
LJSpeech-1.1/wavs/LJ029-0102.wav|After the selection of the Trade Mart as the luncheon site,
LJSpeech-1.1/wavs/LJ009-0116.wav|On the following day the capital convicts, whose companions have been hanged, are required to return thanks for their narrow escape.
LJSpeech-1.1/wavs/LJ040-0228.wav|Despite his withdrawal, he gives the impression that he is not so difficult to reach as he appears and patient, prolonged effort
LJSpeech-1.1/wavs/LJ022-0020.wav|cause of clearer thinking and a better understanding, are considering the whole rather than a mere part relating to one section or to one crop,
LJSpeech-1.1/wavs/LJ047-0104.wav|reluctant and actually as far as I was concerned, was completely evasive on them. End quote.
LJSpeech-1.1/wavs/LJ031-0127.wav|a protective circle of Secret Service agents surrounded Vice President and Mrs. Johnson
LJSpeech-1.1/wavs/LJ043-0021.wav|While the exact sequence of events is not clear because of conflicting testimony,
LJSpeech-1.1/wavs/LJ007-0005.wav|The inspectors paid tribute to the excellence of the motives of these philanthropic ladies, and recognized the good they did.
LJSpeech-1.1/wavs/LJ018-0307.wav|Through Noyes the rest of the conspirators were eventually apprehended. Very little if any of the ill-gotten proceeds, however, was ever recovered.
LJSpeech-1.1/wavs/LJ029-0191.wav|He asserted that Dallas had shed its reputation of the twenties as the, quote, Southwest hate capital of Dixie, end quote
LJSpeech-1.1/wavs/LJ009-0088.wav|ignominy, sorrow, sufferings, wretchedness, pangs,
LJSpeech-1.1/wavs/LJ021-0192.wav|We are not frightened by reactionary lawyers or political editors.
LJSpeech-1.1/wavs/LJ038-0179.wav|(three) firearm identification of the bullet found in Walker's home, and (four)
LJSpeech-1.1/wavs/LJ028-0518.wav|It is not strange, then, that they were included among the Seven Wonders of the World,
LJSpeech-1.1/wavs/LJ026-0026.wav|As in the liquefaction of gases, there is a "critical point" at which the substance under experiment is neither gaseous nor liquid.
LJSpeech-1.1/wavs/LJ031-0090.wav|A thorough inspection would have involved washing and cleansing the back, and this is not practical in treating an acutely injured patient.
LJSpeech-1.1/wavs/LJ016-0110.wav|The third, Bell, remained longest at large. He too was run into at a lodging in the Kingsland Road.
LJSpeech-1.1/wavs/LJ032-0019.wav|Shortly after the Mannlicher-Carcano rifle was found on the sixth floor of the Texas School Book Depository Building, agents of the FBI
LJSpeech-1.1/wavs/LJ044-0146.wav|On June twenty-four, nineteen sixty-three, he applied for a new passport
LJSpeech-1.1/wavs/LJ048-0003.wav|Hosty's interpretation of the prevailing FBI instructions on referrals to the Secret Service was defended before the Commission by his superiors.
LJSpeech-1.1/wavs/LJ013-0194.wav|but on the second day the discovery of fresh evidence, more particularly the recovery of some of Lord William's stolen plate,
LJSpeech-1.1/wavs/LJ038-0224.wav|Another statement which limits the time when it could have been written is the reference, quote, you and the baby, end quote,
LJSpeech-1.1/wavs/LJ014-0147.wav|shaking her clenched and manacled hands in the officers' faces.
LJSpeech-1.1/wavs/LJ019-0168.wav|Renewed recommendations to provide employment resulted in the provision of a certain amount of oakum for picking,
LJSpeech-1.1/wavs/LJ029-0175.wav|Both Dallas papers cited White House sources on September twenty-six as confirming the President's intention to visit Texas on November twenty-one and twenty-two,
LJSpeech-1.1/wavs/LJ033-0078.wav|Neither she nor Mrs. Paine saw him leave the house. About half-a-block away from the Paine house was the residence of Mrs. Linnie Mae Randle,
LJSpeech-1.1/wavs/LJ040-0235.wav|When Lee became a disciplinary problem upon his return to school in the fall of nineteen fifty-three,
LJSpeech-1.1/wavs/LJ003-0322.wav|except for the use of the debtors, or as medical comforts for the infirmary.
LJSpeech-1.1/wavs/LJ018-0359.wav|her object being first to dispose of the wife of a man for whom she had conceived a guilty passion,
LJSpeech-1.1/wavs/LJ030-0128.wav|From Main Street the motorcade turned right and went north on Houston Street, passing tall buildings on the right,
LJSpeech-1.1/wavs/LJ033-0204.wav|So if I found all of these then I would have been able to say these fibers probably had come from this blanket. But since I found so few,
LJSpeech-1.1/wavs/LJ013-0042.wav|the foundations of which had been laid by buying old ships on purpose to cast them away.
LJSpeech-1.1/wavs/LJ041-0174.wav|and had not intended any criticism of Oswald's political views which is the way in which, Thornley thought, Oswald took his remarks.
LJSpeech-1.1/wavs/LJ030-0245.wav|I was pushed down by Agent Youngblood.
LJSpeech-1.1/wavs/LJ031-0103.wav|While Dr. Carrico went on to attend the President, Dr. Dulany stayed with the Governor and was soon joined by several other doctors.
LJSpeech-1.1/wavs/LJ048-0152.wav|At some overpasses all persons were excluded
LJSpeech-1.1/wavs/LJ018-0232.wav|He himself prepared it on a blank form which he had brought with him on purpose.
LJSpeech-1.1/wavs/LJ050-0200.wav|The Secret Service should utilize the personnel of other Federal law enforcement offices
LJSpeech-1.1/wavs/LJ012-0167.wav|But Probert, who turned king's evidence, and materially assisted conviction,
LJSpeech-1.1/wavs/LJ006-0225.wav|If any man presumed to turn in too early
LJSpeech-1.1/wavs/LJ014-0127.wav|She was smartly dressed in a plaid shawl, a white lace cap;
LJSpeech-1.1/wavs/LJ033-0021.wav|after the birth of their second child.
LJSpeech-1.1/wavs/LJ036-0080.wav|toward a light-colored Rambler station wagon, which was moving slowly along Elm toward the underpass:
LJSpeech-1.1/wavs/LJ008-0083.wav|Two cart-loads of faggots were piled about her, and after she had hung for half-an-hour the fire was kindled.
LJSpeech-1.1/wavs/LJ010-0282.wav|Pate was said to be an eccentric person, given to strange acts and antics, such as mixing whiskey and camphor with his morning bath-water,
LJSpeech-1.1/wavs/LJ013-0088.wav|the cashier gave them eight Bank of England notes for one thousand pounds each, saying that they could get so much specie nowhere else.
LJSpeech-1.1/wavs/LJ028-0279.wav|after which he commanded his servants to tell no one what had come to pass, while he himself pondered the matter.
LJSpeech-1.1/wavs/LJ002-0057.wav|These wards were all fitted with barrack-beds, but no bedding was supplied.
LJSpeech-1.1/wavs/LJ032-0253.wav|From September twenty-four, nineteen sixty-three, when Marina Oswald arrived in Irving from New Orleans, until the morning of the assassination,
LJSpeech-1.1/wavs/LJ043-0135.wav|Oswald shot at Maj. Gen. Edwin A. Walker (Resigned, U.S. Army),
LJSpeech-1.1/wavs/LJ025-0115.wav|and from that day to this the rapid improvement of methods of investigation and the energy of a host of accurate observers
LJSpeech-1.1/wavs/LJ050-0166.wav|The Commission was struck by the apparent lack of effort, on an interagency basis,
LJSpeech-1.1/wavs/LJ038-0026.wav|Other policemen entered the front door and searched the balcony.
LJSpeech-1.1/wavs/LJ028-0470.wav|Time has dealt even less kindly with it, for it may be traced only for the distance of about a mile along its eastern side.
LJSpeech-1.1/wavs/LJ018-0253.wav|For these crimes William Roupell was tried at the Central Criminal Court on the twenty-fourth September, eighteen sixty-two.
LJSpeech-1.1/wavs/LJ019-0147.wav|this occurred in summer at eight, but in the winter months it took place at dusk, and was often as early as four or five.
LJSpeech-1.1/wavs/LJ045-0148.wav|After all, when will all your foolishness come to an end? All of these comedies. First one thing and then another. And now this fictitious name, end quote.
LJSpeech-1.1/wavs/LJ043-0031.wav|I am surprised that he didn't do something worse, end quote.
LJSpeech-1.1/wavs/LJ033-0039.wav|and one which provided an excuse for the carrying of a bulky package the following morning.
LJSpeech-1.1/wavs/LJ010-0006.wav|Certain crimes, those against the person especially, diminished gradually. They became less easy or remunerative.
LJSpeech-1.1/wavs/LJ049-0005.wav|Rigorous security precautions had been arranged at Love Field with the local law enforcement authorities by Agents Sorrels and Lawson.
LJSpeech-1.1/wavs/LJ004-0142.wav|where a lad lay ill with fever, three other prisoners, at first perfectly healthy, were lodged. Of course they were seized with the fever;
LJSpeech-1.1/wavs/LJ042-0038.wav|and religion and education are used as a tool to suppress what would otherwise be a population questioning their government's unfair
LJSpeech-1.1/wavs/LJ046-0079.wav|The rights of private individuals must not be infringed.
LJSpeech-1.1/wavs/LJ026-0123.wav|which could be derived by the ordinary chemical evolution of protoplasm, proteid, sugar, starch or fats.
LJSpeech-1.1/wavs/LJ037-0255.wav|testified that Commission Exhibit Number one sixty-two was the jacket worn by the man they saw on November twenty-two.
LJSpeech-1.1/wavs/LJ028-0345.wav|He then chose out near three thousand of the leading citizens and caused them to be crucified, while he allowed the remainder still to inhabit the city.
LJSpeech-1.1/wavs/LJ045-0076.wav|The letter fell into Oswald's hands when it was returned to his post office box
LJSpeech-1.1/wavs/LJ027-0103.wav|Thus, for instance, the unborn whale has rudimentary teeth,
LJSpeech-1.1/wavs/LJ011-0076.wav|His offense was uttering forged notes, and there was strong suspicion that he had long subsisted entirely by this fraud.
LJSpeech-1.1/wavs/LJ047-0223.wav|I don't recall the exact date. It was about a week prior. End quote.
LJSpeech-1.1/wavs/LJ016-0369.wav|upon them devolved the painful duty of cutting down the body and preparing for the inquest.
LJSpeech-1.1/wavs/LJ050-0189.wav|that written instructions might come into the hands of local newspapers, to the prejudice of the precautions described.
LJSpeech-1.1/wavs/LJ019-0095.wav|which was yet under full control, and might be made to work corn-mills or prove otherwise productive;
LJSpeech-1.1/wavs/LJ029-0205.wav|for President Kennedy, stating that "in many respects Dallas County has isolated itself from the main stream of life in the world in this decade.
LJSpeech-1.1/wavs/LJ047-0045.wav|and promised to advise the FBI if he heard from them.
LJSpeech-1.1/wavs/LJ036-0069.wav|Instead of waiting there, Oswald apparently went as far away as he could and boarded the first Oak Cliff bus which came along
LJSpeech-1.1/wavs/LJ014-0180.wav|secure the stock of watches and jewelry, then lock up the place and take on the keys to Mr. Berry's private house in Pimlico.
LJSpeech-1.1/wavs/LJ021-0060.wav|Minimum wages have been established and other wages adjusted toward a rising standard of living.
LJSpeech-1.1/wavs/LJ002-0128.wav|He also makes the curious calculation that the costs of these actions if undefended
LJSpeech-1.1/wavs/LJ028-0437.wav|Here, it has been suggested, were the famous hanging gardens which some ancient authors included among the Seven Wonders of the World.
LJSpeech-1.1/wavs/LJ028-0234.wav|Cyrus was now reduced to great perplexity, as time went on and he made no progress against the place.
LJSpeech-1.1/wavs/LJ001-0050.wav|and though the famous family of Aldus restored its technical excellence, rejecting battered letters,
LJSpeech-1.1/wavs/LJ006-0154.wav|Nothing was more prominently brought out by the inspectors than the inefficiency of the governor at that time, Mr. Cope.
LJSpeech-1.1/wavs/LJ022-0148.wav|to enforce minimum wages, to prevent excessive hours,
LJSpeech-1.1/wavs/LJ035-0070.wav|Truly stood in front of the building.
LJSpeech-1.1/wavs/LJ028-0250.wav|Such, then, were the circumstances of the first taking of Babylon.
LJSpeech-1.1/wavs/LJ043-0001.wav|Report of the President's Commission on the Assassination of President Kennedy.
LJSpeech-1.1/wavs/LJ004-0171.wav|was by a kind of chimney, which the prisoners kept hermetically sealed, and which had never been opened in the memory of the turnkey.
LJSpeech-1.1/wavs/LJ025-0009.wav|for in the past fifty years it has been made evident that in general principles all living things are fundamentally similar.
LJSpeech-1.1/wavs/LJ010-0066.wav|which under Thistlewood as dictator was to rule the nation, by first handing over its capital to fire and pillage.
LJSpeech-1.1/wavs/LJ022-0139.wav|with which we have been concerned for two years.
LJSpeech-1.1/wavs/LJ014-0056.wav|while in Ireland a wife dashed out her husband's brains with a hammer.
LJSpeech-1.1/wavs/LJ037-0079.wav|They ran to the door in time to see a man with a revolver cut across their lawn and disappear around a corner of the house onto Patton.
LJSpeech-1.1/wavs/LJ032-0044.wav|shows an imprint made by the cash register which recorded the receipt of twenty-one dollars, forty-five cents on March thirteen, nineteen sixty-three.
LJSpeech-1.1/wavs/LJ036-0116.wav|Lee Oswald was Number three;
LJSpeech-1.1/wavs/LJ028-0476.wav|The entire width of this inner defense was about fifty-five feet; its height is uncertain.
LJSpeech-1.1/wavs/LJ004-0137.wav|and that it was accomplished by "sleeping edgewise."
LJSpeech-1.1/wavs/LJ003-0113.wav|A prisoner, generally the oldest and most dexterous thief,
LJSpeech-1.1/wavs/LJ037-0128.wav|were on the lot at the time, and they saw a white male with a revolver in his hands running south on Patton.
LJSpeech-1.1/wavs/LJ031-0137.wav|At approximately one:twenty p.m., Vice President Johnson was notified by O'Donnell that President Kennedy was dead.
LJSpeech-1.1/wavs/LJ005-0008.wav|they were followed by a crowd of reckless boys, who jeered at and insulted them.
LJSpeech-1.1/wavs/LJ001-0083.wav|The seventeenth century founts were bad rather negatively than positively.
LJSpeech-1.1/wavs/LJ006-0224.wav|New arrivals, especially the innocent and still guileless debutant, were tormented with rude horse-play, and assailed by the most insulting "chaff."
LJSpeech-1.1/wavs/LJ015-0298.wav|But while Hardwicke was in communication with Saward, the bank was in communication with London
LJSpeech-1.1/wavs/LJ017-0212.wav|Her captain was John Smith;
LJSpeech-1.1/wavs/LJ049-0096.wav|There have been a number of efforts to make assassination a Federal crime, particularly after the assassination of President McKinley
LJSpeech-1.1/wavs/LJ015-0069.wav|but the firm he served got him a situation as clerk in the office of the Great Northern Railway,
LJSpeech-1.1/wavs/LJ013-0243.wav|Good now offered to go to Wandsworth and satisfy the pawnbroker.
LJSpeech-1.1/wavs/LJ015-0235.wav|and last, but not least, Agar frequently traveled up and down the line to test the false keys he had manufactured with Pierce's assistance.
LJSpeech-1.1/wavs/LJ016-0096.wav|They were penal servitude men, their names Bell, Brown, and Barry, and they were awaiting transfer to Leicester,
LJSpeech-1.1/wavs/LJ029-0110.wav|The route impressed the agents as a natural and desirable one.
LJSpeech-1.1/wavs/LJ011-0098.wav|He soon, however, became deeply involved in Stock Exchange speculations,
LJSpeech-1.1/wavs/LJ001-0016.wav|The Middle Ages brought calligraphy to perfection, and it was natural therefore
LJSpeech-1.1/wavs/LJ005-0130.wav|There were tread-wheels at most of the prisons, and regular employment thereon or at some other kind of hard labor.
LJSpeech-1.1/wavs/LJ018-0091.wav|Wagner and Bateman, who had already been convicted of systematic forgery, and sentenced to transportation, but they had been released on ticket-of-leave
LJSpeech-1.1/wavs/LJ019-0053.wav|and our modern practice has prudently tried to steer between the two extremes, accepting as the best system a judicious combination of both.
LJSpeech-1.1/wavs/LJ023-0071.wav|For nearly twenty years there was no conflict between the Congress and the Court.
LJSpeech-1.1/wavs/LJ019-0390.wav|Since then a strong central authority has labored steadfastly to compass concentration,
LJSpeech-1.1/wavs/LJ047-0163.wav|According to Hosty, Mrs. Paine indicated that she thought she could find out where Oswald was living and would let him know.
LJSpeech-1.1/wavs/LJ016-0035.wav|the wall beneath and above it was "rusticated," in other words, the granite surface had become roughened, and offered a sort of foothold.
LJSpeech-1.1/wavs/LJ015-0211.wav|Each safe had three sets of double keys, all held by confidential servants of the company.
LJSpeech-1.1/wavs/LJ043-0148.wav|She testified that she was agitated because she had found the note in Oswald's room,
LJSpeech-1.1/wavs/LJ028-0207.wav|On the fourteenth day Sippar was taken without a battle.
LJSpeech-1.1/wavs/LJ007-0062.wav|Latterly his ministrations to the condemned had been restricted to a visit on Sunday afternoons, and occasionally about once a fortnight on a week-day.
LJSpeech-1.1/wavs/LJ049-0186.wav|the Commission received a number of proposals designed to improve current arrangements for protecting the President.
LJSpeech-1.1/wavs/LJ011-0196.wav|Mr. Turner at once set off for London, where he sought the assistance of the police,
LJSpeech-1.1/wavs/LJ003-0227.wav|So unjust and unequal was the system, that the allowance to convicted criminals was better than that of the innocent debtor,
LJSpeech-1.1/wavs/LJ047-0243.wav|According to Revill, Hosty indicated that he was going to tell this to Lieutenant Wells of the homicide and robbery bureau.
LJSpeech-1.1/wavs/LJ007-0116.wav|A few others, who could not afford a payment of more than half a guinea, were permitted to monopolize a part of the prison infirmary,
LJSpeech-1.1/wavs/LJ018-0243.wav|the hardship to the holders of these lands being plain, should the allegations of invalidity be made good.
LJSpeech-1.1/wavs/LJ007-0080.wav|These powers were not invariably put in force, and there were in consequence many unhappy lunatics in Newgate and other jails,
LJSpeech-1.1/wavs/LJ038-0037.wav|Oswald then struck McDonald between the eyes with his left fist; with his right hand he drew a gun from his waist.
LJSpeech-1.1/wavs/LJ043-0175.wav|The items which Oswald left at home when he made his attack on Walker suggest a strong concern for his place in history.
LJSpeech-1.1/wavs/LJ040-0114.wav|Relations soon became strained, however, so in late September Lee and his mother moved to their own apartment in the Bronx.
LJSpeech-1.1/wavs/LJ010-0241.wav|but she declared she would not remain a prisoner in her own palace, and next day drove out as usual in an open barouche.
LJSpeech-1.1/wavs/LJ037-0202.wav|identified records of Seaport Traders, Incorporated, which showed that a, quote, point three eight
LJSpeech-1.1/wavs/LJ019-0230.wav|In eighteen sixty-one a similar work was undertaken to provide separate cellular accommodation for the female inmates of Newgate,
LJSpeech-1.1/wavs/LJ010-0134.wav|He roared out snatches of a song about Death or Liberty, and just before he was turned off,
LJSpeech-1.1/wavs/LJ014-0005.wav|but too late to give substantial aid.
LJSpeech-1.1/wavs/LJ005-0186.wav|They neither built new jails nor contracted with the counties, as had been expected, for the transfer of their prisoners.
LJSpeech-1.1/wavs/LJ017-0003.wav|Nevertheless, in order to give completeness to the picture
LJSpeech-1.1/wavs/LJ020-0014.wav|beating the batter smooth as you go on until all of the liquid and flour has gone in.
LJSpeech-1.1/wavs/LJ014-0245.wav|It was the custom in this office to make the banker's passbook the basis of the entries in the company's ledgers.
LJSpeech-1.1/wavs/LJ008-0180.wav|Among the dead was a sailor lad whom no one knew;
LJSpeech-1.1/wavs/LJ019-0022.wav|On the other hand, it must be admitted
LJSpeech-1.1/wavs/LJ027-0034.wav|Hence, as Jordan has said, "the inside of an animal tells the real history of its ancestry; the outside tells us only where its ancestors have been."
LJSpeech-1.1/wavs/LJ040-0124.wav|This continued despite the efforts of the school authorities and, to a lesser extent, of his mother to have him return to school.
LJSpeech-1.1/wavs/LJ006-0192.wav|There was no school for adults; only the boys were taught anything, and their instructor, with his assistant, were convicted prisoners.
LJSpeech-1.1/wavs/LJ014-0229.wav|Mobbs systematically ill-used his wife for a long space of time, and at last cut her throat.
LJSpeech-1.1/wavs/LJ031-0162.wav|other terminal buildings and the neighboring parking lots, of all people.
LJSpeech-1.1/wavs/LJ032-0094.wav|listing Marina Oswald and A. J. Hidell
LJSpeech-1.1/wavs/LJ022-0155.wav|Power production in this country is virtually back to the nineteen twenty-nine peak.
LJSpeech-1.1/wavs/LJ009-0291.wav|He was always known as a mild-mannered man of simple tastes, much given to angling in the New River, and a devoted rabbit fancier.
LJSpeech-1.1/wavs/LJ006-0130.wav|had a key of both the master's side and middle side yards, was the only person present at the distribution of beer, and was trusted to examine,
LJSpeech-1.1/wavs/LJ040-0131.wav|Marguerite Oswald visited her son at Youth House, where she recalled that she waited in line, quote,
LJSpeech-1.1/wavs/LJ009-0113.wav|whistles merrily, and points upwards with madness in his look.
LJSpeech-1.1/wavs/LJ037-0078.wav|when they heard the sound of gunfire and the screams of Helen Markham.
LJSpeech-1.1/wavs/LJ006-0093.wav|So closely did they lie together, that the inspectors at their night visits found it difficult in stepping across the room to avoid treading on them.
LJSpeech-1.1/wavs/LJ008-0061.wav|The entrance upon this floor or leaf is from the middle window over the gate of the prison;
LJSpeech-1.1/wavs/LJ001-0156.wav|The paper on which the printing is to be done is a necessary part of our subject:
LJSpeech-1.1/wavs/LJ029-0195.wav|when Governor Connally confirmed on November eight that the President would come to Texas on November twenty-one and twenty-two,
LJSpeech-1.1/wavs/LJ040-0080.wav|That situation, however, was short-lived,
LJSpeech-1.1/wavs/LJ010-0165.wav|but he came as a lad to London, and took service as a pot-boy to a publican.
LJSpeech-1.1/wavs/LJ018-0334.wav|Webster, it may be mentioned here, was one of the worst prisoners ever remembered in Newgate
LJSpeech-1.1/wavs/LJ046-0227.wav|According to Special Agent in Charge Bouck,
LJSpeech-1.1/wavs/LJ019-0089.wav|sometimes it embraced the tread-wheel or the newly-invented instruments known as cranks, which ground air.
LJSpeech-1.1/wavs/LJ034-0005.wav|He worked principally on the first and sixth floors of the building, gathering books listed on orders and delivering them to the shipping room on the first floor.
LJSpeech-1.1/wavs/LJ043-0089.wav|to a commercial advertising photography firm in Dallas, where he was employed as a trainee starting October twelve, nineteen sixty-two.
LJSpeech-1.1/wavs/LJ016-0247.wav|while round about were shoe-strings, boot-laces, and lasts. Marwood, strange to say, followed the same trade as Calcraft.
LJSpeech-1.1/wavs/LJ045-0105.wav|She testified that she told him, quote,
LJSpeech-1.1/wavs/LJ020-0027.wav|Half the quantity of sponge given in preceding receipt.
LJSpeech-1.1/wavs/LJ028-0211.wav|He appointed Gobrias governor of Babylon.
LJSpeech-1.1/wavs/LJ019-0314.wav|The separation of prisoners in cells duly certified by the inspectors was insisted upon,
LJSpeech-1.1/wavs/LJ005-0001.wav|The Chronicles of Newgate, Volume two. By Arthur Griffiths. Section eight: The beginnings of prison reform.
LJSpeech-1.1/wavs/LJ009-0178.wav|In eighteen thirty-two the dissection of bodies cut down from the gallows, which had been decreed centuries previously, was abolished;
LJSpeech-1.1/wavs/LJ034-0062.wav|Although a person could handle a carton and not leave identifiable prints,
LJSpeech-1.1/wavs/LJ027-0088.wav|Extensive comparison, on the contrary, shows them to be the same, although the essential identity is obscured by adaptive modifications.
LJSpeech-1.1/wavs/LJ032-0240.wav|By Sunday, March thirty-one, nineteen sixty-three,
LJSpeech-1.1/wavs/LJ036-0024.wav|on a trip which passed a check point at St. Paul and Elm Streets at twelve:thirty-six p.m., November twenty-two, nineteen sixty-three.
LJSpeech-1.1/wavs/LJ039-0201.wav|fired two series of three shots at twenty-five yards in four point six and four point eight seconds.
LJSpeech-1.1/wavs/LJ006-0113.wav|The authority of these wardsmen so improperly exalted, and so entirely unchecked, degenerated into a baneful despotism.
LJSpeech-1.1/wavs/LJ048-0137.wav|there have been references to the numerous discussions between Secret Service representatives and the Dallas Police Department.
LJSpeech-1.1/wavs/LJ007-0014.wav|The admission of a crowd of visitors to assist in these lay services has already been remarked upon; as the inspectors pointed out,
LJSpeech-1.1/wavs/LJ007-0057.wav|Turnkeys occasionally visited the press-yard, but its occupants were under little or no control.
LJSpeech-1.1/wavs/LJ010-0121.wav|that he was, to use Thistlewood's words, "a contriver, instigator, and entrapper."
LJSpeech-1.1/wavs/LJ011-0176.wav|He now pretended that Mr. Turner was also on his way to the border, pursued by sheriffs' officers.
LJSpeech-1.1/wavs/LJ036-0189.wav|at the southeast corner of tenth Street and Patton Avenue, moments before the Tippit shooting.
LJSpeech-1.1/wavs/LJ006-0068.wav|We have reason to fear that poverty, ragged clothes, and an inability to pay the ward dues, elsewhere exacted for better accommodation,
LJSpeech-1.1/wavs/LJ006-0097.wav|Water might not be taken into the ward for washing purposes.
LJSpeech-1.1/wavs/LJ048-0085.wav|the Commission believes that the liaison between all Federal agencies responsible for Presidential protection should be improved.
LJSpeech-1.1/wavs/LJ039-0160.wav|In tests with the Mannlicher-Carcano C twenty-seven sixty-six rifle, over one hundred rounds of this ammunition were fired by the FBI
LJSpeech-1.1/wavs/LJ038-0052.wav|testified regarding the arrest of Oswald, as did the various police officers who participated in the fight.
LJSpeech-1.1/wavs/LJ010-0063.wav|The massacre of the whole of the Cabinet Ministers at one stroke was to be followed by an attack
LJSpeech-1.1/wavs/LJ009-0295.wav|who had been a convicted prisoner at York, but who consented to act as hangman when Calcraft was engaged, and no other functionary could be obtained.
LJSpeech-1.1/wavs/LJ011-0250.wav|While thus engaged, Howard thrust the poker into the fire.
LJSpeech-1.1/wavs/LJ018-0273.wav|Tarpey was caught through his wife,
LJSpeech-1.1/wavs/LJ047-0131.wav|In early September nineteen sixty-three
LJSpeech-1.1/wavs/LJ040-0232.wav|Few social agencies even in New York were equipped to provide the kind of intensive treatment that he needed,
LJSpeech-1.1/wavs/LJ010-0051.wav|The well-known Cato Street conspiracy,
LJSpeech-1.1/wavs/LJ008-0077.wav|where the apparatus for the punishment she was about to experience
LJSpeech-1.1/wavs/LJ006-0115.wav|Their original capital had been a few shillings, and for this they purchased the right to tax their fellows to the extent of pounds per week.
LJSpeech-1.1/wavs/LJ048-0262.wav|during the hours they are officially employed at their post of duty, or when they may reasonably expect that they may be called upon to perform an official duty.
LJSpeech-1.1/wavs/LJ020-0101.wav|From the beginning of your apprenticeship in housewifery, learn how to "dovetail" your duties neatly into one another.
LJSpeech-1.1/wavs/LJ045-0207.wav|He could not keep them with him in Dallas, where at least he could see his children whom, several witnesses testified, he seemed to love.
LJSpeech-1.1/wavs/LJ021-0009.wav|with a greater certainty of the employment of labor at a reasonable wage and of more business at a fair profit.
LJSpeech-1.1/wavs/LJ038-0137.wav|the Commission found that Oswald lied when he told Frazier that he was returning to Irving to obtain curtain rods.
LJSpeech-1.1/wavs/LJ041-0164.wav|which Thornley read at Oswald's suggestion.
LJSpeech-1.1/wavs/LJ001-0006.wav|And it is worth mention in passing that, as an example of fine typography,
LJSpeech-1.1/wavs/LJ003-0131.wav|He was an inmate of the same ward with others of the most dreadful sort, quote,
LJSpeech-1.1/wavs/LJ003-0208.wav|a number of amateurs were ever ready to give their gratuitous ministrations to the condemned.
LJSpeech-1.1/wavs/LJ010-0172.wav|He saw Prince Albert return there from a visit to Woolwich, and then passed on to Constitution Hill,
LJSpeech-1.1/wavs/LJ028-0203.wav|Less picturesque than this Hebrew legend is the royal record of Babylon, which fortunately was inscribed upon a clay cylinder from the ruins of the city.
LJSpeech-1.1/wavs/LJ007-0146.wav|vaunting his own adventures, or listening to those of others;
LJSpeech-1.1/wavs/LJ021-0087.wav|We have the right to expect that this driving power will be given patriotically and whole-heartedly to our nation.
LJSpeech-1.1/wavs/LJ025-0077.wav|Their food is provided for them,
LJSpeech-1.1/wavs/LJ028-0185.wav|Perhaps Babylon was so strongly fortified that at first he made no attempt to add it to his empire,
LJSpeech-1.1/wavs/LJ030-0207.wav|with the follow-up car trailing the President's automobile by approximately five feet.
LJSpeech-1.1/wavs/LJ012-0109.wav|But they at once made tracks, and took up their residence under assumed names in a tavern in Bloomsbury.
LJSpeech-1.1/wavs/LJ032-0230.wav|that the published pictures were the same as the original except for retouching done by these publications, apparently for the purpose of clarifying the lines of the rifle
LJSpeech-1.1/wavs/LJ049-0095.wav|for all offenses within its jurisdiction, as are FBI agents and Federal marshals.
LJSpeech-1.1/wavs/LJ024-0142.wav|I seek to make American democracy succeed.
LJSpeech-1.1/wavs/LJ050-0177.wav|This PRS agent will also be responsible for establishing an informal local liaison committee
LJSpeech-1.1/wavs/LJ011-0006.wav|He went to the bank, and found that no stocks stood in her name. He called at once upon Fauntleroy, his client's bankers, for an explanation,
LJSpeech-1.1/wavs/LJ029-0012.wav|He had made only a few brief visits to the State since the nineteen sixty Presidential campaign and in nineteen sixty-two he began to consider a formal visit.
LJSpeech-1.1/wavs/LJ026-0022.wav|if chlorophyll is absent, carbon is obtained from sugar or some similar compound,
LJSpeech-1.1/wavs/LJ019-0233.wav|and when it was completed, both sides of the prison were brought into harmony with modern ideas.
LJSpeech-1.1/wavs/LJ010-0096.wav|Edgeware Road, completing their dispositions for assuming supreme power after the blow had been struck.
LJSpeech-1.1/wavs/LJ045-0111.wav|They asked for Lee Oswald who was not called to the telephone because he was known by the other name.
LJSpeech-1.1/wavs/LJ005-0298.wav|to the county jails from such prisons as were past improvement, and that the borough funds should be charged for the accommodation.
LJSpeech-1.1/wavs/LJ009-0224.wav|At the first-named the exhibition nearly created a tumult, and the body was taken down and buried,
LJSpeech-1.1/wavs/LJ014-0179.wav|a working jeweler, shopman to a Mr. Berry of Parliament Street. It was Cope's duty to stay in the shop till the last, close the shutters,
LJSpeech-1.1/wavs/LJ035-0044.wav|If the man had passed from the vestibule into the lunchroom, Baker could not have seen him.
LJSpeech-1.1/wavs/LJ008-0113.wav|and his soul shot out so piercingly through the port-holes of his head, that the first glance of him nearly petrified me
LJSpeech-1.1/wavs/LJ050-0087.wav|propensity toward violent action, or some similar characteristic, coupled with some evaluation of the capability of the individual or group
LJSpeech-1.1/wavs/LJ047-0135.wav|According to the information received by the Bureau
LJSpeech-1.1/wavs/LJ049-0066.wav|For instance, the lead car always is manned by Secret Service agents familiar with the area and with local law enforcement officials;
LJSpeech-1.1/wavs/LJ030-0005.wav|by helicopter at ten:forty-five A.M., Eastern Standard Time, on November twenty-one, nineteen sixty-three, for Andrews Air Force Base.
LJSpeech-1.1/wavs/LJ027-0158.wav|But according to the opposite view no reason can be assigned why such should be the case.
LJSpeech-1.1/wavs/LJ048-0225.wav|they had little opportunity to eat during the day. No food was available at the Press Club.
LJSpeech-1.1/wavs/LJ033-0149.wav|the FBI Laboratory developed a latent palmprint and latent fingerprint on the bag.
LJSpeech-1.1/wavs/LJ018-0255.wav|The case was easily and rapidly disposed of.
LJSpeech-1.1/wavs/LJ014-0276.wav|Watts's crime was discovered by the secretary of the Globe Company, who came suddenly upon the extensive falsification of the passbook.
LJSpeech-1.1/wavs/LJ039-0219.wav|Frazier testified that the rifle was accurate, that it had less recoil than the average military rifle
LJSpeech-1.1/wavs/LJ036-0213.wav|The man's general description was similar to the one broadcast over the police radio.
LJSpeech-1.1/wavs/LJ037-0179.wav|was discarded along with the others as Oswald left the scene.
LJSpeech-1.1/wavs/LJ037-0009.wav|One witness felt he was too distant from the gunman to make a positive identification.
LJSpeech-1.1/wavs/LJ038-0163.wav|Prior attempt to kill.
LJSpeech-1.1/wavs/LJ006-0139.wav|Nobody interfered with them or regulated their conduct. They might get drunk when so disposed, and did so frequently, alone or in company.
LJSpeech-1.1/wavs/LJ039-0091.wav|Sergeant Zahm expressed the opinion that the shot which struck President Kennedy in the neck at one hundred seventy-six point nine
LJSpeech-1.1/wavs/LJ036-0016.wav|Lee Harvey Oswald left the building approximately three minutes after the assassination.
LJSpeech-1.1/wavs/LJ030-0109.wav|The Vice-Presidential car
LJSpeech-1.1/wavs/LJ019-0030.wav|Major, afterwards Sir Joshua Jebb,
LJSpeech-1.1/wavs/LJ015-0154.wav|When the crash came there were pensioners and other recipients of his bounty who could not believe
LJSpeech-1.1/wavs/LJ038-0039.wav|Three other officers, moving toward the scuffle, grabbed Oswald from the front, rear and side.
LJSpeech-1.1/wavs/LJ017-0146.wav|He had all the characteristics of the poisoner -- the calm deliberation,
LJSpeech-1.1/wavs/LJ036-0171.wav|he would have arrived there about twelve:fifty-nine to one p.m.
LJSpeech-1.1/wavs/LJ039-0099.wav|In accordance with standard Marine procedures, Oswald received extensive training in marksmanship.
LJSpeech-1.1/wavs/LJ004-0216.wav|The most noticeable of the improvements introduced was a better regulation of dietaries within the prison.
LJSpeech-1.1/wavs/LJ045-0136.wav|as Oswald went on to say. In Oswald's imagination, quote,
LJSpeech-1.1/wavs/LJ004-0135.wav|twenty men slept on eight straw beds, with sixteen rugs amongst them, and a piece of timber for a bolster.
LJSpeech-1.1/wavs/LJ045-0173.wav|Question: What did you say to that? Answer:
LJSpeech-1.1/wavs/LJ040-0030.wav|When he was in the Soviet Union, he apparently resented the Communist Party members,
LJSpeech-1.1/wavs/LJ024-0096.wav|No amendment which any powerful economic interests or the leaders of any powerful political party have had reason to oppose
LJSpeech-1.1/wavs/LJ018-0208.wav|were a low lot, the lowest among criminals except, perhaps, the 'smashers,' or those who passed the counterfeit money.
LJSpeech-1.1/wavs/LJ030-0215.wav|the car lurched forward, causing him to lose his footing. He ran three or four steps, regained his position and mounted the car.
LJSpeech-1.1/wavs/LJ012-0156.wav|His arrest and conviction cast dismay over the whole gang of receivers, and for a time seriously checked the nefarious traffic.
LJSpeech-1.1/wavs/LJ019-0028.wav|Mr. Shaw-Lefevre, the Speaker of the House of Commons, Sir Benjamin Brodie,
LJSpeech-1.1/wavs/LJ019-0079.wav|The cells inhabited by prisoners were of very varying dimensions;
LJSpeech-1.1/wavs/LJ046-0046.wav|In all of these roles the President must go to the people.
LJSpeech-1.1/wavs/LJ018-0054.wav|While in the condemned cell he conversed freely with the warders in broken English or through an interpreter.
LJSpeech-1.1/wavs/LJ014-0338.wav|These bankers, wishing for more specific information,
LJSpeech-1.1/wavs/LJ026-0113.wav|Only proteid foods form new protoplasm
LJSpeech-1.1/wavs/LJ015-0310.wav|which had received so perverted and mistaken direction,
LJSpeech-1.1/wavs/LJ049-0040.wav|The assassination suggests that it would have been of prime importance
LJSpeech-1.1/wavs/LJ022-0052.wav|here as in every other nation, we have come to recognize the possibility and the necessity of certain helpful remedial measures.
LJSpeech-1.1/wavs/LJ032-0054.wav|"A. Hidell, P.O. Box two nine one five, Dallas, Texas," on March twenty, nineteen sixty-three.
LJSpeech-1.1/wavs/LJ005-0290.wav|Instances rarely occur in which the borough jails admit of any proper classification of the prisoners.
LJSpeech-1.1/wavs/LJ028-0314.wav|observing him, hastened down, and setting one of the gates slightly ajar, questioned him who he was, and on what errand he had come.
LJSpeech-1.1/wavs/LJ028-0324.wav|his body red with marks of scourging and with blood, had no suspicion but that he spoke the truth, and was really come to be their friend and helper.
LJSpeech-1.1/wavs/LJ033-0035.wav|and Marina Oswald testified that Oswald did not say anything about curtain rods on the day before the assassination.
LJSpeech-1.1/wavs/LJ050-0082.wav|the interest of the Secret Service goes beyond information on individuals or groups threatening to cause harm or embarrassment to the President.
LJSpeech-1.1/wavs/LJ046-0190.wav|it had arrangements to be notified about release from confinement in roughly one thousand cases;
LJSpeech-1.1/wavs/LJ015-0096.wav|whether representing real or fictitious shares does not appear; but they were certificates connected in some way with Robson's long practiced frauds
LJSpeech-1.1/wavs/LJ019-0146.wav|There was as yet no control over the prisoners after locking-up time;
LJSpeech-1.1/wavs/LJ007-0091.wav|The lunatic became the sport of the idle and the depraved. His cure was out of the question;
LJSpeech-1.1/wavs/LJ033-0087.wav|She thought that its color was similar to that of the bag found on the sixth floor of the School Book Depository after the assassination.
LJSpeech-1.1/wavs/LJ050-0086.wav|Under these criteria, whether the case should be referred to the Secret Service depends on the existence of a previous history of mental instability,
LJSpeech-1.1/wavs/LJ025-0011.wav|is Huxley's famous essay, "The Border Territory Between the Animal and Vegetable Kingdoms," written in eighteen seventy-six,
LJSpeech-1.1/wavs/LJ003-0338.wav|End quote. it would cover some thirty acres, and cost a great deal more than the city, with the example of Whitecross Street prison before it,
LJSpeech-1.1/wavs/LJ038-0282.wav|there is enough on it to say that it could have come, and even perhaps a little stronger, to say that it probably came from this,
LJSpeech-1.1/wavs/LJ037-0075.wav|However, even in the absence of Mrs. Markham's testimony, there is ample evidence to identify Oswald as the killer of Tippit.
LJSpeech-1.1/wavs/LJ003-0033.wav|Enough has been said, probably, to prove that there was room for improvement in the condition and treatment of debtors in the prisons of the city of London.
LJSpeech-1.1/wavs/LJ041-0011.wav|Several witnesses testified that Lee Oswald was not aggressive. He was, however, involved in some fights.
LJSpeech-1.1/wavs/LJ026-0102.wav|but root pressure due to osmosis, capillary action and evaporation from the leaves are factors.
LJSpeech-1.1/wavs/LJ048-0078.wav|In each instance, liaison contacts should be developed to include a close friendly relationship,
LJSpeech-1.1/wavs/LJ028-0489.wav|Had the enemy of Babylon succeeded in breaking through the outer and inner defenses of the city the royal palace would have still been far from his reach.
LJSpeech-1.1/wavs/LJ007-0192.wav|inasmuch as it has aroused the attention of those upon whom parliamentary reports and grand jury presentments had hitherto failed to make the slightest impression.
LJSpeech-1.1/wavs/LJ006-0210.wav|Drink, in more or less unlimited quantities, was still to be had.
LJSpeech-1.1/wavs/LJ018-0361.wav|tampered with, and returned to the shop.
LJSpeech-1.1/wavs/LJ033-0132.wav|I didn't pay too much attention the way he was walking because I was walking along there looking at the railroad cars and watching the men on the diesel switch them cars
LJSpeech-1.1/wavs/LJ029-0131.wav|The building overlooks Dealey Plaza, an attractively landscaped triangle of three acres.
LJSpeech-1.1/wavs/LJ030-0221.wav|stated that Mrs. Kennedy would probably have fallen off the rear end of the car and been killed if Hill had not pushed her back into the Presidential automobile.
LJSpeech-1.1/wavs/LJ002-0257.wav|and a master of the ale-room, who kept this the scene of their revels clean, and saw that boiling water was provided for grog.
LJSpeech-1.1/wavs/LJ025-0066.wav|and his skepticism was the more justified since Ehrenberg in his elaborate and comprehensive work on the infusoria,
LJSpeech-1.1/wavs/LJ030-0110.wav|and Vice-Presidential follow-up car used portable sets with a separate frequency for their own car-to-car communication.
LJSpeech-1.1/wavs/LJ019-0119.wav|but Mr. Pearson hardly considered the converse sufficiently, and
LJSpeech-1.1/wavs/LJ048-0070.wav|reflect keen awareness of the necessity of communicating a much wider range of intelligence information to the Service.
LJSpeech-1.1/wavs/LJ033-0060.wav|Marina Oswald testified that at that time, quote, My heart dropped.
LJSpeech-1.1/wavs/LJ037-0077.wav|Barbara Jeanette Davis and Virginia Davis, were in an apartment of a multiple-unit house on the southeast corner of tenth and Patton
LJSpeech-1.1/wavs/LJ018-0115.wav|It was stated in evidence that the monies obtained by these forgeries amounted to eight thousand pounds or ten thousand pounds,
LJSpeech-1.1/wavs/LJ025-0032.wav|Hence arose the second great distinctive character of animals, or the circulatory system, which is less important than the digestive,
LJSpeech-1.1/wavs/LJ014-0248.wav|This passbook, when not at the bank, was in the exclusive custody of Watts.
LJSpeech-1.1/wavs/LJ003-0256.wav|By this means spirits, otherwise unattainable and strictly prohibited, were smuggled into the jail.
LJSpeech-1.1/wavs/LJ009-0216.wav|Hanging in chains upon the gibbet which had served for the execution,
LJSpeech-1.1/wavs/LJ034-0168.wav|Ronald Fischer and Robert Edwards were standing on the curb at the southwest corner of Elm and Houston Streets,
LJSpeech-1.1/wavs/LJ028-0481.wav|Nabopolassar, the father, my begetter, built Imgur-Bel, the great wall of Babylon,
LJSpeech-1.1/wavs/LJ028-0086.wav|the lips are thin, the chin prominent; the neck is that of a strong vigorous man.
LJSpeech-1.1/wavs/LJ003-0278.wav|Insuperable difficulties were still supposed to stand in the way of any general employment of prisoners at their trades.
LJSpeech-1.1/wavs/LJ047-0067.wav|Hosty decided that the Lee Harvey Oswald case should be reopened because of the alleged personal difficulties and the contact with the Worker,
LJSpeech-1.1/wavs/LJ028-0472.wav|Its outer part, about twelve feet in width, was protected with towers at intervals of sixty-five feet.
LJSpeech-1.1/wavs/LJ008-0024.wav|of the trading vessel 'Adventure,' upon the high seas.
LJSpeech-1.1/wavs/LJ036-0022.wav|the transfer was conclusively identified as having been issued by Cecil J. McWatters, a busdriver for the Dallas Transit Co.
LJSpeech-1.1/wavs/LJ023-0136.wav|In many states judges must retire at the age of seventy.
LJSpeech-1.1/wavs/LJ028-0428.wav|has been removed, and the surrounding city walls have been traced.
LJSpeech-1.1/wavs/LJ037-0258.wav|There is no doubt, however, that Oswald was seen leaving his roominghouse at about one p.m. wearing a zipper jacket,
LJSpeech-1.1/wavs/LJ042-0087.wav|He stated that he had volunteered to give Soviet officials any information that he had concerning Marine Corps operations, and intimated
LJSpeech-1.1/wavs/LJ016-0400.wav|Wainwright's demeanor was one of reckless effrontery steadily maintained to the last.
LJSpeech-1.1/wavs/LJ040-0021.wav|ascertained the facts surrounding the assassination but did not draw conclusions concerning Oswald's legal guilt.
LJSpeech-1.1/wavs/LJ036-0196.wav|Tippit patroled district Number seventy-eight in the Oak Cliff area of Dallas during daylight hours.
LJSpeech-1.1/wavs/LJ038-0254.wav|She testified that several days later Oswald recovered his rifle and brought it back to their apartment.
LJSpeech-1.1/wavs/LJ039-0117.wav|He had the services of an experienced highly trained coach.
LJSpeech-1.1/wavs/LJ028-0371.wav|Then the world metropolis, stripped of most of its population, became a mere village.
LJSpeech-1.1/wavs/LJ017-0080.wav|Palmer, alive to the danger he ran of a prosecution for forgery, should the fraud he had committed be brought to light,
LJSpeech-1.1/wavs/LJ022-0198.wav|We have in the darkest moments of our national trials retained our faith in our own ability to master our destiny.
LJSpeech-1.1/wavs/LJ047-0048.wav|while Oswald remained somewhat evasive at this interview, he was not antagonistic and seemed generally to be settling down.
LJSpeech-1.1/wavs/LJ029-0126.wav|to reach the northbound lanes of the Stemmons Freeway is via Elm Street, which Route Number seventy-seven traffic is instructed to follow in this part of the city.
LJSpeech-1.1/wavs/LJ015-0133.wav|Even when the bubble burst Redpath, who had lived at the rate of twenty thousand a year,
LJSpeech-1.1/wavs/LJ028-0440.wav|Possibly along the terraces of the walls, or upon the stages of some lofty temple tower,
LJSpeech-1.1/wavs/LJ018-0388.wav|and the bird's legs were a couple of teeth broken off the prisoner's comb.
LJSpeech-1.1/wavs/LJ028-0011.wav|As the taps upon her shoulder are repeated, she stretches out her long neck, and with long strides makes for the eastern horizon;
LJSpeech-1.1/wavs/LJ047-0074.wav|Under a general Bureau request to be on the alert for activities of the Fair Play for Cuba Committee
LJSpeech-1.1/wavs/LJ035-0046.wav|I was coming out this one on the second floor, and I don't know, I was kind of sweeping this area as I come up, I was looking from right to left
LJSpeech-1.1/wavs/LJ021-0136.wav|We have passed through more than a year of education.
LJSpeech-1.1/wavs/LJ002-0262.wav|owing to many charitable gifts and bequests, which included annual donations from the Archbishop of Canterbury,
LJSpeech-1.1/wavs/LJ044-0070.wav|WDSU has no program of any kind called, quote, Latin American Focus, end quote.
LJSpeech-1.1/wavs/LJ017-0216.wav|there were also a Frenchman, a Norwegian (the carpenter), three Chinamen, a "Sclavonian," and a black on board.
LJSpeech-1.1/wavs/LJ013-0070.wav|Mrs. Hunt had left money in the funds which remained unclaimed, and had been transferred, as in Miss Slack's case.
LJSpeech-1.1/wavs/LJ005-0006.wav|who handed a shilling to the escort warder to provide her with a hackney coach; but this functionary pocketed the cash, and obliged the woman to walk
LJSpeech-1.1/wavs/LJ008-0267.wav|no less than four hundred and fifty-one sentences of death for capital crimes were passed at the Old Bailey;
LJSpeech-1.1/wavs/LJ030-0069.wav|scanning the route, and getting out and standing near the President when the cars stopped.
LJSpeech-1.1/wavs/LJ027-0035.wav|In the second place, it must be noted that adaptations to similar conditions may result in superficial resemblances.
LJSpeech-1.1/wavs/LJ004-0188.wav|and which even fulfilled many of the exacting requirements of modern days.
LJSpeech-1.1/wavs/LJ049-0037.wav|In contrast, the Vice Presidential vehicle, although not specially designed for that purpose,
LJSpeech-1.1/wavs/LJ015-0004.wav|The goods having no existence, Cole of course could not deliver them.
LJSpeech-1.1/wavs/LJ018-0220.wav|The old gentleman's signature to this deed of gift was a forgery,
LJSpeech-1.1/wavs/LJ050-0196.wav|Since the assassination of President Kennedy, the Secret Service has been experimenting with new techniques in the inspection of buildings along a motorcade route.
LJSpeech-1.1/wavs/LJ002-0279.wav|The Giltspur Street Compter received sheriffs' debtors, also felons, vagrants, and night charges.
LJSpeech-1.1/wavs/LJ004-0122.wav|fifty-nine had no division whatever to separate males and females; one hundred and thirty-six had only one division for the purpose;
LJSpeech-1.1/wavs/LJ039-0090.wav|Characterizing the four-power scope as, quote, a real aid, an extreme aid, end quote, in rapid fire shooting,
LJSpeech-1.1/wavs/LJ027-0065.wav|enclosed and protected by the skeleton, viz., the neural cavity above, and the visceral or body cavity below, the vertebral column.
LJSpeech-1.1/wavs/LJ023-0083.wav|The Court has been acting not as a judicial body, but as a policy-making body.
LJSpeech-1.1/wavs/LJ015-0229.wav|who quickly took the wax impression, handed it back to Pierce; Pierce replaced it, left the office, and the thing was done.
LJSpeech-1.1/wavs/LJ005-0085.wav|Classification was insisted upon, in the manner laid down by the twenty-four George the third cap. fifty-four,
LJSpeech-1.1/wavs/LJ003-0300.wav|that all prisoners should always be in separate cells by night, and those of short sentences by day.
LJSpeech-1.1/wavs/LJ048-0276.wav|Since he was convinced that this was not the case, he believed that it would be unfair to the agents and their families to take explicit disciplinary measures.
LJSpeech-1.1/wavs/LJ011-0228.wav|Mr. W. Wakefield served in a continental army, and rose to the rank of colonel,
LJSpeech-1.1/wavs/LJ045-0117.wav|it would appear to be unlikely that his landlady in Dallas
LJSpeech-1.1/wavs/LJ033-0010.wav|the presence of a long handmade brown paper bag near the point from which the shots were fired, and (five) the palmprint,
LJSpeech-1.1/wavs/LJ050-0110.wav|and each agency given clear understanding of the assistance which the Secret Service expects.
LJSpeech-1.1/wavs/LJ003-0315.wav|All three, governor, chaplain, and surgeon, should keep journals, which should be inspected periodically by the visiting magistrates.
LJSpeech-1.1/wavs/LJ048-0196.wav|According to Captain Lawrence, quote,
LJSpeech-1.1/wavs/LJ042-0055.wav|In the event of war I would kill any American who put a uniform on in defense of the American government -- any American.
LJSpeech-1.1/wavs/LJ029-0029.wav|When the trip was planned for only one day, Governor Connally had opposed the motorcade because there was not enough time. The Governor stated, however, that, quote,
LJSpeech-1.1/wavs/LJ047-0138.wav|He checked in Oswald's old neighborhood and throughout the Dallas-Fort Worth area but was unable to locate Oswald.
LJSpeech-1.1/wavs/LJ003-0047.wav|Newgate continued to be a reproach to those responsible for its management.
LJSpeech-1.1/wavs/LJ018-0242.wav|The case was tried at Guildford Assizes, and caused intense excitement,
LJSpeech-1.1/wavs/LJ005-0067.wav|The Prison Society reproves the misdirected efforts of ambitious architects, who by a lavish and improvident expenditure of public money
LJSpeech-1.1/wavs/LJ009-0281.wav|Calcraft was forced to return, and he once more pushed Bousfield off,
LJSpeech-1.1/wavs/LJ018-0246.wav|At first the case was contested hotly, but, to the profound astonishment of every one inside and outside the court,
LJSpeech-1.1/wavs/LJ007-0082.wav|At the time the Lords' Committee sat there were eight thus retained in Newgate, and a return in the appendix of the Lords' report
LJSpeech-1.1/wavs/LJ037-0114.wav|He and Scoggins attempted to chase down the gunman in Scoggin's taxicab, but he had disappeared.
LJSpeech-1.1/wavs/LJ045-0115.wav|and he thought that he loses jobs because the FBI visits the place of his employment, end quote.
LJSpeech-1.1/wavs/LJ049-0030.wav|the Commission does believe that there are aspects of the protective measures employed in the motorcade at Dallas which deserve special comment.
LJSpeech-1.1/wavs/LJ011-0155.wav|He had eloped with his first wife from school.
LJSpeech-1.1/wavs/LJ034-0016.wav|Behind these boxes was another carton placed on the floor on which a man sitting
LJSpeech-1.1/wavs/LJ046-0047.wav|Exposure of the President to public view through travel among the people of this country is a great and historic tradition of American life.
LJSpeech-1.1/wavs/LJ022-0048.wav|They are still declining.
LJSpeech-1.1/wavs/LJ016-0427.wav|He tripped up the chapel-stairs to hear the condemned sermon, and came out with cheerful alacrity on the morning he was to die.
LJSpeech-1.1/wavs/LJ049-0134.wav|The Commission suggests that consideration might be given to assigning to a Cabinet-level committee or the National Security Council
LJSpeech-1.1/wavs/LJ006-0237.wav|"A lad named Matthew White has had a wound in his eye by a bone thrown at him, which very nearly destroyed vision."
LJSpeech-1.1/wavs/LJ046-0136.wav|Before the assassination of President Kennedy,
LJSpeech-1.1/wavs/LJ009-0246.wav|Good people, do not be hurried; I am not, I can wait.
LJSpeech-1.1/wavs/LJ050-0016.wav|This special assistant should be required to have sufficient stature and experience in law enforcement, intelligence, or allied fields
LJSpeech-1.1/wavs/LJ004-0033.wav|and they were not threatened with any particular penalties if they evaded or ignored the new acts.
LJSpeech-1.1/wavs/LJ009-0020.wav|previous to the execution of Henry Fauntleroy for uttering a forged security
LJSpeech-1.1/wavs/LJ041-0161.wav|It was just -- never getting back to looking at things from any other way once he had become a Marxist, whenever that was, end quote.
LJSpeech-1.1/wavs/LJ002-0108.wav|watch all night. Adjoining the felons' side lodge is the keeper's office, where the prison books are kept, and his clerk,
LJSpeech-1.1/wavs/LJ005-0198.wav|In another borough, with a population of ten thousand, the prison was of the same dimensions.
LJSpeech-1.1/wavs/LJ038-0226.wav|Oswald had apparently mistaken the county jail for the city jail.
LJSpeech-1.1/wavs/LJ041-0183.wav|Delgado and Oswald talked more about Cuba than Russia, and sometimes imagined themselves as leaders in the Cuban Army or Government,
LJSpeech-1.1/wavs/LJ031-0012.wav|The base station replied, quote, They have been notified, end quote,
LJSpeech-1.1/wavs/LJ022-0060.wav|We must begin now to make provision for the future.
LJSpeech-1.1/wavs/LJ047-0166.wav|Hosty assured her, through Mrs. Paine as interpreter, that the FBI would not harm or harass her.
LJSpeech-1.1/wavs/LJ029-0147.wav|Sorrels and Lawson reviewed the route in cooperation with Assistant Chief Bachelor and other Dallas police officials who took notes on the requirements
LJSpeech-1.1/wavs/LJ008-0274.wav|The result in the latter case was left in the first place to the king in council,
LJSpeech-1.1/wavs/LJ031-0098.wav|I suppose nobody really had the heart to do it.
LJSpeech-1.1/wavs/LJ044-0219.wav|you could tell what they wanted you to do by reading between the lines, reading the thing and doing a little reading between the lines, end quote.
LJSpeech-1.1/wavs/LJ002-0271.wav|The compters of Ludgate, Giltspur Street, and the Borough were discontinued as debtors' prisons (as was Newgate also)
LJSpeech-1.1/wavs/LJ025-0095.wav|once supposed to be exclusively confined to plants, are now known to be regular and normal products of animals.
LJSpeech-1.1/wavs/LJ018-0350.wav|He might swear he was not the murderer, that he never fired a pistol in his life,
LJSpeech-1.1/wavs/LJ022-0189.wav|They are a minimum of wise readjustments of our Federal Reserve System in the light of past experience and present needs.
LJSpeech-1.1/wavs/LJ016-0056.wav|running a great risk of discovery as he passed by a lot of workmen at Tyler's manufactory in Warwick Square, which had formerly been the College of Physicians.
LJSpeech-1.1/wavs/LJ049-0198.wav|in all its ramifications and alert to every danger that might befall it,
LJSpeech-1.1/wavs/LJ010-0211.wav|The society was supposed to meet regularly, and its proceedings, together with the speeches made, were duly recorded.
LJSpeech-1.1/wavs/LJ037-0170.wav|But he agreed that because the other three bullets were mutilated, he could not determine if they had been fired from the same weapon as the test bullets.
LJSpeech-1.1/wavs/LJ039-0072.wav|From my own experience in shooting over the years,
LJSpeech-1.1/wavs/LJ013-0162.wav|a bundle lying on the floor, as though thieves had been interrupted in the act.
LJSpeech-1.1/wavs/LJ042-0085.wav|He said that his Marine service in Okinawa and elsewhere had given him, quote, a chance to observe American imperialism, end quote.
LJSpeech-1.1/wavs/LJ044-0102.wav|samples of his photographic work, offering to contribute that sort of service without charge.
LJSpeech-1.1/wavs/LJ017-0223.wav|The mutiny was organized with great secrecy, and broke out most unexpectedly in the middle of the night.
LJSpeech-1.1/wavs/LJ032-0052.wav|According to Klein's shipping order form, one Italian carbine six point five x four x scope,
LJSpeech-1.1/wavs/LJ025-0046.wav|The relations of plants and animals to the atmosphere are therefore inverse.
LJSpeech-1.1/wavs/LJ030-0003.wav|Visits to Other Texas Cities
LJSpeech-1.1/wavs/LJ028-0389.wav|St. Jerome said:
LJSpeech-1.1/wavs/LJ007-0035.wav|The better-disposed complained bitterly of what they had to endure;
LJSpeech-1.1/wavs/LJ034-0178.wav|He said that he could see the man from the middle of his chest to the top of his head, and that as he was facing the window the man was in the lower right-hand portion of the window
LJSpeech-1.1/wavs/LJ047-0032.wav|In June nineteen sixty-two, the Bureau was advised by the Department of State of Oswald's plan to return to the United States.
LJSpeech-1.1/wavs/LJ047-0010.wav|the agent who was assigned his case at the time of the assassination, the Director of the FBI,
LJSpeech-1.1/wavs/LJ032-0200.wav|One Sunday, while his wife was hanging diapers, Oswald asked her to take a picture of him holding a rifle, a pistol
LJSpeech-1.1/wavs/LJ006-0163.wav|and that he was constantly engaged attending sessions and going with drafts to the hulks.
LJSpeech-1.1/wavs/LJ002-0127.wav|and the aggregate amount of debts sued for was eighty-one thousand, seven hundred ninety-one pounds.
LJSpeech-1.1/wavs/LJ003-0207.wav|For while Roman Catholics and Dissenters were encouraged to see ministers of their own persuasion,
LJSpeech-1.1/wavs/LJ050-0185.wav|The Service should consider preparing formal explanations of the cooperation anticipated during a Presidential visit to a city,
LJSpeech-1.1/wavs/LJ040-0130.wav|and interviewed and observed by other members of the Youth House staff.
LJSpeech-1.1/wavs/LJ014-0135.wav|It was against common sense to charge her with murdering the only friend she had in the world;
LJSpeech-1.1/wavs/LJ022-0038.wav|That makes it particularly necessary for the Vice- President and for me to conceive of our duty toward the entire country.
LJSpeech-1.1/wavs/LJ030-0087.wav|The Vice-Presidential automobile, a four-door Lincoln convertible obtained locally for use in the motorcade,
LJSpeech-1.1/wavs/LJ049-0068.wav|A doctor is in the motorcade.
LJSpeech-1.1/wavs/LJ022-0165.wav|both in its public relations and in its internal relations.
LJSpeech-1.1/wavs/LJ037-0120.wav|Guinyard said, quote, I told them that was him right there. I pointed him out right there, end quote.
LJSpeech-1.1/wavs/LJ035-0206.wav|An eyewitness to the shooting immediately provided a description of the man in the window which was similar to Oswald's actual appearance.
LJSpeech-1.1/wavs/LJ050-0157.wav|On the basis of such a feasibility study,
LJSpeech-1.1/wavs/LJ014-0295.wav|This man got up to look for him, and found him hanging from the bars of a neighboring room.
LJSpeech-1.1/wavs/LJ019-0102.wav|which was promptly carried, with the additional instruction to the committee to suggest any improvements.
LJSpeech-1.1/wavs/LJ030-0136.wav|end quote, the President replied, "That is very obvious."
LJSpeech-1.1/wavs/LJ010-0044.wav|The present Queen very soon after her accession
LJSpeech-1.1/wavs/LJ050-0065.wav|Alan H. Belmont, Assistant to the Director of the FBI, testified that this revision was initiated by the FBI itself.
LJSpeech-1.1/wavs/LJ009-0275.wav|who bad been further upset by a letter threatening to shoot him when he appeared to perform his task.
LJSpeech-1.1/wavs/LJ014-0064.wav|He lived at Mile End, whence he walked often to call at three, Minver Place, Bermondsey, the residence of his old love.
LJSpeech-1.1/wavs/LJ044-0061.wav|there also seems to be no basis for his claim that he had distributed, quote,
LJSpeech-1.1/wavs/LJ039-0138.wav|Lee Oswald exhibited an average amount of proficiency with that weapon.
LJSpeech-1.1/wavs/LJ018-0191.wav|Bob was therefore allowed to pass on.
LJSpeech-1.1/wavs/LJ050-0219.wav|However, the nineteen sixty-four to sixty-five budget request was submitted in November nineteen sixty-three
LJSpeech-1.1/wavs/LJ030-0075.wav|followed closely behind the President's automobile.
LJSpeech-1.1/wavs/LJ035-0167.wav|the earliest estimates would still have permitted Oswald to leave the building by twelve:thirty-three.
LJSpeech-1.1/wavs/LJ017-0168.wav|But a long public discussion followed, and in consequence he was reprieved.
LJSpeech-1.1/wavs/LJ016-0363.wav|This feeling was the stronger because
LJSpeech-1.1/wavs/LJ040-0042.wav|He defected to the Soviet Union, shot at General Walker, tried to go to Cuba and even contemplated hijacking an airplane to get there.
LJSpeech-1.1/wavs/LJ028-0046.wav|Another son, Assurbanipal, or the great Sardanapalus of the Greeks, became the King of Nineveh.
LJSpeech-1.1/wavs/LJ001-0098.wav|and the whole effect is a little too gray, owing to the thinness of the letters.
LJSpeech-1.1/wavs/LJ008-0004.wav|The reasons for this change were fully set forth in a previous chapter.
LJSpeech-1.1/wavs/LJ010-0109.wav|Thistlewood made a long and rambling defense, the chief features of which were abuse of Lord Sidmouth, and the vilification of the informer Edwards.
LJSpeech-1.1/wavs/LJ050-0038.wav|In attempting to identify those individuals who might prove a danger to the President,
LJSpeech-1.1/wavs/LJ005-0070.wav|These are principles fully recognized now-a-days, and it may fairly be conceded that the Prison Discipline Society's ideal
LJSpeech-1.1/wavs/LJ027-0022.wav|An examination of the facts in each of the lines of evidence makes it clear
LJSpeech-1.1/wavs/LJ018-0351.wav|and that, in spite of the verdict of the jury, "he left the dock with a calm and quiet conscience;"
LJSpeech-1.1/wavs/LJ021-0181.wav|thereby saving the British treasury one hundred and fifty million dollars a year in interest alone?
LJSpeech-1.1/wavs/LJ005-0163.wav|One great impediment to wide amelioration was that a vast number of small jails lay out of reach of the law.
LJSpeech-1.1/wavs/LJ019-0237.wav|The Act for private executions led to the erection of the gallows shed in the exercising yard, and at the flank of the passage from the condemned cells.
LJSpeech-1.1/wavs/LJ010-0149.wav|She was seized before she could do any mischief,
LJSpeech-1.1/wavs/LJ028-0499.wav|Ctesias mentions three hundred feet; probably they were not far from the truth.
LJSpeech-1.1/wavs/LJ038-0294.wav|He indicated that he wanted more people in the vicinity at the time of the attempt so that his arrival and departure would not attract great attention.
LJSpeech-1.1/wavs/LJ035-0017.wav|He heard two more shots spaced, quote, pretty well even to me.
LJSpeech-1.1/wavs/LJ016-0436.wav|pointing to an aperture for ventilating the cell. On the morning of execution he asked how far it was to the gallows, and was told it was quite close.
LJSpeech-1.1/wavs/LJ012-0228.wav|The evidence from first to last was circumstantial,
LJSpeech-1.1/wavs/LJ028-0179.wav|It is only from their ruins that we may hope to obtain accurate information of the strongest fortifications in the ancient world.
LJSpeech-1.1/wavs/LJ015-0165.wav|But the peer rushed forward and shook Redpath warmly by the hand.
LJSpeech-1.1/wavs/LJ036-0159.wav|On the other hand, Whaley identified Commission Exhibit Number one fifty (the shirt taken from Oswald upon arrest) as the shirt his passenger was wearing.
LJSpeech-1.1/wavs/LJ037-0127.wav|Four men -- Warren Reynolds, Harold Russell, Pat Patterson, and L. J. Lewis
LJSpeech-1.1/wavs/LJ009-0218.wav|But there was an attempt to revive it at that date, when the act for dispensing with the dissection of criminals was passed.
LJSpeech-1.1/wavs/LJ027-0142.wav|the antlers of an existing deer furnish in their development a kind of "resume," or recapitulation, of the successive phases
LJSpeech-1.1/wavs/LJ001-0122.wav|that he has a five, an eight, or a three before him, unless the press work is of the best:
LJSpeech-1.1/wavs/LJ032-0270.wav|Having reviewed the evidence that (one) Lee Harvey Oswald purchased the rifle used in the assassination,
LJSpeech-1.1/wavs/LJ016-0438.wav|that the convict's clothes were still the executioner's perquisite.
LJSpeech-1.1/wavs/LJ006-0061.wav|and "the greatest contempt shown for the law."
LJSpeech-1.1/wavs/LJ038-0275.wav|and concluded, quote, that there is a fair probability, end quote, that the bullet was fired from the rifle used in the assassination of President Kennedy.
LJSpeech-1.1/wavs/LJ022-0168.wav|who have little protection under the old laws against what used to be called frenzied finance.
LJSpeech-1.1/wavs/LJ037-0074.wav|and her positive identification of Oswald at a police lineup, the Commission considers her testimony reliable.
LJSpeech-1.1/wavs/LJ012-0276.wav|While she was senseless, but really still alive, he cut off her head, and dismembered the body in the manner already described.
LJSpeech-1.1/wavs/LJ047-0221.wav|or Vice President, to immediately notify the Secret Service and confirm it in writing.
LJSpeech-1.1/wavs/LJ021-0061.wav|The emergency purpose of the N.R.A. was to put men to work and since its creation more than four million persons have been reemployed,
LJSpeech-1.1/wavs/LJ023-0094.wav|and placed "an unwarranted limitation upon the commerce clause."
LJSpeech-1.1/wavs/LJ044-0236.wav|it is unlikely that a reasoning person would plan to attempt to travel from Dallas, Texas to Cuba
LJSpeech-1.1/wavs/LJ029-0066.wav|No one else was identified to the Secret Service through local inquiry as potentially dangerous,
LJSpeech-1.1/wavs/LJ039-0101.wav|He went through a series of exercises called dry firing where he assumed all positions which would later be used in the qualification course.
LJSpeech-1.1/wavs/LJ013-0008.wav|came to light in this same year, eighteen forty-one.
LJSpeech-1.1/wavs/LJ010-0181.wav|and the royal party drove back to Clarence House, the Queen being anxious to give the first news of the outrage and of her safety to her mother,
LJSpeech-1.1/wavs/LJ030-0042.wav|Manned by officers of the Dallas Police Department, this automobile preceded the main party by approximately quarter of a mile.
LJSpeech-1.1/wavs/LJ024-0141.wav|You who know me will accept my solemn assurance that in a world in which democracy is under attack,
LJSpeech-1.1/wavs/LJ012-0095.wav|The jewels had belonged to a Spanish countess recently deceased, who had sent them to England for greater security on the outbreak of the first Carlist war.
LJSpeech-1.1/wavs/LJ041-0181.wav|Delgado testified that Oswald was, quote, a complete believer that our way of government was not quite right, end quote.
LJSpeech-1.1/wavs/LJ023-0075.wav|that it was an extraordinary power to exercise and through Mr. Justice Washington laid down this limitation upon it:
LJSpeech-1.1/wavs/LJ018-0223.wav|In eighteen fifty-six the father died.
LJSpeech-1.1/wavs/LJ008-0292.wav|but it was seldom less than six weeks. It all depended upon the sovereign's disposition to do business.
LJSpeech-1.1/wavs/LJ015-0053.wav|Bates was at once captured in Norfolk Street, Strand.
LJSpeech-1.1/wavs/LJ030-0067.wav|on his right sat Kellerman.
LJSpeech-1.1/wavs/LJ027-0083.wav|For such a series of facts the reader must be referred to special books like Wiedersheim's "Comparative Anatomy of the Vertebrates,"
LJSpeech-1.1/wavs/LJ007-0165.wav|any awakening truth, salutary exhortation, or imperfect resolutions of amendment can take root or grow.
LJSpeech-1.1/wavs/LJ050-0255.wav|This no longer appears to be the case.
LJSpeech-1.1/wavs/LJ037-0004.wav|At least twelve persons saw the man with the revolver in the vicinity of the Tippit crime scene at or immediately after the shooting.
LJSpeech-1.1/wavs/LJ041-0170.wav|Thornley testified, quote, At which time he looked at me like a betrayed Caesar and screamed, screamed definitely, "Not you, too, Thornley!"
LJSpeech-1.1/wavs/LJ050-0050.wav|As a result of these studies, the planning document submitted by the Secretary of the Treasury to the Bureau of the Budget on August thirty-one,
LJSpeech-1.1/wavs/LJ030-0165.wav|which caused her to look to her right.
LJSpeech-1.1/wavs/LJ028-0224.wav|I quieted their sighings and soothed their sorrows.
LJSpeech-1.1/wavs/LJ001-0180.wav|the proportions of the page still give pleasure by the sense of richness that the cuts and letter together convey.
LJSpeech-1.1/wavs/LJ010-0277.wav|Lord Abinger sentenced him to eighteen months' imprisonment in Newgate,
LJSpeech-1.1/wavs/LJ027-0070.wav|So much concerns the general plan of skeletal structures and is strongly suggestive of -- in fact it is inexplicable without -- common origin.
LJSpeech-1.1/wavs/LJ046-0214.wav|members of his immediate family, the President-Elect, and the Vice-President is within the exclusive jurisdiction of the U.S. Secret Service.
LJSpeech-1.1/wavs/LJ003-0189.wav|The name still survived in the new press yard, which was the receptacle of the male condemned prisoners. It was generally crowded, like the rest of the prison.
LJSpeech-1.1/wavs/LJ015-0204.wav|This was the question which presented itself to the fertile brain of one Pierce,
LJSpeech-1.1/wavs/LJ008-0010.wav|at first, and for some few years after seventeen eighty-four, executions took place occasionally at a distance from Newgate.
LJSpeech-1.1/wavs/LJ011-0039.wav|but in a separate chamber, that belonging to one of the warders of the jail.
LJSpeech-1.1/wavs/LJ003-0128.wav|caught their flash terms and sung their songs, was admitted to their revels, and acquired, in place of habits of perfect sobriety,
LJSpeech-1.1/wavs/LJ026-0153.wav|the other part of respiration, elimination of carbon dioxide, has been treated under excretions.
LJSpeech-1.1/wavs/LJ032-0227.wav|Shaneyfelt testified that the published photographs appeared to be based on a copy of the original which the publications had each retouched differently.
LJSpeech-1.1/wavs/LJ007-0149.wav|the jail, and the scaffold.
LJSpeech-1.1/wavs/LJ006-0119.wav|When the wardsman was a man of some education, with some knowledge of legal chicanery gained by personal experience, he might add considerably to his emoluments
LJSpeech-1.1/wavs/LJ037-0264.wav|and seven eyewitnesses who saw the flight of the gunman with revolver in hand
LJSpeech-1.1/wavs/LJ024-0070.wav|I now propose that we establish by law an assurance against any such ill-balanced court in the future.
LJSpeech-1.1/wavs/LJ017-0243.wav|but Vartos, or Watto, the Turk, would not allow any but the eight mutineers to have anything.
LJSpeech-1.1/wavs/LJ038-0081.wav|Captain Fritz of the homicide and robbery bureau did most of the questioning, but he kept no notes and there were no stenographic or tape recordings.
LJSpeech-1.1/wavs/LJ002-0090.wav|More than half their quadrangle had been partitioned off for another purpose,
LJSpeech-1.1/wavs/LJ050-0221.wav|The Secret Service has now presented its recommendations to the Bureau of the Budget. The plan proposed by the Service
LJSpeech-1.1/wavs/LJ012-0265.wav|A woman named Gale, who lived with him, was arrested at the same time. The prisoners were examined at the Marylebone police court.
LJSpeech-1.1/wavs/LJ045-0147.wav|She asked Oswald, quote,
LJSpeech-1.1/wavs/LJ005-0153.wav|Sometimes this money might be expended in the purchase of extra articles of food.
LJSpeech-1.1/wavs/LJ021-0062.wav|in great part through the cooperation of American business brought about under the codes.
LJSpeech-1.1/wavs/LJ023-0077.wav|by which any law is passed,
LJSpeech-1.1/wavs/LJ030-0025.wav|when Air Force One touched down at Love Field at eleven:forty a.m., Eastern Standard Time.
LJSpeech-1.1/wavs/LJ032-0209.wav|He stated, however, that while he, quote, found no differences, end quote, between the rifles in the two photographs, he could not make a, quote, positive identification
LJSpeech-1.1/wavs/LJ004-0099.wav|Ventilators, hand and others, were to be supplied.
LJSpeech-1.1/wavs/LJ007-0034.wav|At times the numbers congregated together were very great; as many as fifty and sixty, even more, were crowded indiscriminately into the press-yard.
LJSpeech-1.1/wavs/LJ039-0157.wav|A series of tests were performed to determine whether the weapon and ammunition used in the assassination
LJSpeech-1.1/wavs/LJ042-0191.wav|While, quote, resourcefulness and patient working towards the aforesaid goals
LJSpeech-1.1/wavs/LJ016-0441.wav|The man, Bousfield, however, whose execution was so sadly bungled,
LJSpeech-1.1/wavs/LJ003-0306.wav|The clothes of prisoners arriving dirty, or in rags, should be fumigated before worn in the jail,
LJSpeech-1.1/wavs/LJ028-0351.wav|As for Zopyrus he was considered by Darius to have surpassed, in the greatness of his achievements, all other Persians,
LJSpeech-1.1/wavs/LJ022-0002.wav|April twenty-eight, nineteen thirty-five.
LJSpeech-1.1/wavs/LJ008-0149.wav|and after a long interval his dead body was discovered, shockingly disfigured, in a ditch. This was in eighteen oh two.
LJSpeech-1.1/wavs/LJ010-0212.wav|With Oxford's other papers were found letters from the secretary, written as it seemed by Oxford to himself, after the manner of Mr. Toots,
LJSpeech-1.1/wavs/LJ044-0074.wav|an agent of Bringuier's attempting to learn more about the true nature
LJSpeech-1.1/wavs/LJ016-0435.wav|"Two fresh men! May I speak to them? Yes! I must caution you," he went on to the warders, "not to go to sleep, or I shall be off through that little hole,"
LJSpeech-1.1/wavs/LJ003-0172.wav|The keeper went still further in his efforts to make money.
LJSpeech-1.1/wavs/LJ038-0202.wav|Prior to the Walker shooting on April ten, Oswald had been attending typing classes on Monday, Tuesday, and Thursday evenings.
LJSpeech-1.1/wavs/LJ007-0123.wav|and the inspectors expressed themselves still more strongly in reprehension of the practice.
LJSpeech-1.1/wavs/LJ029-0163.wav|and then west on Main, turning back to Elm at Houston and then out Stemmons Freeway to the Trade Mart.
LJSpeech-1.1/wavs/LJ045-0073.wav|Oswald once struck his wife because of a letter which she wrote to a former boyfriend in Russia.
LJSpeech-1.1/wavs/LJ009-0293.wav|While Calcraft was in office other aspirants to fame appeared in the field.
LJSpeech-1.1/wavs/LJ030-0247.wav|I was bent over under the weight of Agent Youngblood's body, toward Mrs. Johnson and Senator Yarborough, end quote,
LJSpeech-1.1/wavs/LJ012-0143.wav|"Money Moses" had received the stolen gold-dust from Moss' father-in-law, Davis, or Isaacs, who was never arrested,
LJSpeech-1.1/wavs/LJ029-0011.wav|President Kennedy's visit to Texas in November nineteen sixty-three had been under consideration for almost a year before it occurred.
LJSpeech-1.1/wavs/LJ036-0195.wav|He was described by Chief Curry as having the reputation of being "a very fine, dedicated officer."
LJSpeech-1.1/wavs/LJ011-0212.wav|then a maid and heir apparent unto her father, for the sake of the lucre of her substance; and for having afterwards unlawfully and against her will
LJSpeech-1.1/wavs/LJ044-0098.wav|He did not think Oswald looked like the, quote, type, end quote, that he would have expected to find associating with a group such as the Fair Play for Cuba Committee.
LJSpeech-1.1/wavs/LJ028-0159.wav|thence from the corners of the wall there is carried along each bank of the river a fence of burned bricks.
LJSpeech-1.1/wavs/LJ019-0312.wav|with the local jurisdictions, although still very leniently disposed.
LJSpeech-1.1/wavs/LJ015-0257.wav|Kay, who had been living with Agar at the time of the bullion robbery,
LJSpeech-1.1/wavs/LJ035-0023.wav|Baker testified that he entered the lobby of the building and, quote, spoke out and asked where the stairs or elevator was
LJSpeech-1.1/wavs/LJ028-0249.wav|continued dancing and reveling until they learned the capture but too certainly.
LJSpeech-1.1/wavs/LJ029-0094.wav|Lawson was not specifically instructed to select the parade route, but he understood that this was one of his functions.
LJSpeech-1.1/wavs/LJ025-0065.wav|Even as late as eighteen forty-five, however, a botanist of Schleiden's eminence dealt very skeptically with these statements,
LJSpeech-1.1/wavs/LJ002-0142.wav|These courts were open to many and grave objections.
LJSpeech-1.1/wavs/LJ011-0244.wav|through whose interest an appointment under Government was to be obtained for Mullay, would be present.
LJSpeech-1.1/wavs/LJ007-0043.wav|were associated continually with a number of those who could look with certainty on a mitigation of punishment.
LJSpeech-1.1/wavs/LJ009-0056.wav|The last of the four is said to have been a clergyman of the Church of England, condemned for forgery, "a miserable old man in a tattered suit of black.
LJSpeech-1.1/wavs/LJ039-0166.wav|which was in turn placed to the right of the closest silhouette.
LJSpeech-1.1/wavs/LJ014-0155.wav|Now she abused the jury, now called Manning a vagabond,
LJSpeech-1.1/wavs/LJ047-0186.wav|I can now afford to wait until New Orleans forwarded the necessary papers to me to show me I now had all the information.
LJSpeech-1.1/wavs/LJ043-0068.wav|While he might have expected difficulty from such an approach, in fact
LJSpeech-1.1/wavs/LJ033-0049.wav|According to Mrs. Paine, Oswald had gone to bed by nine p.m.;
LJSpeech-1.1/wavs/LJ009-0270.wav|A worse case still was that of William Bousfield, who, when awaiting execution for murder, about the same date,
LJSpeech-1.1/wavs/LJ022-0009.wav|The job of creating a program for the nation's welfare is, in some respects, like the building of a ship.
LJSpeech-1.1/wavs/LJ046-0102.wav|This section considers first the means used to locate potential sources of danger to the President in time to take appropriate precautions.
LJSpeech-1.1/wavs/LJ008-0302.wav|I have heard the protracted agony of both classes described by those who witnessed it in terms so strong, that I am unwilling to repeat them.
LJSpeech-1.1/wavs/LJ029-0021.wav|The three agreed that the President would come to Texas in late November nineteen sixty-three.
LJSpeech-1.1/wavs/LJ012-0178.wav|Thurtell drove him down in a gig, "to be killed as he traveled," in Thurtell's own words.
LJSpeech-1.1/wavs/LJ017-0122.wav|A government prosecution was instituted, and Palmer was brought to Newgate for trial at the Central Criminal Court.
LJSpeech-1.1/wavs/LJ016-0295.wav|curse, or shout, as in this heaving and struggling forward they gained or lost in their strong efforts to get nearer where Müller was to die.
LJSpeech-1.1/wavs/LJ034-0183.wav|Boxes and cases were stacked behind him.
LJSpeech-1.1/wavs/LJ043-0066.wav|Some of his acquaintances, feeling that Oswald tried to impress people with the fact that he had lived and worked in Russia, were led to the belief
LJSpeech-1.1/wavs/LJ009-0062.wav|buries his head under his body.
LJSpeech-1.1/wavs/LJ012-0177.wav|The victim was invited to visit Probert's cottage in the country near Elstree.
LJSpeech-1.1/wavs/LJ015-0275.wav|Burglars brought him the cheques they stole from houses, thieves what they got in pocketbooks.
LJSpeech-1.1/wavs/LJ045-0005.wav|especially an atmosphere of extreme opposition to President Kennedy that was present in some parts of the Dallas community
LJSpeech-1.1/wavs/LJ009-0289.wav|and before he took to hanging he was employed as a watchman at Reid's brewery in Liquorpond Street.
LJSpeech-1.1/wavs/LJ050-0071.wav|Both Director Hoover and Belmont expressed to the Commission the great concern of the FBI, which is shared by the Secret Service,
LJSpeech-1.1/wavs/LJ017-0264.wav|They were an abject, miserable crew, cowards at heart; but some, especially Lopez, continued bloodthirsty to the last.
LJSpeech-1.1/wavs/LJ047-0170.wav|On November five, Hosty was traveling near Mrs. Paine's home and took the occasion to stop by to ask whether she had any further information.
LJSpeech-1.1/wavs/LJ015-0132.wav|The total amount was never exactly made out, but the false stock created and issued by him was estimated at two hundred twenty thousand pounds.
LJSpeech-1.1/wavs/LJ025-0111.wav|But has the advance of biology simply tended to break down old distinctions without establishing new ones?
LJSpeech-1.1/wavs/LJ004-0061.wav|by want of bed-clothing by night or firing by day
LJSpeech-1.1/wavs/LJ026-0139.wav|It is probable that new leaves and new tissues generally are always formed in part from this reserve starch.
LJSpeech-1.1/wavs/LJ012-0128.wav|A young man named Caspar, clerk to a steam-ship company,
LJSpeech-1.1/wavs/LJ014-0054.wav|a maidservant, Sarah Thomas, murdered her mistress, an aged woman, by beating out her brains with a stone.
LJSpeech-1.1/wavs/LJ004-0199.wav|This made exertion compulsory, and imposed hard labor as a proper punishment.
LJSpeech-1.1/wavs/LJ012-0136.wav|Moss was known to be intimate with the elder Caspar,
LJSpeech-1.1/wavs/LJ049-0180.wav|The Commission has the impression
LJSpeech-1.1/wavs/LJ025-0093.wav|and that the latter is, chemically speaking, just as complicated as the former.
LJSpeech-1.1/wavs/LJ046-0076.wav|He cannot and will not take the precautions of a dictator or a sovereign.
LJSpeech-1.1/wavs/LJ049-0173.wav|As the testimony of J. Edgar Hoover and other Bureau officials revealed, the FBI did not believe that its directive required the Bureau
LJSpeech-1.1/wavs/LJ034-0177.wav|Fischer placed the man in the easternmost window on the south side of the Depository Building on either the fifth or the sixth floor.
LJSpeech-1.1/wavs/LJ050-0096.wav|I ask you to look into this case and take the necessary steps to repair the damage done to me and my family. End quote.
LJSpeech-1.1/wavs/LJ018-0322.wav|Nor were these two abortive efforts all that were planned.
LJSpeech-1.1/wavs/LJ040-0240.wav|and returned to New Orleans where Lee finished the ninth grade before he left school to work for a year.
LJSpeech-1.1/wavs/LJ018-0070.wav|Sattler was ironed for safe custody,
LJSpeech-1.1/wavs/LJ024-0113.wav|The other groups is composed of those who honestly believe the amendment process is the best
LJSpeech-1.1/wavs/LJ042-0105.wav|he took the position that the Communist Party officials in the Soviet Union were opportunists who were betraying their positions for personal gain.
LJSpeech-1.1/wavs/LJ032-0265.wav|Each man testified that he thought he could detect the outline of a rifle in the blanket, even though the blanket was empty.
LJSpeech-1.1/wavs/LJ003-0015.wav|For this and other acts of misconduct there was the discipline of the refractory ward, or "strong room" on the debtors' side.
LJSpeech-1.1/wavs/LJ005-0274.wav|The committee most of all insisted upon the entire individual separation of prisoners, except during the hours of labor,
LJSpeech-1.1/wavs/LJ041-0037.wav|and subsequently, after this conversation, my father came in and we were kind of arguing back and forth about the situation, and my father came in the room,
LJSpeech-1.1/wavs/LJ016-0421.wav|brutal creature who showed no remorse, but was subject to fits of ungovernable passion, when she broke out into language the most appalling.
LJSpeech-1.1/wavs/LJ009-0098.wav|There! there! I see the Lamb of God! Oh! how happy! Oh! this is happy!
LJSpeech-1.1/wavs/LJ015-0089.wav|in receiving guests at home, or could be spared from two rival establishments in other parts of the town.
LJSpeech-1.1/wavs/LJ008-0201.wav|in an adjoining house, that of an undertaker, was Lord Alfred Paget, also with several friends.
LJSpeech-1.1/wavs/LJ013-0255.wav|The victim was the first of these three.
LJSpeech-1.1/wavs/LJ015-0124.wav|but it was based upon more extended and audacious forgeries.
LJSpeech-1.1/wavs/LJ006-0235.wav|The surgeon's journal produced to the inspectors contained numerous entries of terrible wounds inflicted in a cowardly way.
LJSpeech-1.1/wavs/LJ031-0115.wav|From approximately four p.m. to four:fifty p.m. on November twenty-two,
LJSpeech-1.1/wavs/LJ050-0261.wav|from many Government agencies including the Department of Defense and the President's Office of Science and Technology.
LJSpeech-1.1/wavs/LJ006-0277.wav|But there were evils akin to those on the male side, prominent amongst which was the undue influence accorded to prisoners.
LJSpeech-1.1/wavs/LJ047-0090.wav|Our investigation of Oswald had disclosed no evidence that Oswald was acting under the instructions or on behalf of
LJSpeech-1.1/wavs/LJ005-0134.wav|in twenty-two county jails there were one thousand sixty-three sleeping cells in all (in eighteen twenty-three)
LJSpeech-1.1/wavs/LJ023-0140.wav|But all federal judges, once appointed,
LJSpeech-1.1/wavs/LJ019-0322.wav|On the other hand, new and careful regulations were framed to secure the moral and material well-being of the inmates of the jails.
LJSpeech-1.1/wavs/LJ046-0165.wav|Most of these cases involved persons who used threatening language in communications to or about the President.
LJSpeech-1.1/wavs/LJ015-0138.wav|His fault was generosity,
LJSpeech-1.1/wavs/LJ028-0411.wav|dressing the same, eating the same food as did their ancestors when Nebuchadnezzar built the walls of Babylon.
LJSpeech-1.1/wavs/LJ008-0187.wav|It was still greater at Fauntleroy's execution in eighteen twenty-four, when no less than one hundred thousand persons assembled, it was said.
LJSpeech-1.1/wavs/LJ045-0055.wav|He said this about members of the Russian-speaking group in the Dallas-Ft. Worth area, whom she said he tried to forbid her from seeing,
LJSpeech-1.1/wavs/LJ040-0071.wav|From the time Marguerite Oswald returned to work until December twenty-six, nineteen forty-two, when Lee too was sent to the orphans' home,
LJSpeech-1.1/wavs/LJ044-0040.wav|He wrote that, quote, thousands of circulars were distributed, end quote.
LJSpeech-1.1/wavs/LJ023-0001.wav|The Fireside Chats of Franklin Delano Roosevelt, by Franklin D Roosevelt, Section nine.
LJSpeech-1.1/wavs/LJ004-0021.wav|The neglect of prison reform in those days was not to be visited upon the legislature.
LJSpeech-1.1/wavs/LJ003-0169.wav|Prisoners who could afford it sometimes paid for four beds, at the rate of twenty-eight shillings, and so secured the luxury of a private room.
LJSpeech-1.1/wavs/LJ010-0139.wav|After the five bodies had hung for half-an-hour, a man in a mask came forward to complete the sentence.
LJSpeech-1.1/wavs/LJ039-0074.wav|you should not have any difficulty in hitting your target. I mean it requires no training at all to shoot a weapon with a telescopic sight
LJSpeech-1.1/wavs/LJ008-0239.wav|The loud shout of the multitude once more subsided, or only fell upon the abstracted ear like the dreamy murmur of an ocean shell.
LJSpeech-1.1/wavs/LJ033-0026.wav|There was little conversation between them on the way home.
LJSpeech-1.1/wavs/LJ008-0305.wav|they never ceased cursing until the passion of anger so excited was exchanged for joy in some and grief in others.
LJSpeech-1.1/wavs/LJ012-0114.wav|but their request, in spite of their earnest entreaties,
LJSpeech-1.1/wavs/LJ025-0092.wav|It is now established that nitrogen is as essential a constituent of vegetable as of animal living matter
LJSpeech-1.1/wavs/LJ016-0257.wav|and the raison d'être of the penalty, which in principle so many opposed, would be gone.
LJSpeech-1.1/wavs/LJ044-0133.wav|He had not found another job. His wife was expecting their second child in October and there was concern about the cost which would be involved.
LJSpeech-1.1/wavs/LJ025-0039.wav|he afterward affirms that nitrogen is peculiar to animals, and herein he places the third distinction between the animal and the plant.
LJSpeech-1.1/wavs/LJ043-0060.wav|from one of the elderly relatives with whom he spoke. Oswald's interest in such things presents a sharp contrast with his attitude
LJSpeech-1.1/wavs/LJ015-0040.wav|This borrowing continued, and on such a scale that their paper was soon at a discount,
LJSpeech-1.1/wavs/LJ003-0163.wav|the misdemeanant tried or untried, the debtor who wished to avoid the discomfort of the crowded debtors' side, the outspoken newspaper editor,
LJSpeech-1.1/wavs/LJ033-0091.wav|Frazier told the Commission, quote,
LJSpeech-1.1/wavs/LJ049-0187.wav|These proposals included suggestions to locate exclusive responsibility for all phases of the work
LJSpeech-1.1/wavs/LJ028-0442.wav|At a distance of about two miles to the south of Babel is the larger and lower mound called the Kasr, or the Fortress,
LJSpeech-1.1/wavs/LJ006-0134.wav|The governor himself admitted that a prisoner of weak intellect who had been severely beaten and much injured by a wardsman did not dare complain
LJSpeech-1.1/wavs/LJ023-0127.wav|When I commenced to review the situation with the problem squarely before me,
LJSpeech-1.1/wavs/LJ037-0117.wav|Both men picked Oswald as the man who had run south on Patton with a gun in his hand.
LJSpeech-1.1/wavs/LJ039-0180.wav|As has been shown in chapter three, if the three shots were fired within a period of from four point eight to five point six seconds,
LJSpeech-1.1/wavs/LJ043-0038.wav|nor his mother were objects of his affection, quote, but only examples of workers in the U.S., end quote.
LJSpeech-1.1/wavs/LJ024-0059.wav|It is the clear intention of our public policy to provide for a constant flow of new and younger blood into the judiciary.
LJSpeech-1.1/wavs/LJ009-0163.wav|In the same year capital punishment was further restricted, and ceased to be the legal sentence for coining,
LJSpeech-1.1/wavs/LJ019-0007.wav|By other acts local authorities were empowered to construct new jails or hire accommodation in the district;
LJSpeech-1.1/wavs/LJ018-0317.wav|and one warder admitted that he was to have one thousand pounds more paid to him, and to be provided with a passage to Australia.
LJSpeech-1.1/wavs/LJ040-0169.wav|She observed that since Lee's mother worked all day, he made his own meals and spent all his time alone
LJSpeech-1.1/wavs/LJ037-0259.wav|that the man who killed Tippit was wearing a light-colored jacket,
LJSpeech-1.1/wavs/LJ048-0101.wav|The Commission concludes that the most significant advance arrangements for the President's trip were soundly planned.
LJSpeech-1.1/wavs/LJ010-0312.wav|Among the prisoner's private papers, one was found giving full details of the stock he had feloniously sold out,
LJSpeech-1.1/wavs/LJ009-0215.wav|All the shops in the neighborhood were shut, and the windows and tops of the houses were crowded with spectators.
LJSpeech-1.1/wavs/LJ028-0130.wav|And after he had walled the city, and adorned its gates, he built another palace before his father's palace; but so that they joined to it:
LJSpeech-1.1/wavs/LJ026-0040.wav|And in this connection the fact that some bacteria -- the simplest organisms known and devoid of chlorophyll
LJSpeech-1.1/wavs/LJ016-0328.wav|Nevertheless, in the very next session
LJSpeech-1.1/wavs/LJ004-0069.wav|In short, attention to his feelings, mental and bodily, a supply of every necessary, abstraction from evil society,
LJSpeech-1.1/wavs/LJ016-0235.wav|He was so much in favor of short drops that his immediate successor, Marwood, stigmatized him as "short-drop" man.
LJSpeech-1.1/wavs/LJ042-0229.wav|To the question of, quote, Are you a Communist? End quote, he first answered "Yes,"
LJSpeech-1.1/wavs/LJ008-0217.wav|a throng of people whom neither rain, snow, storm, nor darkness ever hindered from attending the show.
LJSpeech-1.1/wavs/LJ032-0194.wav|the Commission was unable to reach any firm conclusion as to when the fibers were caught in the rifle.
LJSpeech-1.1/wavs/LJ021-0187.wav|It is perhaps not strange that the conservative British press has told us with pardonable irony
LJSpeech-1.1/wavs/LJ047-0236.wav|was capable or potentially an assassin of the President of the United States, end quote.
LJSpeech-1.1/wavs/LJ039-0161.wav|and the Infantry Weapons Evaluation Branch of the U.S. Army. There were no misfires.
LJSpeech-1.1/wavs/LJ006-0282.wav|the pump was the only provision, and this in a place within sight of visitors, of the windows of the male turnkeys, and unprotected from the weather.
LJSpeech-1.1/wavs/LJ010-0036.wav|Hocker was found upon the scene of his crime, irresistibly attracted thither, as was Theodore Gardelle.
LJSpeech-1.1/wavs/LJ008-0273.wav|At the Old Bailey almost every one capitally convicted by a jury was sentenced to be hanged.
LJSpeech-1.1/wavs/LJ006-0181.wav|It was the same old story -- evil constantly in the ascendant, the least criminal at the mercy of the most depraved.
LJSpeech-1.1/wavs/LJ018-0289.wav|When all was ready, Bidwell first "refreshed his credit" at the Bank of England, as well as disarmed suspicion,
LJSpeech-1.1/wavs/LJ018-0087.wav|Several cases of gigantic fraud, rivaling any already recorded, were brought to light between eighteen fifty-six and eighteen seventy-three.
LJSpeech-1.1/wavs/LJ016-0098.wav|or any of the new establishments at home, at Portland, Dartmoor, or elsewhere.
LJSpeech-1.1/wavs/LJ015-0240.wav|Agar at once got to work on the first safe.
LJSpeech-1.1/wavs/LJ041-0182.wav|and believed that our Government did not have, quote, too much to offer, end quote, but was not in favor of, quote, the Communist way of life, end quote.
LJSpeech-1.1/wavs/LJ012-0288.wav|and was seen to be busily engaged in washing down the house with bucket and mop.
LJSpeech-1.1/wavs/LJ025-0015.wav|in which the question is treated with that comprehensiveness of knowledge and clear critical judgment which characterize his writings
LJSpeech-1.1/wavs/LJ038-0060.wav|John Gibson, another patron in the theatre, saw an officer grab Oswald, and he claims that he heard the click of a gun misfiring.
LJSpeech-1.1/wavs/LJ007-0207.wav|but this had the effect of throwing them into closer contact, and of making them more intimately acquainted with, more directly influential upon, one another.
LJSpeech-1.1/wavs/LJ003-0025.wav|In the latter duties he was, however, supervised by three auditors, freely chosen by the prisoners among themselves.
LJSpeech-1.1/wavs/LJ017-0213.wav|the first and second mates, Karswell and Taffir; there were two other Englishmen on board, and the rest of the crew were a polyglot lot,
LJSpeech-1.1/wavs/LJ009-0064.wav|the lately smirking footmen close their eyes and forget their liveries, the ordinary clasps his hands, the turnkeys cry 'Hush!'
LJSpeech-1.1/wavs/LJ019-0238.wav|The first "glass house," or room in which prisoners could talk in private with their attorneys, but yet be seen by the warder on the watch, had been constructed
LJSpeech-1.1/wavs/LJ009-0165.wav|House-breaking, as distinguished from burglary, was similarly exempted in the following year;
LJSpeech-1.1/wavs/LJ032-0145.wav|also bore impressions of the same irregularities that appeared on the barrel of the rifle.
LJSpeech-1.1/wavs/LJ006-0136.wav|These wardsmen, besides thus ruling the roast, had numerous special privileges, if such they can be called.
LJSpeech-1.1/wavs/LJ015-0129.wav|by inserting say one before five hundred, and thus making it
LJSpeech-1.1/wavs/LJ031-0191.wav|Nine minutes later, the Presidential airplane departed for Washington, D.C.
LJSpeech-1.1/wavs/LJ031-0117.wav|assisted by Drs. William Osborne and John Parker.
LJSpeech-1.1/wavs/LJ010-0250.wav|of the criminal intent to kill.
LJSpeech-1.1/wavs/LJ026-0103.wav|Just as the solid food of animals must be digested in preparation for absorption,
LJSpeech-1.1/wavs/LJ002-0234.wav|In the yard behind the prison
LJSpeech-1.1/wavs/LJ030-0086.wav|Vice-Presidential car.
LJSpeech-1.1/wavs/LJ006-0179.wav|But, indeed, his whole rule was far too mild, and under this mistaken leniency
LJSpeech-1.1/wavs/LJ042-0184.wav|Expanding on his ideas on how his alternative to communism and capitalism might be introduced, he wrote of a, quote, readily foreseeable
LJSpeech-1.1/wavs/LJ034-0217.wav|One of these employees was alleged to resemble Lee Harvey Oswald.
LJSpeech-1.1/wavs/LJ032-0220.wav|Since Exhibit Number one thirty-three B was taken with Oswald's camera,
LJSpeech-1.1/wavs/LJ039-0151.wav|Accuracy of Weapon
LJSpeech-1.1/wavs/LJ028-0079.wav|The museum authorities believed that the cameo was one of the many spurious objects which the Eastern forgers were constantly sending to Europe,
LJSpeech-1.1/wavs/LJ013-0108.wav|Next day a person betrayed him for the reward, and he was soon captured.
LJSpeech-1.1/wavs/LJ038-0102.wav|Oswald also denied owning a rifle and said that since leaving the Marine Corps he had fired only a small bore twenty-two rifle.
LJSpeech-1.1/wavs/LJ044-0007.wav|that he was involved in any conspiracy, his political activities do provide insight into certain aspects of Oswald's character
LJSpeech-1.1/wavs/LJ008-0316.wav|"A. B., your case has been taken into consideration by the king in council, and His Majesty has been mercifully pleased to spare your life."
LJSpeech-1.1/wavs/LJ019-0259.wav|while the former was intended for the congregate labor of a number, and the latter, as its name implies, imposed continuous solitary toil.
LJSpeech-1.1/wavs/LJ014-0090.wav|On the Sunday Mrs. Manning roasted a goose at this same kitchen fire, and ate it with relish in the afternoon.
LJSpeech-1.1/wavs/LJ029-0123.wav|was not used for the main portion of the downtown part of the motorcade because Main Street offered better vantage points for spectators.
LJSpeech-1.1/wavs/LJ016-0204.wav|For acting as executioner of Horsemonger Lane Jail
LJSpeech-1.1/wavs/LJ042-0224.wav|For example, in response to his questions about his decision to go to the Soviet Union, his first draft answered, quote,
LJSpeech-1.1/wavs/LJ001-0085.wav|It was reserved for the founders of the later eighteenth century to produce letters which are positively ugly, and which, it may be added,
LJSpeech-1.1/wavs/LJ040-0127.wav|or for detention pending court appearance or commitment to a child-caring or custodial institution such as a training school.
LJSpeech-1.1/wavs/LJ013-0190.wav|and at last his lordship's watch was found secreted under the leads of the sink.
LJSpeech-1.1/wavs/LJ012-0120.wav|one of them of considerable size.
LJSpeech-1.1/wavs/LJ021-0168.wav|I do not want to think that it is the destiny of any American to remain permanently on relief rolls.
LJSpeech-1.1/wavs/LJ023-0082.wav|In the last four years the sound rule of giving statutes the benefit of all reasonable doubt has been cast aside.
LJSpeech-1.1/wavs/LJ030-0116.wav|looking out toward the crowd, and Special Agent Kellerman assumed his position next to the car.
LJSpeech-1.1/wavs/LJ038-0176.wav|The Commission evaluated the following evidence in considering whether Lee Harvey Oswald fired the shot which almost killed General Walker:
LJSpeech-1.1/wavs/LJ042-0193.wav|But these preferred tactics now may prove to be too limited in the near future,
LJSpeech-1.1/wavs/LJ048-0044.wav|the knowledge of his defection, his arrogance and hostility to the United States,
LJSpeech-1.1/wavs/LJ043-0177.wav|and his Communist and Socialist Worker's Party newspapers would probably have appeared on the front pages of newspapers or magazines all over the country,
LJSpeech-1.1/wavs/LJ028-0503.wav|and decorated here and there with large reliefs representing bulls and lions and dragons,
LJSpeech-1.1/wavs/LJ031-0017.wav|since the riders in his car, quote, were not exactly aware what had happened, end quote, and the car went on to the Trade Mart first.
LJSpeech-1.1/wavs/LJ015-0062.wav|but in neither did the sums misappropriated reach quite the same high figure.
LJSpeech-1.1/wavs/LJ016-0087.wav|Among the escapes still remembered was one in eighteen forty-nine, accomplished by a man who had been employed
LJSpeech-1.1/wavs/LJ041-0101.wav|aggressive and even somewhat pugnacious, although Powers, quote, wouldn't say that this guy is a troublemaker, end quote.
LJSpeech-1.1/wavs/LJ014-0340.wav|They could not deny that the latter was the truth, and were forthwith stigmatized by Mr. Chapman, Overend and Gurney's representative, as rogues.
LJSpeech-1.1/wavs/LJ048-0002.wav|Chapter eight. The Protection of the President. Part three.
LJSpeech-1.1/wavs/LJ004-0211.wav|The moral welfare of the inmates was as closely looked after as the physical.
LJSpeech-1.1/wavs/LJ050-0230.wav|Manpower and Technical Assistance From Other Agencies
LJSpeech-1.1/wavs/LJ040-0209.wav|well I've got to live with her. I guess I love her, end quote.
LJSpeech-1.1/wavs/LJ012-0272.wav|They were apparently good friends when last seen together at a neighbor's, where they seemed "perfectly happy and sociable, and eager for the wedding day."
LJSpeech-1.1/wavs/LJ014-0167.wav|When I came upon the scene at midnight, the shrillness of the cries and howls that were raised from time to time,
LJSpeech-1.1/wavs/LJ038-0027.wav|Detective Paul L. Bentley rushed to the balcony and told the projectionist to turn up the house lights.
LJSpeech-1.1/wavs/LJ043-0049.wav|After she had bought some clothes for Marina Oswald and a highchair for the baby, Oswald emphatically told her to stop.
LJSpeech-1.1/wavs/LJ040-0171.wav|where he did as he wanted and he didn't have to live by any rules or come into contact with people, end quote.
LJSpeech-1.1/wavs/LJ019-0363.wav|he could close the "inadequate" prison, by declaring it unfit for the reception of prisoners.
LJSpeech-1.1/wavs/LJ016-0080.wav|and probably one of the few cases of a recurrence, but under proper safeguards and limitations, to the old system of chains.
LJSpeech-1.1/wavs/LJ036-0060.wav|south on Houston, and southwest across the Houston viaduct to service the Oak Cliff area along Marsalis.
LJSpeech-1.1/wavs/LJ010-0170.wav|His acquaintances often asked his object in this, but he kept his own counsel till the tenth June.
LJSpeech-1.1/wavs/LJ026-0093.wav|The plant absorbs also a small amount of kinetic energy, independently of the sunlight, in the form of heat.
LJSpeech-1.1/wavs/LJ046-0074.wav|If the sole goal were to protect the life of the President, it could be accomplished with reasonable assurance despite the multiple roles he must play.
LJSpeech-1.1/wavs/LJ020-0086.wav|Hot rolls and muffins should never be cut.
LJSpeech-1.1/wavs/LJ021-0017.wav|Instead of the give and take of free individual contract,
LJSpeech-1.1/wavs/LJ029-0185.wav|by Vice President Johnson during the nineteen sixty campaign.
LJSpeech-1.1/wavs/LJ023-0066.wav|and provide for the common defense and general welfare of the United States.
LJSpeech-1.1/wavs/LJ040-0236.wav|and when his mother failed to cooperate in any way with school authorities,
LJSpeech-1.1/wavs/LJ002-0028.wav|In order to realize the evils entailed by incarceration in Newgate in these days, it is necessary to give some account of its interior
LJSpeech-1.1/wavs/LJ048-0278.wav|The Commission recognizes that the responsibilities of members of the White House detail of the Secret Service are arduous.
LJSpeech-1.1/wavs/LJ008-0125.wav|The sheriff arrived, attended by his officers, to receive the prisoner from the keeper.
LJSpeech-1.1/wavs/LJ005-0207.wav|Kidderminster had a prison, one damp chill room,
LJSpeech-1.1/wavs/LJ016-0432.wav|The only subject another showed any interest in was the theatres and the new pieces that were being produced. A third, Christian Satler,
LJSpeech-1.1/wavs/LJ044-0118.wav|He anticipated that the full disclosure of his defection would hinder him in, quote, the struggle for progress and freedom in the United States, end quote.
LJSpeech-1.1/wavs/LJ015-0183.wav|His manner was generally self-possessed, but his face was marked with "uneasy earnestness,"
LJSpeech-1.1/wavs/LJ017-0026.wav|Yet it was clearly proved that the dumplings contained arsenic, that she, and she alone, had made the dough,
LJSpeech-1.1/wavs/LJ048-0245.wav|but that their visits to the Cellar were, quote, neither consistent nor inconsistent, end quote, with their duty.
LJSpeech-1.1/wavs/LJ006-0109.wav|a perpetuation under another form of the old detestable custom of garnish.
LJSpeech-1.1/wavs/LJ045-0125.wav|For example, in his letter of November nine, nineteen sixty-three,
LJSpeech-1.1/wavs/LJ047-0087.wav|According to the Bureau, quote,
LJSpeech-1.1/wavs/LJ018-0377.wav|Conviction was obtained through the evidence of the steward and two of the least culpable of the crew.
LJSpeech-1.1/wavs/LJ013-0027.wav|He crept along the coast close in shore, looking for a quiet spot to cast away the ship,
LJSpeech-1.1/wavs/LJ035-0001.wav|Report of the President's Commission on the Assassination of President Kennedy. The Warren Commission Report. By The President's Commission on the Assassination of President Kennedy.
LJSpeech-1.1/wavs/LJ031-0182.wav|Concerned that the local officials might try to prevent the plane's departure, O'Donnell asked that the pilot take off immediately.
LJSpeech-1.1/wavs/LJ033-0184.wav|had different characteristics from both the actual bag and the sample taken on November twenty-two.
LJSpeech-1.1/wavs/LJ018-0235.wav|He embarked forthwith in a career of the wildest extravagance, and ere long he had parted in his mother's name with most of the landed estates.
LJSpeech-1.1/wavs/LJ012-0204.wav|They presented themselves about noon one day at the dissecting room of King's College Hospital, accompanied by a third man,
LJSpeech-1.1/wavs/LJ032-0001.wav|Report of the President's Commission on the Assassination of President Kennedy. The Warren Commission Report. By The President's Commission on the Assassination of President Kennedy.
LJSpeech-1.1/wavs/LJ048-0147.wav|The Commission believes
LJSpeech-1.1/wavs/LJ013-0189.wav|next, and in the same place, a chased gold key;
LJSpeech-1.1/wavs/LJ020-0036.wav|Take care that it does not burn in baking. The molasses renders it liable to scorching.
LJSpeech-1.1/wavs/LJ010-0029.wav|the cold-blooded, calculating atrocity born of self-interest, were still the irresistible incentives to kill.
LJSpeech-1.1/wavs/LJ002-0156.wav|Thomas Dobson, on twenty-second August, seventeen ninety-nine, for one shilling, with costs of eight shillings, ten pence.
LJSpeech-1.1/wavs/LJ021-0064.wav|not only to labor in the form of new jobs, in relief from overwork and in relief from underpay,
LJSpeech-1.1/wavs/LJ050-0151.wav|it makes no use of the recent developments in automatic data processing which are widely used in the business world and in other Government offices.
LJSpeech-1.1/wavs/LJ046-0099.wav|thoroughly professional personnel, using the best technical equipment that can be devised.
LJSpeech-1.1/wavs/LJ030-0201.wav|According to Governor and Mrs. Connally, it was after this shot that Kellerman issued his emergency instructions and the car accelerated.
LJSpeech-1.1/wavs/LJ016-0126.wav|The top of the wall was gained without difficulty.
LJSpeech-1.1/wavs/LJ032-0078.wav|a Selective Service registration certificate, and a certificate of service in the U.S. Marine Corps, all three cards being in his own name.
LJSpeech-1.1/wavs/LJ005-0179.wav|The total number of prisoners they received during the year varied from two persons to many hundreds.
LJSpeech-1.1/wavs/LJ006-0053.wav|and the whole by proper management might have been so accommodated as to prevent overcrowding.
LJSpeech-1.1/wavs/LJ019-0385.wav|Mr. (now Sir Richard) Cross, having applied himself vigorously to the task of reorganizing the whole system, became convinced
LJSpeech-1.1/wavs/LJ037-0062.wav|The Commission reviewed the transcript of a phone conversation in which Mrs. Markham is alleged to have provided such a description.
LJSpeech-1.1/wavs/LJ016-0429.wav|Others talk freely enough on various topics, but principally upon their own cases.
LJSpeech-1.1/wavs/LJ047-0187.wav|It was then my plan to interview Marina Oswald in detail concerning both herself and her husband's background. Question:
LJSpeech-1.1/wavs/LJ033-0163.wav|James C. Cadigan, a questioned-documents expert with the Bureau, compared the samples with the paper and tape in the actual bag.
LJSpeech-1.1/wavs/LJ026-0009.wav|The fact is that they are on the border line, are neither plants nor animals but simply organisms.
LJSpeech-1.1/wavs/LJ033-0092.wav|the main reason he was going over there that Thursday afternoon when he was to bring back some curtain rods, so I didn't think any more about it when he told me that, end quote,
LJSpeech-1.1/wavs/LJ001-0059.wav|the greater part of these Italian printers, it should be mentioned, were Germans or Frenchmen, working under the influence of Italian opinion and aims.
LJSpeech-1.1/wavs/LJ025-0171.wav|setting free the oxygen and laying hold of the carbon which it contains.
LJSpeech-1.1/wavs/LJ044-0068.wav|as a result of which he was, quote, flooded with callers and invitations to debates, etc. as well as people interested in joining the F.P.C.C.
LJSpeech-1.1/wavs/LJ040-0054.wav|It was a factor which contributed to his character and thereby might have influenced his decision to assassinate President Kennedy.
LJSpeech-1.1/wavs/LJ025-0030.wav|As the animal body required to be independent of heat and of the atmosphere,
LJSpeech-1.1/wavs/LJ019-0071.wav|moreover, it was nearly impossible to prevent communication and mutual contamination.
LJSpeech-1.1/wavs/LJ023-0084.wav|When the Congress has sought to stabilize national agriculture, to improve the conditions of labor,
LJSpeech-1.1/wavs/LJ040-0115.wav|Pic and his wife would have been happy to have kept Lee, however,
LJSpeech-1.1/wavs/LJ011-0162.wav|and it stated that Mrs. Turner had been stricken with paralysis.
LJSpeech-1.1/wavs/LJ004-0106.wav|They were to report in writing to quarter sessions as to the state of the jail, and as to all abuses which they might observe therein.
LJSpeech-1.1/wavs/LJ015-0095.wav|The blow fell suddenly, and when least expected. One morning Mr. Fasson asked casually for certain certificates,
LJSpeech-1.1/wavs/LJ005-0251.wav|Again in eighteen thirty-five prisons and their inmates became once more the care of the senate, and the subject was taken up this time by the House of Lords.
LJSpeech-1.1/wavs/LJ006-0005.wav|just before the public mind was first awakened to the need for thorough reform.
LJSpeech-1.1/wavs/LJ005-0034.wav|Undeterred by these sarcasms and misrepresentations,
LJSpeech-1.1/wavs/LJ004-0040.wav|They felt that private enterprise might
LJSpeech-1.1/wavs/LJ034-0018.wav|Next to these cartons was the handmade paper bag, previously discussed,
LJSpeech-1.1/wavs/LJ043-0024.wav|Even though it appears that they may have left Oswald a few days before, it seems that he resisted the move as best he could.
LJSpeech-1.1/wavs/LJ007-0048.wav|so subversive of meditation, so disturbing to the thoughts;
LJSpeech-1.1/wavs/LJ001-0143.wav|For where these are boldly and carefully designed, and each letter is thoroughly individual in form,
LJSpeech-1.1/wavs/LJ003-0132.wav|whose language and manners, whose female associates of the most abandoned description, and the scenes consequent with such lost wretches
LJSpeech-1.1/wavs/LJ021-0055.wav|In meeting the problems of industrial recovery the chief agency of the government has been the National Recovery Administration.
LJSpeech-1.1/wavs/LJ012-0162.wav|Having brought down the records of great frauds, forgeries, and thefts from about eighteen twenty-five to eighteen forty,
LJSpeech-1.1/wavs/LJ005-0043.wav|and obtained full details, from places where they had been adopted, of the nature of these new machines
LJSpeech-1.1/wavs/LJ050-0136.wav|the Committee will include representatives of the President's Office of Science and Technology, Department of Defense, CIA,
LJSpeech-1.1/wavs/LJ037-0107.wav|They saw a man coming south on Patton with a revolver held high in his right hand. According to Callaway, the man crossed to the west side of Patton.
LJSpeech-1.1/wavs/LJ012-0187.wav|one of his accomplices, who took the police to the pond, where the remains of the unfortunate Mr. Weare were discovered, sunk in a sack weighted by stones.
LJSpeech-1.1/wavs/LJ046-0018.wav|to which the events of last November called attention.
LJSpeech-1.1/wavs/LJ024-0104.wav|I am therefore, going to spend my time, my efforts and my money
LJSpeech-1.1/wavs/LJ014-0195.wav|two of them supported Cope, who was still alive, although insensible, and Marley was apprehended. The evidence against him was completed
LJSpeech-1.1/wavs/LJ042-0081.wav|is made only after the longest and most serious considerations. I affirm that my allegiance is to the Union of Soviet Socialist Republics.
LJSpeech-1.1/wavs/LJ028-0280.wav|Calling to mind then the words of the Babylonian at the beginning of the siege:
LJSpeech-1.1/wavs/LJ016-0164.wav|When discovered next morning, quite dead, it was found that the strap actually did not touch his throat;
LJSpeech-1.1/wavs/LJ016-0095.wav|In eighteen fifty-three three men escaped in company from one of the wards in the middle yard.
LJSpeech-1.1/wavs/LJ050-0175.wav|Since the assassination, Secret Service procedures have been changed to require that a member of PRS accompany each advance survey team
LJSpeech-1.1/wavs/LJ041-0180.wav|never heard him in any way, shape or form confess that he was a Communist, or that he ever thought about being a Communist, end quote.
LJSpeech-1.1/wavs/LJ002-0213.wav|The Fleet, which stood in Farringdon Street,
LJSpeech-1.1/wavs/LJ012-0060.wav|to let the coach change and pass Petticoat Lane en route to the jail, where the suffering woman might be handed over to her friends.
LJSpeech-1.1/wavs/LJ034-0126.wav|shows three employees looking out of the fifth-floor window directly below the window from which the shots were fired.
LJSpeech-1.1/wavs/LJ015-0002.wav|The course of the swindlers was by no means smooth, but it was not till eighteen fifty-four that suspicion arose that anything was wrong.
LJSpeech-1.1/wavs/LJ017-0162.wav|Whatever the exact cause which impelled him to crime, it seems certain that he began to give her some poison,
LJSpeech-1.1/wavs/LJ010-0216.wav|and it is said that he is a military officer, but his name has not yet transpired.
LJSpeech-1.1/wavs/LJ039-0069.wav|Maj. Eugene D. Anderson, assistant head of the Marksmanship Branch of U.S. Marine Corps
LJSpeech-1.1/wavs/LJ019-0359.wav|He could in the first place withhold the government grant in aid of prison funds by refusing the certificate to the Treasury upon which the allowance was paid.
LJSpeech-1.1/wavs/LJ005-0292.wav|In many of the smaller boroughs they are totally unfit for the confinement of human beings.
LJSpeech-1.1/wavs/LJ035-0139.wav|They reentered the building by the rear door several minutes after Baker and Truly rushed through the front entrance.
LJSpeech-1.1/wavs/LJ013-0220.wav|that Courvoisier was idle, discontented, ready to take offense, greedy of gain;
LJSpeech-1.1/wavs/LJ048-0213.wav|watching his car, watching the sides, watching the crowds, giving advice or asking advice from the Chief
LJSpeech-1.1/wavs/LJ030-0193.wav|Kellerman saw Governor Connally in his wife's lap and Special Agent Clinton J. Hill lying across the trunk of the car.
LJSpeech-1.1/wavs/LJ018-0018.wav|he wore gold-rimmed eye-glasses and a gold watch and chain.
LJSpeech-1.1/wavs/LJ047-0182.wav|was quite interested in determining the nature of his contact with the Soviet Embassy in Mexico City, end quote.
LJSpeech-1.1/wavs/LJ041-0008.wav|He took walks and visited museums, and sometimes rode a rented bicycle in the park on Saturday mornings.
LJSpeech-1.1/wavs/LJ028-0169.wav|a square enclosure two furlongs each way, with gates of solid brass; which was also remaining in my time.
LJSpeech-1.1/wavs/LJ038-0216.wav|The references to house rent and payments for water and gas
LJSpeech-1.1/wavs/LJ006-0306.wav|Some of the prisoners had their valets, and all these were constantly in and out of the kitchen where this female prisoner was employed.
LJSpeech-1.1/wavs/LJ050-0028.wav|no sizable organization can achieve efficiency without the careful analysis and demarcation of responsibility
LJSpeech-1.1/wavs/LJ038-0095.wav|by the Commission, they had probative value in deciding the weight to be given to his denials that he assassinated President Kennedy
LJSpeech-1.1/wavs/LJ039-0213.wav|So that if you aimed with this weapon as it actually was received at the laboratory, it would not be necessary to take any lead whatsoever
LJSpeech-1.1/wavs/LJ004-0079.wav|"All measures and practices in prison which may injure him in any way are illegal,
LJSpeech-1.1/wavs/LJ015-0312.wav|The proceeds of these forgeries amounted, it was said, to some thousands per annum.
LJSpeech-1.1/wavs/LJ028-0287.wav|but found none by which he could hope to prevail, unless he maimed himself and then went over to the enemy.
LJSpeech-1.1/wavs/LJ034-0147.wav|Brennan stated that he was sure that the person firing the rifle was Oswald.
LJSpeech-1.1/wavs/LJ023-0128.wav|I came by a process of elimination to the conclusion that, short of amendments,
LJSpeech-1.1/wavs/LJ011-0007.wav|and was told by Mr. Fauntleroy that the lady had desired him to sell out, "which I have done," added the fraudulent banker, "and here are the proceeds,"
LJSpeech-1.1/wavs/LJ025-0045.wav|They get rid of the superfluous hydrogen and carbon and accumulate nitrogen.
LJSpeech-1.1/wavs/LJ005-0093.wav|The second act, passed in the following year, enlarged and amended the first, and at the same time gave powers to the House
LJSpeech-1.1/wavs/LJ018-0217.wav|As such he had pretty general control over his father's estates and affairs.
LJSpeech-1.1/wavs/LJ019-0258.wav|Both, however, varied greatly in mechanism and in the amount of energy they called forth,
LJSpeech-1.1/wavs/LJ025-0006.wav|that the functions or workings of the organs of plants, animals or man are quite distinct,
LJSpeech-1.1/wavs/LJ003-0123.wav|The most trifling acts were magnified into offenses.
LJSpeech-1.1/wavs/LJ026-0059.wav|For this purpose the food is taken not into the body proper, but into a kind of tubular chemical laboratory; called the alimentary canal,
LJSpeech-1.1/wavs/LJ032-0250.wav|In September nineteen sixty-three,
LJSpeech-1.1/wavs/LJ007-0163.wav|all the tumultuous and diversified passions and emotions which circumstances like these must necessarily generate
LJSpeech-1.1/wavs/LJ043-0110.wav|beginning May ten, nineteen sixty-three.
LJSpeech-1.1/wavs/LJ007-0215.wav|are the association of prisoners, and the unusual contamination to which such association gives rise.
LJSpeech-1.1/wavs/LJ021-0178.wav|Has England gone back to the gold standard today?
LJSpeech-1.1/wavs/LJ049-0160.wav|Since that time, the Secret Service has had and exercised responsibility for the physical protection of the President
LJSpeech-1.1/wavs/LJ001-0084.wav|But for the beauty of the earlier work they might have seemed tolerable.
LJSpeech-1.1/wavs/LJ014-0040.wav|that he rushed to a slaughterhouse in Hampstead and purposely stained his clothes with blood.
LJSpeech-1.1/wavs/LJ047-0114.wav|and that they had been married in Fort Worth and lived there until coming to New Orleans. He had told the New Orleans arresting officers that he had been born in Cuba.
LJSpeech-1.1/wavs/LJ038-0024.wav|Patrol cars bearing at least fifteen officers converged on the Texas Theatre.
LJSpeech-1.1/wavs/LJ048-0277.wav|He felt that each agent recognized the seriousness of the infraction and that there was no danger of a repetition.
LJSpeech-1.1/wavs/LJ001-0170.wav|being thin, tough, and opaque.
LJSpeech-1.1/wavs/LJ030-0152.wav|Speed of the Limousine
LJSpeech-1.1/wavs/LJ010-0213.wav|all of which declared their approval of the commander-in-chief.
LJSpeech-1.1/wavs/LJ038-0132.wav|When asked why he lived at his roominghouse under the name O. H. Lee,
LJSpeech-1.1/wavs/LJ028-0439.wav|for none of the many building inscriptions from Nebuchadnezzar mentions them.
LJSpeech-1.1/wavs/LJ028-0152.wav|In the circuit of the wall are a hundred gates, all of brass, with brazen lintels and sideposts.
LJSpeech-1.1/wavs/LJ038-0190.wav|three. I paid the house rent on the second so don't worry about it. four. Recently I also paid for water and gas.
LJSpeech-1.1/wavs/LJ014-0336.wav|Last of all, the well-known bankers Overend and Gurney, whose own affairs created much excitement some years later,
LJSpeech-1.1/wavs/LJ019-0226.wav|the erection of a wing or large block of cells was commenced within the original walls of the prison, and upon the north or male side.
LJSpeech-1.1/wavs/LJ002-0040.wav|intended to accommodate a couple of prisoners apiece, but often much more crowded.
LJSpeech-1.1/wavs/LJ028-0305.wav|troops for whose loss thou wilt care little, a thousand men.
LJSpeech-1.1/wavs/LJ047-0136.wav|they had vacated their apartment, and Marina Oswald had departed with their child in a station wagon with Texas registration.
LJSpeech-1.1/wavs/LJ016-0198.wav|I cannot find that Calcraft was sworn in when appointed, or any exact information when the old forbidding ceremony ceased to be practiced.
LJSpeech-1.1/wavs/LJ042-0171.wav|simply expressed, the left and right, and their offspring factions and concerns. Any practical attempt at one alternative
LJSpeech-1.1/wavs/LJ014-0092.wav|The hole must have been excavated and the quicklime purchased quite three weeks before O'Connor met his death,
LJSpeech-1.1/wavs/LJ019-0176.wav|The wards had open fires, but the separate cells were not warmed at all.
LJSpeech-1.1/wavs/LJ005-0214.wav|Some reforms had certainly been introduced, such as the abolition of irons, already referred to, and the establishment of male and female infirmaries.
LJSpeech-1.1/wavs/LJ008-0310.wav|which was done with a sort of ceremony intended to be impressive.
LJSpeech-1.1/wavs/LJ016-0022.wav|A harrowing scene followed;
LJSpeech-1.1/wavs/LJ013-0137.wav|which the various club secretaries identified as the property of their respective clubs.
LJSpeech-1.1/wavs/LJ020-0060.wav|turning the pan once in this time, and covering with clean -- never printed -- paper, should they brown too fast.
LJSpeech-1.1/wavs/LJ042-0155.wav|it appears to be the work of a fairly well organized person.
LJSpeech-1.1/wavs/LJ025-0159.wav|and this plant will, in due time, flower and produce its crop of beans just as if it were grown in the garden or in the field.
LJSpeech-1.1/wavs/LJ022-0080.wav|Projects will be sought which promise ultimate return to the federal treasury of a considerable proportion of the costs.
LJSpeech-1.1/wavs/LJ044-0168.wav|During this period Oswald may have practiced opening and closing the bolt on his rifle in a screened porch in his apartment.
LJSpeech-1.1/wavs/LJ040-0219.wav|It would be incorrect, however, to believe that those aspects of Lee's personality which were observed in New York
LJSpeech-1.1/wavs/LJ040-0067.wav|and his half-brother John Pic, who had been born in nineteen thirty-two during Marguerite's previous marriage.
LJSpeech-1.1/wavs/LJ007-0121.wav|There were no restraints, cards and backgammon were played, and the time passed in feasting and revelry.
LJSpeech-1.1/wavs/LJ011-0019.wav|when it was found that a sum of ten thousand pounds, standing in the name of three trustees, of whom Fauntleroy was one,
LJSpeech-1.1/wavs/LJ003-0314.wav|the surgeon should see all prisoners, whether ill or well, once a week, and take general charge of the infirmaries.
LJSpeech-1.1/wavs/LJ014-0196.wav|by his identification by Cope in Westminster Hospital, who survived long enough to make a formal deposition before Mr. Jardine,
LJSpeech-1.1/wavs/LJ036-0003.wav|The Killing of Patrolman J. D. Tippit
LJSpeech-1.1/wavs/LJ011-0063.wav|Fauntleroy was not entirely dependent upon the ordinary for ghostly counsel in his extremity.
LJSpeech-1.1/wavs/LJ014-0208.wav|A murderous assault on a police constable, which so nearly ended fatally that the culprit was sentenced to death, although not executed,
|
Tools/DGLPyTorch/SyntheticGraphGeneration/configurations | configurations | ogbn_mag240m | {
"nodes": [
{
"name": "paper",
"count": 121751666,
"features": [
{
"name": "feat_0",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_1",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_2",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_3",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_4",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_5",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_6",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_7",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_8",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_9",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_10",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_11",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_12",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_13",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_14",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_15",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_16",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_17",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_18",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_19",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_20",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_21",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_22",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_23",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_24",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_25",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_26",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_27",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_28",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_29",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_30",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_31",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_32",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_33",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_34",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_35",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_36",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_37",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_38",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_39",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_40",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_41",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_42",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_43",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_44",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_45",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_46",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_47",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_48",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_49",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_50",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_51",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_52",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_53",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_54",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_55",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_56",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_57",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_58",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_59",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_60",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_61",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_62",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_63",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_64",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_65",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_66",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_67",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_68",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_69",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_70",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_71",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_72",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_73",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_74",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_75",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_76",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_77",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_78",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_79",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_80",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_81",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_82",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_83",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_84",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_85",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_86",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_87",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_88",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_89",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_90",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_91",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_92",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_93",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_94",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_95",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_96",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_97",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_98",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_99",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_100",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_101",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_102",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_103",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_104",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_105",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_106",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_107",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_108",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_109",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_110",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_111",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_112",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_113",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_114",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_115",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_116",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_117",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_118",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_119",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_120",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_121",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_122",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_123",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_124",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_125",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_126",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_127",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_128",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_129",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_130",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_131",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_132",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_133",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_134",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_135",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_136",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_137",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_138",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_139",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_140",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_141",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_142",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_143",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_144",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_145",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_146",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_147",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_148",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_149",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_150",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_151",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_152",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_153",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_154",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_155",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_156",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_157",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_158",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_159",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_160",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_161",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_162",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_163",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_164",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_165",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_166",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_167",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_168",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_169",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_170",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_171",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_172",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_173",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_174",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_175",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_176",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_177",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_178",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_179",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_180",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_181",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_182",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_183",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_184",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_185",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_186",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_187",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_188",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_189",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_190",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_191",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_192",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_193",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_194",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_195",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_196",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_197",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_198",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_199",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_200",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_201",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_202",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_203",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_204",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_205",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_206",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_207",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_208",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_209",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_210",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_211",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_212",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_213",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_214",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_215",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_216",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_217",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_218",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_219",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_220",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_221",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_222",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_223",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_224",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_225",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_226",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_227",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_228",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_229",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_230",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_231",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_232",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_233",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_234",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_235",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_236",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_237",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_238",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_239",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_240",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_241",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_242",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_243",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_244",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_245",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_246",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_247",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_248",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_249",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_250",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_251",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_252",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_253",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_254",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_255",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_256",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_257",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_258",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_259",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_260",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_261",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_262",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_263",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_264",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_265",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_266",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_267",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_268",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_269",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_270",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_271",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_272",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_273",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_274",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_275",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_276",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_277",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_278",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_279",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_280",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_281",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_282",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_283",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_284",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_285",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_286",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_287",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_288",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_289",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_290",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_291",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_292",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_293",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_294",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_295",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_296",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_297",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_298",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_299",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_300",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_301",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_302",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_303",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_304",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_305",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_306",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_307",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_308",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_309",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_310",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_311",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_312",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_313",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_314",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_315",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_316",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_317",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_318",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_319",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_320",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_321",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_322",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_323",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_324",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_325",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_326",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_327",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_328",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_329",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_330",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_331",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_332",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_333",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_334",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_335",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_336",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_337",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_338",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_339",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_340",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_341",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_342",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_343",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_344",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_345",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_346",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_347",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_348",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_349",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_350",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_351",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_352",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_353",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_354",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_355",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_356",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_357",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_358",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_359",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_360",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_361",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_362",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_363",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_364",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_365",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_366",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_367",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_368",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_369",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_370",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_371",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_372",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_373",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_374",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_375",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_376",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_377",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_378",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_379",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_380",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_381",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_382",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_383",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_384",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_385",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_386",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_387",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_388",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_389",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_390",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_391",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_392",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_393",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_394",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_395",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_396",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_397",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_398",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_399",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_400",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_401",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_402",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_403",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_404",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_405",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_406",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_407",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_408",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_409",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_410",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_411",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_412",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_413",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_414",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_415",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_416",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_417",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_418",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_419",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_420",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_421",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_422",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_423",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_424",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_425",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_426",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_427",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_428",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_429",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_430",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_431",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_432",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_433",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_434",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_435",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_436",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_437",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_438",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_439",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_440",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_441",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_442",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_443",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_444",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_445",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_446",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_447",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_448",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_449",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_450",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_451",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_452",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_453",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_454",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_455",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_456",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_457",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_458",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_459",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_460",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_461",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_462",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_463",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_464",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_465",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_466",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_467",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_468",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_469",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_470",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_471",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_472",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_473",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_474",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_475",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_476",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_477",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_478",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_479",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_480",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_481",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_482",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_483",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_484",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_485",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_486",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_487",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_488",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_489",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_490",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_491",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_492",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_493",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_494",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_495",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_496",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_497",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_498",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_499",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_500",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_501",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_502",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_503",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_504",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_505",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_506",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_507",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_508",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_509",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_510",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_511",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_512",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_513",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_514",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_515",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_516",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_517",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_518",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_519",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_520",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_521",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_522",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_523",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_524",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_525",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_526",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_527",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_528",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_529",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_530",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_531",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_532",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_533",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_534",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_535",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_536",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_537",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_538",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_539",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_540",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_541",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_542",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_543",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_544",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_545",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_546",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_547",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_548",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_549",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_550",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_551",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_552",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_553",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_554",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_555",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_556",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_557",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_558",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_559",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_560",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_561",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_562",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_563",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_564",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_565",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_566",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_567",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_568",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_569",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_570",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_571",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_572",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_573",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_574",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_575",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_576",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_577",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_578",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_579",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_580",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_581",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_582",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_583",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_584",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_585",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_586",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_587",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_588",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_589",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_590",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_591",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_592",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_593",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_594",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_595",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_596",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_597",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_598",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_599",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_600",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_601",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_602",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_603",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_604",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_605",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_606",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_607",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_608",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_609",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_610",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_611",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_612",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_613",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_614",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_615",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_616",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_617",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_618",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_619",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_620",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_621",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_622",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_623",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_624",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_625",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_626",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_627",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_628",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_629",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_630",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_631",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_632",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_633",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_634",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_635",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_636",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_637",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_638",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_639",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_640",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_641",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_642",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_643",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_644",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_645",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_646",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_647",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_648",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_649",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_650",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_651",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_652",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_653",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_654",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_655",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_656",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_657",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_658",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_659",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_660",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_661",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_662",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_663",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_664",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_665",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_666",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_667",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_668",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_669",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_670",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_671",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_672",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_673",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_674",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_675",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_676",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_677",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_678",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_679",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_680",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_681",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_682",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_683",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_684",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_685",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_686",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_687",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_688",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_689",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_690",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_691",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_692",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_693",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_694",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_695",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_696",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_697",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_698",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_699",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_700",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_701",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_702",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_703",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_704",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_705",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_706",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_707",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_708",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_709",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_710",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_711",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_712",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_713",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_714",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_715",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_716",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_717",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_718",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_719",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_720",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_721",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_722",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_723",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_724",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_725",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_726",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_727",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_728",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_729",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_730",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_731",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_732",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_733",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_734",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_735",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_736",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_737",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_738",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_739",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_740",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_741",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_742",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_743",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_744",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_745",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_746",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_747",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_748",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_749",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_750",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_751",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_752",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_753",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_754",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_755",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_756",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_757",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_758",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_759",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_760",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_761",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_762",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_763",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_764",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_765",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_766",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "feat_767",
"dtype": "float16",
"feature_type": "continuous",
"feature_file": "paper_feats.npy"
},
{
"name": "year",
"dtype": "int32",
"feature_type": "categorical",
"feature_file": "year_label.npy"
},
{
"name": "label",
"dtype": "int32",
"feature_type": "categorical",
"feature_file": "year_label.npy"
}
],
"features_path": "paper_tabular_features",
"[gen]tabular_generators": [
{
"type": "uniform",
"features_list": [
"feat_0",
"feat_1",
"feat_2",
"feat_3",
"feat_4",
"feat_5",
"feat_6",
"feat_7",
"feat_8",
"feat_9",
"feat_10",
"feat_11",
"feat_12",
"feat_13",
"feat_14",
"feat_15",
"feat_16",
"feat_17",
"feat_18",
"feat_19",
"feat_20",
"feat_21",
"feat_22",
"feat_23",
"feat_24",
"feat_25",
"feat_26",
"feat_27",
"feat_28",
"feat_29",
"feat_30",
"feat_31",
"feat_32",
"feat_33",
"feat_34",
"feat_35",
"feat_36",
"feat_37",
"feat_38",
"feat_39",
"feat_40",
"feat_41",
"feat_42",
"feat_43",
"feat_44",
"feat_45",
"feat_46",
"feat_47",
"feat_48",
"feat_49",
"feat_50",
"feat_51",
"feat_52",
"feat_53",
"feat_54",
"feat_55",
"feat_56",
"feat_57",
"feat_58",
"feat_59",
"feat_60",
"feat_61",
"feat_62",
"feat_63",
"feat_64",
"feat_65",
"feat_66",
"feat_67",
"feat_68",
"feat_69",
"feat_70",
"feat_71",
"feat_72",
"feat_73",
"feat_74",
"feat_75",
"feat_76",
"feat_77",
"feat_78",
"feat_79",
"feat_80",
"feat_81",
"feat_82",
"feat_83",
"feat_84",
"feat_85",
"feat_86",
"feat_87",
"feat_88",
"feat_89",
"feat_90",
"feat_91",
"feat_92",
"feat_93",
"feat_94",
"feat_95",
"feat_96",
"feat_97",
"feat_98",
"feat_99",
"feat_100",
"feat_101",
"feat_102",
"feat_103",
"feat_104",
"feat_105",
"feat_106",
"feat_107",
"feat_108",
"feat_109",
"feat_110",
"feat_111",
"feat_112",
"feat_113",
"feat_114",
"feat_115",
"feat_116",
"feat_117",
"feat_118",
"feat_119",
"feat_120",
"feat_121",
"feat_122",
"feat_123",
"feat_124",
"feat_125",
"feat_126",
"feat_127",
"feat_128",
"feat_129",
"feat_130",
"feat_131",
"feat_132",
"feat_133",
"feat_134",
"feat_135",
"feat_136",
"feat_137",
"feat_138",
"feat_139",
"feat_140",
"feat_141",
"feat_142",
"feat_143",
"feat_144",
"feat_145",
"feat_146",
"feat_147",
"feat_148",
"feat_149",
"feat_150",
"feat_151",
"feat_152",
"feat_153",
"feat_154",
"feat_155",
"feat_156",
"feat_157",
"feat_158",
"feat_159",
"feat_160",
"feat_161",
"feat_162",
"feat_163",
"feat_164",
"feat_165",
"feat_166",
"feat_167",
"feat_168",
"feat_169",
"feat_170",
"feat_171",
"feat_172",
"feat_173",
"feat_174",
"feat_175",
"feat_176",
"feat_177",
"feat_178",
"feat_179",
"feat_180",
"feat_181",
"feat_182",
"feat_183",
"feat_184",
"feat_185",
"feat_186",
"feat_187",
"feat_188",
"feat_189",
"feat_190",
"feat_191",
"feat_192",
"feat_193",
"feat_194",
"feat_195",
"feat_196",
"feat_197",
"feat_198",
"feat_199",
"feat_200",
"feat_201",
"feat_202",
"feat_203",
"feat_204",
"feat_205",
"feat_206",
"feat_207",
"feat_208",
"feat_209",
"feat_210",
"feat_211",
"feat_212",
"feat_213",
"feat_214",
"feat_215",
"feat_216",
"feat_217",
"feat_218",
"feat_219",
"feat_220",
"feat_221",
"feat_222",
"feat_223",
"feat_224",
"feat_225",
"feat_226",
"feat_227",
"feat_228",
"feat_229",
"feat_230",
"feat_231",
"feat_232",
"feat_233",
"feat_234",
"feat_235",
"feat_236",
"feat_237",
"feat_238",
"feat_239",
"feat_240",
"feat_241",
"feat_242",
"feat_243",
"feat_244",
"feat_245",
"feat_246",
"feat_247",
"feat_248",
"feat_249",
"feat_250",
"feat_251",
"feat_252",
"feat_253",
"feat_254",
"feat_255",
"feat_256",
"feat_257",
"feat_258",
"feat_259",
"feat_260",
"feat_261",
"feat_262",
"feat_263",
"feat_264",
"feat_265",
"feat_266",
"feat_267",
"feat_268",
"feat_269",
"feat_270",
"feat_271",
"feat_272",
"feat_273",
"feat_274",
"feat_275",
"feat_276",
"feat_277",
"feat_278",
"feat_279",
"feat_280",
"feat_281",
"feat_282",
"feat_283",
"feat_284",
"feat_285",
"feat_286",
"feat_287",
"feat_288",
"feat_289",
"feat_290",
"feat_291",
"feat_292",
"feat_293",
"feat_294",
"feat_295",
"feat_296",
"feat_297",
"feat_298",
"feat_299",
"feat_300",
"feat_301",
"feat_302",
"feat_303",
"feat_304",
"feat_305",
"feat_306",
"feat_307",
"feat_308",
"feat_309",
"feat_310",
"feat_311",
"feat_312",
"feat_313",
"feat_314",
"feat_315",
"feat_316",
"feat_317",
"feat_318",
"feat_319",
"feat_320",
"feat_321",
"feat_322",
"feat_323",
"feat_324",
"feat_325",
"feat_326",
"feat_327",
"feat_328",
"feat_329",
"feat_330",
"feat_331",
"feat_332",
"feat_333",
"feat_334",
"feat_335",
"feat_336",
"feat_337",
"feat_338",
"feat_339",
"feat_340",
"feat_341",
"feat_342",
"feat_343",
"feat_344",
"feat_345",
"feat_346",
"feat_347",
"feat_348",
"feat_349",
"feat_350",
"feat_351",
"feat_352",
"feat_353",
"feat_354",
"feat_355",
"feat_356",
"feat_357",
"feat_358",
"feat_359",
"feat_360",
"feat_361",
"feat_362",
"feat_363",
"feat_364",
"feat_365",
"feat_366",
"feat_367",
"feat_368",
"feat_369",
"feat_370",
"feat_371",
"feat_372",
"feat_373",
"feat_374",
"feat_375",
"feat_376",
"feat_377",
"feat_378",
"feat_379",
"feat_380",
"feat_381",
"feat_382",
"feat_383",
"feat_384",
"feat_385",
"feat_386",
"feat_387",
"feat_388",
"feat_389",
"feat_390",
"feat_391",
"feat_392",
"feat_393",
"feat_394",
"feat_395",
"feat_396",
"feat_397",
"feat_398",
"feat_399",
"feat_400",
"feat_401",
"feat_402",
"feat_403",
"feat_404",
"feat_405",
"feat_406",
"feat_407",
"feat_408",
"feat_409",
"feat_410",
"feat_411",
"feat_412",
"feat_413",
"feat_414",
"feat_415",
"feat_416",
"feat_417",
"feat_418",
"feat_419",
"feat_420",
"feat_421",
"feat_422",
"feat_423",
"feat_424",
"feat_425",
"feat_426",
"feat_427",
"feat_428",
"feat_429",
"feat_430",
"feat_431",
"feat_432",
"feat_433",
"feat_434",
"feat_435",
"feat_436",
"feat_437",
"feat_438",
"feat_439",
"feat_440",
"feat_441",
"feat_442",
"feat_443",
"feat_444",
"feat_445",
"feat_446",
"feat_447",
"feat_448",
"feat_449",
"feat_450",
"feat_451",
"feat_452",
"feat_453",
"feat_454",
"feat_455",
"feat_456",
"feat_457",
"feat_458",
"feat_459",
"feat_460",
"feat_461",
"feat_462",
"feat_463",
"feat_464",
"feat_465",
"feat_466",
"feat_467",
"feat_468",
"feat_469",
"feat_470",
"feat_471",
"feat_472",
"feat_473",
"feat_474",
"feat_475",
"feat_476",
"feat_477",
"feat_478",
"feat_479",
"feat_480",
"feat_481",
"feat_482",
"feat_483",
"feat_484",
"feat_485",
"feat_486",
"feat_487",
"feat_488",
"feat_489",
"feat_490",
"feat_491",
"feat_492",
"feat_493",
"feat_494",
"feat_495",
"feat_496",
"feat_497",
"feat_498",
"feat_499",
"feat_500",
"feat_501",
"feat_502",
"feat_503",
"feat_504",
"feat_505",
"feat_506",
"feat_507",
"feat_508",
"feat_509",
"feat_510",
"feat_511",
"feat_512",
"feat_513",
"feat_514",
"feat_515",
"feat_516",
"feat_517",
"feat_518",
"feat_519",
"feat_520",
"feat_521",
"feat_522",
"feat_523",
"feat_524",
"feat_525",
"feat_526",
"feat_527",
"feat_528",
"feat_529",
"feat_530",
"feat_531",
"feat_532",
"feat_533",
"feat_534",
"feat_535",
"feat_536",
"feat_537",
"feat_538",
"feat_539",
"feat_540",
"feat_541",
"feat_542",
"feat_543",
"feat_544",
"feat_545",
"feat_546",
"feat_547",
"feat_548",
"feat_549",
"feat_550",
"feat_551",
"feat_552",
"feat_553",
"feat_554",
"feat_555",
"feat_556",
"feat_557",
"feat_558",
"feat_559",
"feat_560",
"feat_561",
"feat_562",
"feat_563",
"feat_564",
"feat_565",
"feat_566",
"feat_567",
"feat_568",
"feat_569",
"feat_570",
"feat_571",
"feat_572",
"feat_573",
"feat_574",
"feat_575",
"feat_576",
"feat_577",
"feat_578",
"feat_579",
"feat_580",
"feat_581",
"feat_582",
"feat_583",
"feat_584",
"feat_585",
"feat_586",
"feat_587",
"feat_588",
"feat_589",
"feat_590",
"feat_591",
"feat_592",
"feat_593",
"feat_594",
"feat_595",
"feat_596",
"feat_597",
"feat_598",
"feat_599",
"feat_600",
"feat_601",
"feat_602",
"feat_603",
"feat_604",
"feat_605",
"feat_606",
"feat_607",
"feat_608",
"feat_609",
"feat_610",
"feat_611",
"feat_612",
"feat_613",
"feat_614",
"feat_615",
"feat_616",
"feat_617",
"feat_618",
"feat_619",
"feat_620",
"feat_621",
"feat_622",
"feat_623",
"feat_624",
"feat_625",
"feat_626",
"feat_627",
"feat_628",
"feat_629",
"feat_630",
"feat_631",
"feat_632",
"feat_633",
"feat_634",
"feat_635",
"feat_636",
"feat_637",
"feat_638",
"feat_639",
"feat_640",
"feat_641",
"feat_642",
"feat_643",
"feat_644",
"feat_645",
"feat_646",
"feat_647",
"feat_648",
"feat_649",
"feat_650",
"feat_651",
"feat_652",
"feat_653",
"feat_654",
"feat_655",
"feat_656",
"feat_657",
"feat_658",
"feat_659",
"feat_660",
"feat_661",
"feat_662",
"feat_663",
"feat_664",
"feat_665",
"feat_666",
"feat_667",
"feat_668",
"feat_669",
"feat_670",
"feat_671",
"feat_672",
"feat_673",
"feat_674",
"feat_675",
"feat_676",
"feat_677",
"feat_678",
"feat_679",
"feat_680",
"feat_681",
"feat_682",
"feat_683",
"feat_684",
"feat_685",
"feat_686",
"feat_687",
"feat_688",
"feat_689",
"feat_690",
"feat_691",
"feat_692",
"feat_693",
"feat_694",
"feat_695",
"feat_696",
"feat_697",
"feat_698",
"feat_699",
"feat_700",
"feat_701",
"feat_702",
"feat_703",
"feat_704",
"feat_705",
"feat_706",
"feat_707",
"feat_708",
"feat_709",
"feat_710",
"feat_711",
"feat_712",
"feat_713",
"feat_714",
"feat_715",
"feat_716",
"feat_717",
"feat_718",
"feat_719",
"feat_720",
"feat_721",
"feat_722",
"feat_723",
"feat_724",
"feat_725",
"feat_726",
"feat_727",
"feat_728",
"feat_729",
"feat_730",
"feat_731",
"feat_732",
"feat_733",
"feat_734",
"feat_735",
"feat_736",
"feat_737",
"feat_738",
"feat_739",
"feat_740",
"feat_741",
"feat_742",
"feat_743",
"feat_744",
"feat_745",
"feat_746",
"feat_747",
"feat_748",
"feat_749",
"feat_750",
"feat_751",
"feat_752",
"feat_753",
"feat_754",
"feat_755",
"feat_756",
"feat_757",
"feat_758",
"feat_759",
"feat_760",
"feat_761",
"feat_762",
"feat_763",
"feat_764",
"feat_765",
"feat_766",
"feat_767"
],
"feature_file": "paper_feats.npy",
"data_source": {
"type": "cfg",
"path": "/raid/ogbn_mag240m_syngen",
"name": "paper"
},
"params": {},
"dump_path": null
},
{
"type": "uniform",
"features_list": [
"year",
"label"
],
"feature_file": "year_label.npy",
"data_source": {
"type": "cfg",
"path": "/raid/ogbn_mag240m_syngen",
"name": "paper"
},
"params": {},
"dump_path": null
}
]
},
{
"name": "author",
"count": 122383112,
"features_path": null,
"features": []
},
{
"name": "institution",
"count": 25721,
"features_path": null,
"features": []
}
],
"edges": [
{
"name": "writes",
"count": 386022720,
"src_node_type": "author",
"dst_node_type": "paper",
"directed": false,
"features": [],
"features_path": null,
"structure_path": "writes_list.parquet",
"[gen]structure_generator": {
"type": "RMAT",
"data_source": {
"type": "cfg",
"path": "/raid/ogbn_mag240m_syngen",
"name": "writes"
},
"params": {},
"dump_path": null
}
},
{
"name": "affiliated_with",
"count": 44592586,
"src_node_type": "author",
"dst_node_type": "institution",
"directed": false,
"features": [],
"features_path": null,
"structure_path": "affiliated_with_list.parquet",
"[gen]structure_generator": {
"type": "RMAT",
"data_source": {
"type": "cfg",
"path": "/raid/ogbn_mag240m_syngen",
"name": "affiliated_with"
},
"params": {},
"dump_path": null
}
},
{
"name": "cites",
"count": 1297748926,
"src_node_type": "paper",
"dst_node_type": "paper",
"directed": false,
"features": [],
"features_path": null,
"structure_path": "cites_list.parquet",
"[gen]structure_generator": {
"type": "RMAT",
"data_source": {
"type": "cfg",
"path": "/raid/ogbn_mag240m_syngen",
"name": "cites"
},
"params": {},
"dump_path": null
}
}
],
"path": "/raid/ogbn_mag240m_syngen"
} |
TensorFlow/Segmentation/UNet_Medical | UNet_Medical | requirements | Pillow
tf2onnx
munch
|
TensorFlow/Detection/SSD/models/research/object_detection/data_decoders | data_decoders | tf_example_decoder | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tensorflow Example proto decoder for object detection.
A decoder to decode string tensors containing serialized tensorflow.Example
protos for object detection.
"""
import tensorflow as tf
from object_detection.core import data_decoder
from object_detection.core import standard_fields as fields
from object_detection.protos import input_reader_pb2
from object_detection.utils import label_map_util
slim_example_decoder = tf.contrib.slim.tfexample_decoder
class _ClassTensorHandler(slim_example_decoder.Tensor):
"""An ItemHandler to fetch class ids from class text."""
def __init__(self,
tensor_key,
label_map_proto_file,
shape_keys=None,
shape=None,
default_value=''):
"""Initializes the LookupTensor handler.
Simply calls a vocabulary (most often, a label mapping) lookup.
Args:
tensor_key: the name of the `TFExample` feature to read the tensor from.
label_map_proto_file: File path to a text format LabelMapProto message
mapping class text to id.
shape_keys: Optional name or list of names of the TF-Example feature in
which the tensor shape is stored. If a list, then each corresponds to
one dimension of the shape.
shape: Optional output shape of the `Tensor`. If provided, the `Tensor` is
reshaped accordingly.
default_value: The value used when the `tensor_key` is not found in a
particular `TFExample`.
Raises:
ValueError: if both `shape_keys` and `shape` are specified.
"""
name_to_id = label_map_util.get_label_map_dict(
label_map_proto_file, use_display_name=False)
# We use a default_value of -1, but we expect all labels to be contained
# in the label map.
name_to_id_table = tf.contrib.lookup.HashTable(
initializer=tf.contrib.lookup.KeyValueTensorInitializer(
keys=tf.constant(list(name_to_id.keys())),
values=tf.constant(list(name_to_id.values()), dtype=tf.int64)),
default_value=-1)
display_name_to_id = label_map_util.get_label_map_dict(
label_map_proto_file, use_display_name=True)
# We use a default_value of -1, but we expect all labels to be contained
# in the label map.
display_name_to_id_table = tf.contrib.lookup.HashTable(
initializer=tf.contrib.lookup.KeyValueTensorInitializer(
keys=tf.constant(list(display_name_to_id.keys())),
values=tf.constant(
list(display_name_to_id.values()), dtype=tf.int64)),
default_value=-1)
self._name_to_id_table = name_to_id_table
self._display_name_to_id_table = display_name_to_id_table
super(_ClassTensorHandler, self).__init__(tensor_key, shape_keys, shape,
default_value)
def tensors_to_item(self, keys_to_tensors):
unmapped_tensor = super(_ClassTensorHandler,
self).tensors_to_item(keys_to_tensors)
return tf.maximum(self._name_to_id_table.lookup(unmapped_tensor),
self._display_name_to_id_table.lookup(unmapped_tensor))
class _BackupHandler(slim_example_decoder.ItemHandler):
"""An ItemHandler that tries two ItemHandlers in order."""
def __init__(self, handler, backup):
"""Initializes the BackupHandler handler.
If the first Handler's tensors_to_item returns a Tensor with no elements,
the second Handler is used.
Args:
handler: The primary ItemHandler.
backup: The backup ItemHandler.
Raises:
ValueError: if either is not an ItemHandler.
"""
if not isinstance(handler, slim_example_decoder.ItemHandler):
raise ValueError('Primary handler is of type %s instead of ItemHandler' %
type(handler))
if not isinstance(backup, slim_example_decoder.ItemHandler):
raise ValueError(
'Backup handler is of type %s instead of ItemHandler' % type(backup))
self._handler = handler
self._backup = backup
super(_BackupHandler, self).__init__(handler.keys + backup.keys)
def tensors_to_item(self, keys_to_tensors):
item = self._handler.tensors_to_item(keys_to_tensors)
return tf.cond(
pred=tf.equal(tf.reduce_prod(tf.shape(item)), 0),
true_fn=lambda: self._backup.tensors_to_item(keys_to_tensors),
false_fn=lambda: item)
class TfExampleDecoder(data_decoder.DataDecoder):
"""Tensorflow Example proto decoder."""
def __init__(self,
load_instance_masks=False,
instance_mask_type=input_reader_pb2.NUMERICAL_MASKS,
label_map_proto_file=None,
use_display_name=False,
dct_method='',
num_keypoints=0,
num_additional_channels=0):
"""Constructor sets keys_to_features and items_to_handlers.
Args:
load_instance_masks: whether or not to load and handle instance masks.
instance_mask_type: type of instance masks. Options are provided in
input_reader.proto. This is only used if `load_instance_masks` is True.
label_map_proto_file: a file path to a
object_detection.protos.StringIntLabelMap proto. If provided, then the
mapped IDs of 'image/object/class/text' will take precedence over the
existing 'image/object/class/label' ID. Also, if provided, it is
assumed that 'image/object/class/text' will be in the data.
use_display_name: whether or not to use the `display_name` for label
mapping (instead of `name`). Only used if label_map_proto_file is
provided.
dct_method: An optional string. Defaults to None. It only takes
effect when image format is jpeg, used to specify a hint about the
algorithm used for jpeg decompression. Currently valid values
are ['INTEGER_FAST', 'INTEGER_ACCURATE']. The hint may be ignored, for
example, the jpeg library does not have that specific option.
num_keypoints: the number of keypoints per object.
num_additional_channels: how many additional channels to use.
Raises:
ValueError: If `instance_mask_type` option is not one of
input_reader_pb2.DEFAULT, input_reader_pb2.NUMERICAL, or
input_reader_pb2.PNG_MASKS.
"""
# TODO(rathodv): delete unused `use_display_name` argument once we change
# other decoders to handle label maps similarly.
del use_display_name
self.keys_to_features = {
'image/encoded':
tf.FixedLenFeature((), tf.string, default_value=''),
'image/format':
tf.FixedLenFeature((), tf.string, default_value='jpeg'),
'image/filename':
tf.FixedLenFeature((), tf.string, default_value=''),
'image/key/sha256':
tf.FixedLenFeature((), tf.string, default_value=''),
'image/source_id':
tf.FixedLenFeature((), tf.string, default_value=''),
'image/height':
tf.FixedLenFeature((), tf.int64, default_value=1),
'image/width':
tf.FixedLenFeature((), tf.int64, default_value=1),
# Image-level labels.
'image/class/text':
tf.VarLenFeature(tf.string),
'image/class/label':
tf.VarLenFeature(tf.int64),
# Object boxes and classes.
'image/object/bbox/xmin':
tf.VarLenFeature(tf.float32),
'image/object/bbox/xmax':
tf.VarLenFeature(tf.float32),
'image/object/bbox/ymin':
tf.VarLenFeature(tf.float32),
'image/object/bbox/ymax':
tf.VarLenFeature(tf.float32),
'image/object/class/label':
tf.VarLenFeature(tf.int64),
'image/object/class/text':
tf.VarLenFeature(tf.string),
'image/object/area':
tf.VarLenFeature(tf.float32),
'image/object/is_crowd':
tf.VarLenFeature(tf.int64),
'image/object/difficult':
tf.VarLenFeature(tf.int64),
'image/object/group_of':
tf.VarLenFeature(tf.int64),
'image/object/weight':
tf.VarLenFeature(tf.float32),
}
# We are checking `dct_method` instead of passing it directly in order to
# ensure TF version 1.6 compatibility.
if dct_method:
image = slim_example_decoder.Image(
image_key='image/encoded',
format_key='image/format',
channels=3,
dct_method=dct_method)
additional_channel_image = slim_example_decoder.Image(
image_key='image/additional_channels/encoded',
format_key='image/format',
channels=1,
repeated=True,
dct_method=dct_method)
else:
image = slim_example_decoder.Image(
image_key='image/encoded', format_key='image/format', channels=3)
additional_channel_image = slim_example_decoder.Image(
image_key='image/additional_channels/encoded',
format_key='image/format',
channels=1,
repeated=True)
self.items_to_handlers = {
fields.InputDataFields.image:
image,
fields.InputDataFields.source_id: (
slim_example_decoder.Tensor('image/source_id')),
fields.InputDataFields.key: (
slim_example_decoder.Tensor('image/key/sha256')),
fields.InputDataFields.filename: (
slim_example_decoder.Tensor('image/filename')),
# Object boxes and classes.
fields.InputDataFields.groundtruth_boxes: (
slim_example_decoder.BoundingBox(['ymin', 'xmin', 'ymax', 'xmax'],
'image/object/bbox/')),
fields.InputDataFields.groundtruth_area:
slim_example_decoder.Tensor('image/object/area'),
fields.InputDataFields.groundtruth_is_crowd: (
slim_example_decoder.Tensor('image/object/is_crowd')),
fields.InputDataFields.groundtruth_difficult: (
slim_example_decoder.Tensor('image/object/difficult')),
fields.InputDataFields.groundtruth_group_of: (
slim_example_decoder.Tensor('image/object/group_of')),
fields.InputDataFields.groundtruth_weights: (
slim_example_decoder.Tensor('image/object/weight')),
}
if num_additional_channels > 0:
self.keys_to_features[
'image/additional_channels/encoded'] = tf.FixedLenFeature(
(num_additional_channels,), tf.string)
self.items_to_handlers[
fields.InputDataFields.
image_additional_channels] = additional_channel_image
self._num_keypoints = num_keypoints
if num_keypoints > 0:
self.keys_to_features['image/object/keypoint/x'] = (
tf.VarLenFeature(tf.float32))
self.keys_to_features['image/object/keypoint/y'] = (
tf.VarLenFeature(tf.float32))
self.items_to_handlers[fields.InputDataFields.groundtruth_keypoints] = (
slim_example_decoder.ItemHandlerCallback(
['image/object/keypoint/y', 'image/object/keypoint/x'],
self._reshape_keypoints))
if load_instance_masks:
if instance_mask_type in (input_reader_pb2.DEFAULT,
input_reader_pb2.NUMERICAL_MASKS):
self.keys_to_features['image/object/mask'] = (
tf.VarLenFeature(tf.float32))
self.items_to_handlers[
fields.InputDataFields.groundtruth_instance_masks] = (
slim_example_decoder.ItemHandlerCallback(
['image/object/mask', 'image/height', 'image/width'],
self._reshape_instance_masks))
elif instance_mask_type == input_reader_pb2.PNG_MASKS:
self.keys_to_features['image/object/mask'] = tf.VarLenFeature(tf.string)
self.items_to_handlers[
fields.InputDataFields.groundtruth_instance_masks] = (
slim_example_decoder.ItemHandlerCallback(
['image/object/mask', 'image/height', 'image/width'],
self._decode_png_instance_masks))
else:
raise ValueError('Did not recognize the `instance_mask_type` option.')
if label_map_proto_file:
# If the label_map_proto is provided, try to use it in conjunction with
# the class text, and fall back to a materialized ID.
label_handler = _BackupHandler(
_ClassTensorHandler(
'image/object/class/text', label_map_proto_file,
default_value=''),
slim_example_decoder.Tensor('image/object/class/label'))
image_label_handler = _BackupHandler(
_ClassTensorHandler(
fields.TfExampleFields.image_class_text,
label_map_proto_file,
default_value=''),
slim_example_decoder.Tensor(fields.TfExampleFields.image_class_label))
else:
label_handler = slim_example_decoder.Tensor('image/object/class/label')
image_label_handler = slim_example_decoder.Tensor(
fields.TfExampleFields.image_class_label)
self.items_to_handlers[
fields.InputDataFields.groundtruth_classes] = label_handler
self.items_to_handlers[
fields.InputDataFields.groundtruth_image_classes] = image_label_handler
def decode(self, tf_example_string_tensor):
"""Decodes serialized tensorflow example and returns a tensor dictionary.
Args:
tf_example_string_tensor: a string tensor holding a serialized tensorflow
example proto.
Returns:
A dictionary of the following tensors.
fields.InputDataFields.image - 3D uint8 tensor of shape [None, None, 3]
containing image.
fields.InputDataFields.original_image_spatial_shape - 1D int32 tensor of
shape [2] containing shape of the image.
fields.InputDataFields.source_id - string tensor containing original
image id.
fields.InputDataFields.key - string tensor with unique sha256 hash key.
fields.InputDataFields.filename - string tensor with original dataset
filename.
fields.InputDataFields.groundtruth_boxes - 2D float32 tensor of shape
[None, 4] containing box corners.
fields.InputDataFields.groundtruth_classes - 1D int64 tensor of shape
[None] containing classes for the boxes.
fields.InputDataFields.groundtruth_weights - 1D float32 tensor of
shape [None] indicating the weights of groundtruth boxes.
fields.InputDataFields.groundtruth_area - 1D float32 tensor of shape
        [None] containing object mask area in pixels squared.
fields.InputDataFields.groundtruth_is_crowd - 1D bool tensor of shape
[None] indicating if the boxes enclose a crowd.
Optional:
fields.InputDataFields.image_additional_channels - 3D uint8 tensor of
shape [None, None, num_additional_channels]. 1st dim is height; 2nd dim
is width; 3rd dim is the number of additional channels.
fields.InputDataFields.groundtruth_difficult - 1D bool tensor of shape
[None] indicating if the boxes represent `difficult` instances.
fields.InputDataFields.groundtruth_group_of - 1D bool tensor of shape
[None] indicating if the boxes represent `group_of` instances.
fields.InputDataFields.groundtruth_keypoints - 3D float32 tensor of
shape [None, None, 2] containing keypoints, where the coordinates of
the keypoints are ordered (y, x).
fields.InputDataFields.groundtruth_instance_masks - 3D float32 tensor of
shape [None, None, None] containing instance masks.
      fields.InputDataFields.groundtruth_image_classes - 1D int64 tensor of
        shape [None] containing the image-level classes.
"""
serialized_example = tf.reshape(tf_example_string_tensor, shape=[])
decoder = slim_example_decoder.TFExampleDecoder(self.keys_to_features,
self.items_to_handlers)
keys = decoder.list_items()
tensors = decoder.decode(serialized_example, items=keys)
tensor_dict = dict(zip(keys, tensors))
is_crowd = fields.InputDataFields.groundtruth_is_crowd
tensor_dict[is_crowd] = tf.cast(tensor_dict[is_crowd], dtype=tf.bool)
tensor_dict[fields.InputDataFields.image].set_shape([None, None, 3])
tensor_dict[fields.InputDataFields.original_image_spatial_shape] = tf.shape(
tensor_dict[fields.InputDataFields.image])[:2]
if fields.InputDataFields.image_additional_channels in tensor_dict:
channels = tensor_dict[fields.InputDataFields.image_additional_channels]
channels = tf.squeeze(channels, axis=3)
channels = tf.transpose(channels, perm=[1, 2, 0])
tensor_dict[fields.InputDataFields.image_additional_channels] = channels
def default_groundtruth_weights():
return tf.ones(
[tf.shape(tensor_dict[fields.InputDataFields.groundtruth_boxes])[0]],
dtype=tf.float32)
tensor_dict[fields.InputDataFields.groundtruth_weights] = tf.cond(
tf.greater(
tf.shape(
tensor_dict[fields.InputDataFields.groundtruth_weights])[0],
0), lambda: tensor_dict[fields.InputDataFields.groundtruth_weights],
default_groundtruth_weights)
return tensor_dict
def _reshape_keypoints(self, keys_to_tensors):
"""Reshape keypoints.
The instance segmentation masks are reshaped to [num_instances,
num_keypoints, 2].
Args:
keys_to_tensors: a dictionary from keys to tensors.
Returns:
A 3-D float tensor of shape [num_instances, num_keypoints, 2] with values
in {0, 1}.
"""
y = keys_to_tensors['image/object/keypoint/y']
if isinstance(y, tf.SparseTensor):
y = tf.sparse_tensor_to_dense(y)
y = tf.expand_dims(y, 1)
x = keys_to_tensors['image/object/keypoint/x']
if isinstance(x, tf.SparseTensor):
x = tf.sparse_tensor_to_dense(x)
x = tf.expand_dims(x, 1)
keypoints = tf.concat([y, x], 1)
keypoints = tf.reshape(keypoints, [-1, self._num_keypoints, 2])
return keypoints
def _reshape_instance_masks(self, keys_to_tensors):
"""Reshape instance segmentation masks.
The instance segmentation masks are reshaped to [num_instances, height,
width].
Args:
keys_to_tensors: a dictionary from keys to tensors.
Returns:
A 3-D float tensor of shape [num_instances, height, width] with values
in {0, 1}.
"""
height = keys_to_tensors['image/height']
width = keys_to_tensors['image/width']
to_shape = tf.cast(tf.stack([-1, height, width]), tf.int32)
masks = keys_to_tensors['image/object/mask']
if isinstance(masks, tf.SparseTensor):
masks = tf.sparse_tensor_to_dense(masks)
masks = tf.reshape(tf.to_float(tf.greater(masks, 0.0)), to_shape)
return tf.cast(masks, tf.float32)
def _decode_png_instance_masks(self, keys_to_tensors):
"""Decode PNG instance segmentation masks and stack into dense tensor.
The instance segmentation masks are reshaped to [num_instances, height,
width].
Args:
keys_to_tensors: a dictionary from keys to tensors.
Returns:
A 3-D float tensor of shape [num_instances, height, width] with values
in {0, 1}.
"""
def decode_png_mask(image_buffer):
image = tf.squeeze(
tf.image.decode_image(image_buffer, channels=1), axis=2)
image.set_shape([None, None])
image = tf.to_float(tf.greater(image, 0))
return image
png_masks = keys_to_tensors['image/object/mask']
height = keys_to_tensors['image/height']
width = keys_to_tensors['image/width']
if isinstance(png_masks, tf.SparseTensor):
png_masks = tf.sparse_tensor_to_dense(png_masks, default_value='')
return tf.cond(
tf.greater(tf.size(png_masks), 0),
lambda: tf.map_fn(decode_png_mask, png_masks, dtype=tf.float32),
lambda: tf.zeros(tf.to_int32(tf.stack([0, height, width]))))
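# --- Usage sketch (a hedged illustration added here, not part of the original
# file): decoding a single serialized tf.Example with this decoder in a TF1
# graph. `example_bytes` is a hypothetical serialized proto.
#
#   decoder = TfExampleDecoder(load_instance_masks=False)
#   serialized = tf.placeholder(tf.string, shape=[])
#   tensor_dict = decoder.decode(serialized)
#   with tf.Session() as sess:
#     sess.run(tf.tables_initializer())  # required when a label map is used
#     out = sess.run(tensor_dict, feed_dict={serialized: example_bytes})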
|
TensorFlow/LanguageModeling/BERT | BERT | gpu_environment | # coding=utf-8
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow as tf
import numpy as np
def float32_variable_storage_getter(getter, name, shape=None, dtype=None,
initializer=None, regularizer=None,
trainable=True,
*args, **kwargs):
"""Custom variable getter that forces trainable variables to be stored in
float32 precision and then casts them to the training precision.
"""
storage_dtype = tf.float32 if trainable else dtype
variable = getter(name, shape, dtype=storage_dtype,
initializer=initializer, regularizer=regularizer,
trainable=trainable,
*args, **kwargs)
if trainable and dtype != tf.float32:
variable = tf.cast(variable, dtype)
return variable
def get_custom_getter(compute_type):
return float32_variable_storage_getter if compute_type == tf.float16 else None
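# Usage sketch (a hedged assumption, not part of the original file): wrap model
# construction in a variable scope that uses the custom getter, so variables
# are stored in fp32 master copies but consumed in the fp16 compute type.
#
#   compute_type = tf.float16
#   with tf.variable_scope('model', custom_getter=get_custom_getter(compute_type)):
#     hidden = tf.layers.dense(tf.cast(features, compute_type), 1024)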
|
PyTorch/Translation/Transformer/scripts | scripts | run_DGXA100_TF32 | #! /bin/bash
#
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
nvidia-smi
RESULTS_DIR='/results'
CHECKPOINTS_DIR='/results/checkpoints'
mkdir -p $CHECKPOINTS_DIR
: ${SEED:=1}
: ${LR:=0.000846}
: ${WARMUP:=4000}
: ${NUM_EPOCHS:=30}
: ${BS:=10240}
: ${NUM_GPU:=8}
STAT_FILE=${RESULTS_DIR}/DGXA100_tf32_${NUM_GPU}GPU_log.json
DISTRIBUTED="-m torch.distributed.run --nproc_per_node=${NUM_GPU}"
python ${DISTRIBUTED} /workspace/translation/train.py \
/data/ \
--arch transformer_wmt_en_de_big_t2t \
--share-all-embeddings \
--optimizer adam \
--adam-betas 0.9 0.997 \
--adam-eps 1e-9 \
--clip-norm 0.0 \
--lr-scheduler inverse_sqrt \
--warmup-init-lr 0.0 \
--warmup-updates ${WARMUP} \
--lr $LR \
--min-lr 0.0 \
--dropout 0.1 \
--weight-decay 0.0 \
--criterion label_smoothed_cross_entropy \
--label-smoothing 0.1 \
--max-tokens ${BS} \
--seed ${SEED} \
--max-epoch ${NUM_EPOCHS} \
--no-epoch-checkpoints \
--fuse-layer-norm \
--online-eval \
--log-interval 500 \
--save-dir ${RESULTS_DIR} \
--stat-file ${STAT_FILE}
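
# Example override of the defaults above (hypothetical values), relying on the
# `: ${VAR:=default}` pattern used at the top of this script:
#   NUM_GPU=4 BS=5120 NUM_EPOCHS=10 bash scripts/run_DGXA100_TF32.sh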
|
PyTorch/Translation/GNMT/scripts | scripts | filter_dataset | # Copyright (c) 2018-2020, NVIDIA CORPORATION. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import argparse
from collections import Counter
def parse_args():
parser = argparse.ArgumentParser(description='Clean dataset')
parser.add_argument('-f1', '--file1', help='file1')
parser.add_argument('-f2', '--file2', help='file2')
return parser.parse_args()
def save_output(fname, data):
with open(fname, 'w') as f:
f.writelines(data)
def main():
"""
    Discards all sentence pairs that cannot be encoded with the latin-1 codec.
It aims to filter out sentences with rare unicode glyphs and pairs which
are most likely not valid English-German sentences.
Examples of discarded sentences:
✿★★★Hommage au king de la pop ★★★✿ ✿★★★Que son âme repos...
Для их осуществления нам, прежде всего, необходимо преодолеть
возражения рыночных фундаменталистов, которые хотят ликвидировать или
уменьшить роль МВФ.
practised as a scientist in various medical departments of the ⇗Medical
University of Hanover , the ⇗University of Ulm , and the ⇗RWTH Aachen
(rheumatology, pharmacology, physiology, pathology, microbiology,
immunology and electron-microscopy).
The same shift】 and press 【】 【alt out with a smaller diameter
circle.
Brought to you by ABMSUBS ♥leira(Coordinator/Translator)
♥chibichan93(Timer/Typesetter) ♥ja...
Some examples: &0u - ☺ &0U - ☻ &tel - ☏ &PI - ¶ &SU - ☼ &cH- - ♥ &M2=♫
&sn - ﺵ SGML maps SGML to unicode.
"""
args = parse_args()
c = Counter()
skipped = 0
valid = 0
data1 = []
data2 = []
with open(args.file1) as f1, open(args.file2) as f2:
for idx, lines in enumerate(zip(f1, f2)):
line1, line2 = lines
if idx % 100000 == 1:
print(f'Processed {idx} lines')
try:
line1.encode('latin1')
line2.encode('latin1')
except UnicodeEncodeError:
skipped += 1
else:
data1.append(line1)
data2.append(line2)
valid += 1
c.update(line1)
ratio = valid / (skipped + valid)
print(f'Skipped: {skipped}, Valid: {valid}, Valid ratio {ratio}')
print('Character frequency:', c)
save_output(args.file1, data1)
save_output(args.file2, data2)
if __name__ == '__main__':
main()
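# Example invocation (hypothetical file names; note both files are filtered
# and rewritten in place):
#   python scripts/filter_dataset.py -f1 train.en -f2 train.de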
|
TensorFlow2/LanguageModeling/BERT/official/modeling/hyperparams | hyperparams | params_dict | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A parameter dictionary class which supports the nest structure."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import copy
import re
import six
import tensorflow as tf
import yaml
# regex pattern that matches on key-value pairs in a comma-separated
# key-value pair string. It splits each k-v pair on the = sign, and
# matches on values that are within single quotes, double quotes, single
# values (e.g. floats, ints, etc.), and a lists within brackets.
_PARAM_RE = re.compile(r"""
(?P<name>[a-zA-Z][\w\.]*) # variable name: "var" or "x"
\s*=\s*
((?P<val>\'(.*?)\' # single quote
|
\"(.*?)\" # double quote
|
[^,\[]* # single value
|
\[[^\]]*\])) # list of values
($|,\s*)""", re.VERBOSE)
class ParamsDict(object):
"""A hyperparameter container class."""
RESERVED_ATTR = ['_locked', '_restrictions']
def __init__(self, default_params=None, restrictions=None):
"""Instantiate a ParamsDict.
Instantiate a ParamsDict given a set of default parameters and a list of
restrictions. Upon initialization, it validates itself by checking all the
defined restrictions, and raise error if it finds inconsistency.
Args:
default_params: a Python dict or another ParamsDict object including the
default parameters to initialize.
restrictions: a list of strings, which define a list of restrictions to
ensure the consistency of different parameters internally. Each
restriction string is defined as a binary relation with a set of
operators, including {'==', '!=', '<', '<=', '>', '>='}.
"""
self._locked = False
self._restrictions = []
if restrictions:
self._restrictions = restrictions
if default_params is None:
default_params = {}
self.override(default_params, is_strict=False)
self.validate()
def _set(self, k, v):
if isinstance(v, dict):
self.__dict__[k] = ParamsDict(v)
else:
self.__dict__[k] = copy.deepcopy(v)
def __setattr__(self, k, v):
"""Sets the value of the existing key.
Note that this does not allow directly defining a new key. Use the
`override` method with `is_strict=False` instead.
Args:
k: the key string.
v: the value to be used to set the key `k`.
Raises:
KeyError: if k is not defined in the ParamsDict.
"""
if k not in ParamsDict.RESERVED_ATTR:
if k not in self.__dict__.keys():
        raise KeyError('The key `{}` does not exist. '
                       'To extend the existing keys, use '
                       '`override` with `is_strict` = False.'.format(k))
if self._locked:
raise ValueError('The ParamsDict has been locked. '
'No change is allowed.')
self._set(k, v)
def __getattr__(self, k):
"""Gets the value of the existing key.
Args:
k: the key string.
Returns:
the value of the key.
Raises:
KeyError: if k is not defined in the ParamsDict.
"""
if k not in self.__dict__.keys():
raise KeyError('The key `{}` does not exist. '.format(k))
return self.__dict__[k]
def __contains__(self, key):
"""Implements the membership test operator."""
return key in self.__dict__
def get(self, key, value=None):
"""Accesses through built-in dictionary get method."""
return self.__dict__.get(key, value)
def override(self, override_params, is_strict=True):
"""Override the ParamsDict with a set of given params.
Args:
override_params: a dict or a ParamsDict specifying the parameters to
be overridden.
is_strict: a boolean specifying whether override is strict or not. If
True, keys in `override_params` must be present in the ParamsDict.
If False, keys in `override_params` can be different from what is
currently defined in the ParamsDict. In this case, the ParamsDict will
be extended to include the new keys.
"""
if self._locked:
raise ValueError('The ParamsDict has been locked. No change is allowed.')
if isinstance(override_params, ParamsDict):
override_params = override_params.as_dict()
self._override(override_params, is_strict) # pylint: disable=protected-access
def _override(self, override_dict, is_strict=True):
"""The implementation of `override`."""
for k, v in six.iteritems(override_dict):
if k in ParamsDict.RESERVED_ATTR:
        raise KeyError('The key `{}` is internally reserved. '
                       'It can not be overridden.'.format(k))
if k not in self.__dict__.keys():
if is_strict:
raise KeyError('The key `{}` does not exist. '
'To extend the existing keys, use '
'`override` with `is_strict` = False.'.format(k))
else:
self._set(k, v)
else:
if isinstance(v, dict):
self.__dict__[k]._override(v, is_strict) # pylint: disable=protected-access
elif isinstance(v, ParamsDict):
self.__dict__[k]._override(v.as_dict(), is_strict) # pylint: disable=protected-access
else:
self.__dict__[k] = copy.deepcopy(v)
def lock(self):
"""Makes the ParamsDict immutable."""
self._locked = True
def as_dict(self):
"""Returns a dict representation of ParamsDict.
For the nested ParamsDict, a nested dict will be returned.
"""
params_dict = {}
for k, v in six.iteritems(self.__dict__):
if k not in ParamsDict.RESERVED_ATTR:
if isinstance(v, ParamsDict):
params_dict[k] = v.as_dict()
else:
params_dict[k] = copy.deepcopy(v)
return params_dict
def validate(self):
"""Validate the parameters consistency based on the restrictions.
This method validates the internal consistency using the pre-defined list of
    restrictions. A restriction is defined as a string which specifies a binary
    operation. The supported binary operations are {'==', '!=', '<', '<=', '>',
    '>='}. Note that the meaning of these operators is consistent with the
    underlying Python implementation. Users should make sure the restrictions
    they define make sense for the types of the compared parameters.
For example, for a ParamsDict like the following
```
a:
a1: 1
a2: 2
b:
bb:
bb1: 10
bb2: 20
ccc:
a1: 1
a3: 3
```
one can define two restrictions like this
['a.a1 == b.ccc.a1', 'a.a2 <= b.bb.bb2']
What it enforces are:
    - a.a1 = 1 == b.ccc.a1 = 1
- a.a2 = 2 <= b.bb.bb2 = 20
Raises:
KeyError: if any of the following happens
(1) any of parameters in any of restrictions is not defined in
ParamsDict,
(2) any inconsistency violating the restriction is found.
ValueError: if the restriction defined in the string is not supported.
"""
def _get_kv(dotted_string, params_dict):
tokenized_params = dotted_string.split('.')
v = params_dict
for t in tokenized_params:
v = v[t]
return tokenized_params[-1], v
def _get_kvs(tokens, params_dict):
if len(tokens) != 2:
raise ValueError('Only support binary relation in restriction.')
stripped_tokens = [t.strip() for t in tokens]
left_k, left_v = _get_kv(stripped_tokens[0], params_dict)
right_k, right_v = _get_kv(stripped_tokens[1], params_dict)
return left_k, left_v, right_k, right_v
params_dict = self.as_dict()
for restriction in self._restrictions:
      if '==' in restriction:
        tokens = restriction.split('==')
        _, left_v, _, right_v = _get_kvs(tokens, params_dict)
        if left_v != right_v:
          raise KeyError('Found inconsistency between key `{}` and key `{}`.'
                         .format(tokens[0], tokens[1]))
      elif '!=' in restriction:
        tokens = restriction.split('!=')
        _, left_v, _, right_v = _get_kvs(tokens, params_dict)
        if left_v == right_v:
          raise KeyError('Found inconsistency between key `{}` and key `{}`.'
                         .format(tokens[0], tokens[1]))
      # Test '<=' and '>=' before '<' and '>'. These are substring tests, so a
      # restriction such as 'a <= b' also contains '<'; checking '<' first
      # would split on the wrong operator and fail to parse the restriction.
      elif '<=' in restriction:
        tokens = restriction.split('<=')
        _, left_v, _, right_v = _get_kvs(tokens, params_dict)
        if left_v > right_v:
          raise KeyError('Found inconsistency between key `{}` and key `{}`.'
                         .format(tokens[0], tokens[1]))
      elif '>=' in restriction:
        tokens = restriction.split('>=')
        _, left_v, _, right_v = _get_kvs(tokens, params_dict)
        if left_v < right_v:
          raise KeyError('Found inconsistency between key `{}` and key `{}`.'
                         .format(tokens[0], tokens[1]))
      elif '<' in restriction:
        tokens = restriction.split('<')
        _, left_v, _, right_v = _get_kvs(tokens, params_dict)
        if left_v >= right_v:
          raise KeyError('Found inconsistency between key `{}` and key `{}`.'
                         .format(tokens[0], tokens[1]))
      elif '>' in restriction:
        tokens = restriction.split('>')
        _, left_v, _, right_v = _get_kvs(tokens, params_dict)
        if left_v <= right_v:
          raise KeyError('Found inconsistency between key `{}` and key `{}`.'
                         .format(tokens[0], tokens[1]))
      else:
        raise ValueError('Unsupported relation in restriction.')
def read_yaml_to_params_dict(file_path):
"""Reads a YAML file to a ParamsDict."""
with tf.io.gfile.GFile(file_path, 'r') as f:
    params_dict = yaml.safe_load(f)
return ParamsDict(params_dict)
def save_params_dict_to_yaml(params, file_path):
"""Saves the input ParamsDict to a YAML file."""
with tf.io.gfile.GFile(file_path, 'w') as f:
def _my_list_rep(dumper, data):
# u'tag:yaml.org,2002:seq' is the YAML internal tag for sequence.
return dumper.represent_sequence(
u'tag:yaml.org,2002:seq', data, flow_style=True)
yaml.add_representer(list, _my_list_rep)
yaml.dump(params.as_dict(), f, default_flow_style=False)
def nested_csv_str_to_json_str(csv_str):
"""Converts a nested (using '.') comma-separated k=v string to a JSON string.
Converts a comma-separated string of key/value pairs that supports
nesting of keys to a JSON string. Nesting is implemented using
'.' between levels for a given key.
Spacing between commas and = is supported (e.g. there is no difference between
"a=1,b=2", "a = 1, b = 2", or "a=1, b=2") but there should be no spaces before
keys or after values (e.g. " a=1,b=2" and "a=1,b=2 " are not supported).
Note that this will only support values supported by CSV, meaning
values such as nested lists (e.g. "a=[[1,2,3],[4,5,6]]") are not
supported. Strings are supported as well, e.g. "a='hello'".
An example conversion would be:
"a=1, b=2, c.a=2, c.b=3, d.a.a=5"
to
"{ a: 1, b : 2, c: {a : 2, b : 3}, d: {a: {a : 5}}}"
Args:
csv_str: the comma separated string.
Returns:
the converted JSON string.
Raises:
ValueError: If csv_str is not in a comma separated string or
if the string is formatted incorrectly.
"""
if not csv_str:
return ''
formatted_entries = []
nested_map = collections.defaultdict(list)
pos = 0
while pos < len(csv_str):
m = _PARAM_RE.match(csv_str, pos)
if not m:
raise ValueError('Malformed hyperparameter value while parsing '
'CSV string: %s' % csv_str[pos:])
pos = m.end()
# Parse the values.
m_dict = m.groupdict()
name = m_dict['name']
v = m_dict['val']
# If a GCS path (e.g. gs://...) is provided, wrap this in quotes
# as yaml.load would otherwise throw an exception
    if v.startswith('gs://'):
v = '\'{}\''.format(v)
name_nested = name.split('.')
if len(name_nested) > 1:
grouping = name_nested[0]
value = '.'.join(name_nested[1:]) + '=' + v
nested_map[grouping].append(value)
else:
formatted_entries.append('%s : %s' % (name, v))
for grouping, value in nested_map.items():
value = ','.join(value)
value = nested_csv_str_to_json_str(value)
formatted_entries.append('%s : %s' % (grouping, value))
return '{' + ', '.join(formatted_entries) + '}'
def override_params_dict(params, dict_or_string_or_yaml_file, is_strict):
"""Override a given ParamsDict using a dict, JSON/YAML/CSV string or YAML file.
The logic of the function is outlined below:
1. Test that the input is a dict. If not, proceed to 2.
2. Tests that the input is a string. If not, raise unknown ValueError
2.1. Test if the string is in a CSV format. If so, parse.
If not, proceed to 2.2.
2.2. Try loading the string as a YAML/JSON. If successful, parse to
dict and use it to override. If not, proceed to 2.3.
2.3. Try using the string as a file path and load the YAML file.
Args:
params: a ParamsDict object to be overridden.
dict_or_string_or_yaml_file: a Python dict, JSON/YAML/CSV string or
path to a YAML file specifying the parameters to be overridden.
is_strict: a boolean specifying whether override is strict or not.
Returns:
params: the overridden ParamsDict object.
Raises:
ValueError: if failed to override the parameters.
"""
if not dict_or_string_or_yaml_file:
return params
if isinstance(dict_or_string_or_yaml_file, dict):
params.override(dict_or_string_or_yaml_file, is_strict)
elif isinstance(dict_or_string_or_yaml_file, six.string_types):
try:
dict_or_string_or_yaml_file = (
nested_csv_str_to_json_str(dict_or_string_or_yaml_file))
except ValueError:
pass
    params_dict = yaml.safe_load(dict_or_string_or_yaml_file)
if isinstance(params_dict, dict):
params.override(params_dict, is_strict)
else:
with tf.io.gfile.GFile(dict_or_string_or_yaml_file) as f:
        params.override(yaml.safe_load(f), is_strict)
else:
raise ValueError('Unknown input type to parse.')
return params
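# Usage sketch (a hedged assumption, not part of the original file), using only
# the APIs defined above:
#
#   params = ParamsDict({'a': {'a1': 1}, 'b': {'a1': 1}},
#                       restrictions=['a.a1 == b.a1'])
#   params = override_params_dict(params, 'a.a1=2, b.a1=2', is_strict=True)
#   params.validate()  # raises KeyError if the restriction is violated
#   params.lock()      # further assignments now raise ValueError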
|
PyTorch/LanguageModeling/BERT/lamb_amp_opt/csrc | csrc | compat | #ifndef TORCH_CHECK
#define TORCH_CHECK AT_CHECK
#endif
#ifdef VERSION_GE_1_3
#define DATA_PTR data_ptr
#else
#define DATA_PTR data
#endif
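// Usage sketch (a hedged assumption, not part of the original header): these
// macros smooth over PyTorch C++ API changes across versions, e.g.:
//   TORCH_CHECK(tensor.is_contiguous(), "tensor must be contiguous");
//   float* ptr = tensor.DATA_PTR<float>();  // expands to data_ptr<>() or data<>()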
|
Tools/PyTorch/TimeSeriesPredictionPlatform/models/tft_pyt/triton | triton | run_inference_on_fw | #!/usr/bin/env python3
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""
To infer the model on framework runtime, you can use `run_inference_on_fw.py` script.
It runs inference locally on data obtained from the provided data loader and saves the outputs into dump files.
Those files are stored in the directory pointed to by the `--output-dir` argument.
Example call:
```shell script
python ./triton/run_inference_on_fw.py \
--input-path /models/exported/model.onnx \
--input-type onnx \
--dataloader triton/dataloader.py \
--data-dir /data/imagenet \
--batch-size 32 \
--output-dir /results/dump_local \
--dump-labels
```
"""
import argparse
import logging
import os
from pathlib import Path
from tqdm import tqdm
# method from PEP-366 to support relative import in executed modules
if __package__ is None:
__package__ = Path(__file__).parent.name
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"
os.environ["TF_ENABLE_DEPRECATION_WARNINGS"] = "0"
from .deployment_toolkit.args import ArgParserGenerator # noqa: E402 module level import not at top of file
from .deployment_toolkit.core import ( # noqa: E402 module level import not at top of file
DATALOADER_FN_NAME,
BaseLoader,
BaseRunner,
load_from_file,
)
from .deployment_toolkit.dump import JsonDumpWriter # noqa: E402 module level import not at top of file
from .deployment_toolkit.extensions import loaders, runners # noqa: E402 module level import not at top of file
LOGGER = logging.getLogger("run_inference_on_fw")
def _verify_and_format_dump(args, ids, x, y_pred, y_real):
data = {"outputs": y_pred, "ids": {"ids": ids}}
if args.dump_inputs:
data["inputs"] = x
if args.dump_labels:
if not y_real:
raise ValueError(
"Found empty label values. Please provide labels in dataloader_fn or do not use --dump-labels argument"
)
data["labels"] = y_real
return data
def _parse_and_validate_args():
supported_inputs = set(runners.supported_extensions) & set(loaders.supported_extensions)
parser = argparse.ArgumentParser(description="Dump local inference output of given model", allow_abbrev=False)
parser.add_argument("--input-path", help="Path to input model", required=True)
parser.add_argument("--input-type", help="Input model type", choices=supported_inputs, required=True)
parser.add_argument("--dataloader", help="Path to python file containing dataloader.", required=True)
parser.add_argument("--output-dir", help="Path to dir where output files will be stored", required=True)
parser.add_argument("--dump-labels", help="Dump labels to output dir", action="store_true", default=False)
parser.add_argument("--dump-inputs", help="Dump inputs to output dir", action="store_true", default=False)
parser.add_argument("-v", "--verbose", help="Verbose logs", action="store_true", default=False)
args, *_ = parser.parse_known_args()
get_dataloader_fn = load_from_file(args.dataloader, label="dataloader", target=DATALOADER_FN_NAME)
ArgParserGenerator(get_dataloader_fn).update_argparser(parser)
Loader: BaseLoader = loaders.get(args.input_type)
ArgParserGenerator(Loader, module_path=args.input_path).update_argparser(parser)
Runner: BaseRunner = runners.get(args.input_type)
ArgParserGenerator(Runner).update_argparser(parser)
args = parser.parse_args()
types_requiring_io_params = []
    if args.input_type in types_requiring_io_params and not all(p for p in [args.inputs, args.outputs]):
parser.error(f"For {args.input_type} input provide --inputs and --outputs parameters")
return args
def main():
args = _parse_and_validate_args()
log_level = logging.INFO if not args.verbose else logging.DEBUG
log_format = "%(asctime)s %(levelname)s %(name)s %(message)s"
logging.basicConfig(level=log_level, format=log_format)
LOGGER.info("args:")
for key, value in vars(args).items():
LOGGER.info(f" {key} = {value}")
Loader: BaseLoader = loaders.get(args.input_type)
Runner: BaseRunner = runners.get(args.input_type)
loader = ArgParserGenerator(Loader, module_path=args.input_path).from_args(args)
runner = ArgParserGenerator(Runner).from_args(args)
LOGGER.info(f"Loading {args.input_path}")
model = loader.load(args.input_path)
with runner.init_inference(model=model) as runner_session, JsonDumpWriter(args.output_dir) as writer:
get_dataloader_fn = load_from_file(args.dataloader, label="dataloader", target=DATALOADER_FN_NAME)
dataloader_fn = ArgParserGenerator(get_dataloader_fn).from_args(args)
LOGGER.info("Data loader initialized; Running inference")
for ids, x, y_real in tqdm(dataloader_fn(), unit="batch", mininterval=10):
y_pred = runner_session(x)
data = _verify_and_format_dump(args, ids=ids, x=x, y_pred=y_pred, y_real=y_real)
writer.write(**data)
LOGGER.info("Inference finished")
if __name__ == "__main__":
main()
|