// Copyright (c) OpenMMLab. All rights reserved
#include <parrots/compute/aten.hpp>
#include <parrots/extension.hpp>
#include <parrots/foundation/ssattrs.hpp>
#include "focal_loss_pytorch.h"
using namespace parrots;
#ifdef MMCV_WITH_CUDA
// Parrots binding for the sigmoid focal-loss forward pass.
// Attributes: "gamma", "alpha" (floats).
// ins  = {input, target, weight}; outs = {output}.
void sigmoid_focal_loss_forward_cuda_parrots(CudaContext& ctx,
                                             const SSElement& attr,
                                             const OperatorBase::in_list_t& ins,
                                             OperatorBase::out_list_t& outs) {
  float gamma, alpha;
  SSAttrs(attr)
      .get<float>("gamma", gamma)
      .get<float>("alpha", alpha)
      .done();
  // Wrap Parrots tensors as ATen tensors before dispatching to the CUDA op.
  const auto& in_tensor = buildATensor(ctx, ins[0]);
  const auto& tgt_tensor = buildATensor(ctx, ins[1]);
  const auto& wgt_tensor = buildATensor(ctx, ins[2]);
  auto out_tensor = buildATensor(ctx, outs[0]);
  sigmoid_focal_loss_forward_cuda(in_tensor, tgt_tensor, wgt_tensor,
                                  out_tensor, gamma, alpha);
}
// Parrots binding for the sigmoid focal-loss backward pass.
// Attributes: "gamma", "alpha" (floats).
// ins  = {input, target, weight}; outs = {grad_input}.
void sigmoid_focal_loss_backward_cuda_parrots(
    CudaContext& ctx, const SSElement& attr, const OperatorBase::in_list_t& ins,
    OperatorBase::out_list_t& outs) {
  float gamma, alpha;
  SSAttrs(attr)
      .get<float>("gamma", gamma)
      .get<float>("alpha", alpha)
      .done();
  // Wrap Parrots tensors as ATen tensors before dispatching to the CUDA op.
  const auto& in_tensor = buildATensor(ctx, ins[0]);
  const auto& tgt_tensor = buildATensor(ctx, ins[1]);
  const auto& wgt_tensor = buildATensor(ctx, ins[2]);
  auto grad_in = buildATensor(ctx, outs[0]);
  sigmoid_focal_loss_backward_cuda(in_tensor, tgt_tensor, wgt_tensor, grad_in,
                                   gamma, alpha);
}
// Parrots binding for the softmax focal-loss forward pass.
// Attributes: "gamma", "alpha" (floats).
// ins  = {input, target, weight}; outs = {output}.
void softmax_focal_loss_forward_cuda_parrots(CudaContext& ctx,
                                             const SSElement& attr,
                                             const OperatorBase::in_list_t& ins,
                                             OperatorBase::out_list_t& outs) {
  float gamma, alpha;
  SSAttrs(attr)
      .get<float>("gamma", gamma)
      .get<float>("alpha", alpha)
      .done();
  // Wrap Parrots tensors as ATen tensors before dispatching to the CUDA op.
  const auto& in_tensor = buildATensor(ctx, ins[0]);
  const auto& tgt_tensor = buildATensor(ctx, ins[1]);
  const auto& wgt_tensor = buildATensor(ctx, ins[2]);
  auto out_tensor = buildATensor(ctx, outs[0]);
  softmax_focal_loss_forward_cuda(in_tensor, tgt_tensor, wgt_tensor,
                                  out_tensor, gamma, alpha);
}
// Parrots binding for the softmax focal-loss backward pass.
// Attributes: "gamma", "alpha" (floats).
// ins  = {input, target, weight}; outs = {buff, grad_input} — note this op
// takes two outputs (a scratch buffer plus the input gradient).
void softmax_focal_loss_backward_cuda_parrots(
    CudaContext& ctx, const SSElement& attr, const OperatorBase::in_list_t& ins,
    OperatorBase::out_list_t& outs) {
  float gamma, alpha;
  SSAttrs(attr)
      .get<float>("gamma", gamma)
      .get<float>("alpha", alpha)
      .done();
  // Wrap Parrots tensors as ATen tensors before dispatching to the CUDA op.
  const auto& in_tensor = buildATensor(ctx, ins[0]);
  const auto& tgt_tensor = buildATensor(ctx, ins[1]);
  const auto& wgt_tensor = buildATensor(ctx, ins[2]);
  auto scratch = buildATensor(ctx, outs[0]);
  auto grad_in = buildATensor(ctx, outs[1]);
  softmax_focal_loss_backward_cuda(in_tensor, tgt_tensor, wgt_tensor, scratch,
                                   grad_in, gamma, alpha);
}
// Operator registrations: each exposes one of the wrappers above to the
// Parrots runtime under the given name, declaring its attributes and the
// number of input/output tensors the binding expects.

// sigmoid forward: 3 inputs (input, target, weight) -> 1 output (loss).
PARROTS_EXTENSION_REGISTER(sigmoid_focal_loss_forward)
    .attr("gamma")
    .attr("alpha")
    .input(3)
    .output(1)
    .apply(sigmoid_focal_loss_forward_cuda_parrots)
    .done();

// sigmoid backward: 3 inputs -> 1 output (grad_input).
PARROTS_EXTENSION_REGISTER(sigmoid_focal_loss_backward)
    .attr("gamma")
    .attr("alpha")
    .input(3)
    .output(1)
    .apply(sigmoid_focal_loss_backward_cuda_parrots)
    .done();

// softmax forward: 3 inputs -> 1 output (loss).
PARROTS_EXTENSION_REGISTER(softmax_focal_loss_forward)
    .attr("gamma")
    .attr("alpha")
    .input(3)
    .output(1)
    .apply(softmax_focal_loss_forward_cuda_parrots)
    .done();

// softmax backward: 3 inputs -> 2 outputs (buff, grad_input); matches the
// two-output wrapper above.
PARROTS_EXTENSION_REGISTER(softmax_focal_loss_backward)
    .attr("gamma")
    .attr("alpha")
    .input(3)
    .output(2)
    .apply(softmax_focal_loss_backward_cuda_parrots)
    .done();
#endif