#include <cuda_fp16.h>
#include <torch/extension.h>

#include <vector>

namespace multihead_attn
{
namespace fused_softmax
{
namespace generic_scaled_masked_softmax
{

// CUDA implementations of the fused kernel; expected to be defined in the
// accompanying .cu translation unit.
torch::Tensor fwd_cuda(
    torch::Tensor const &input,
    torch::Tensor const &mask,
    float scale_factor);

torch::Tensor bwd_cuda(
    torch::Tensor const &output_grads,
    torch::Tensor const &softmax_results,
    float scale_factor);

// Python-facing forward wrapper: validate shape and dtype, then dispatch to CUDA.
torch::Tensor fwd(
    torch::Tensor const &input,
    torch::Tensor const &mask,
    float scale_factor)
{
  TORCH_CHECK(input.dim() == 4, "expected 4D tensor");
  TORCH_CHECK((input.scalar_type() == at::ScalarType::Half) ||
                  (input.scalar_type() == at::ScalarType::BFloat16),
              "Only fp16 and bf16 are supported");
  TORCH_CHECK(mask.dim() == 4, "expected 4D tensor");

  return fwd_cuda(input, mask, scale_factor);
}

// Python-facing backward wrapper: validate shape and dtype of the incoming
// gradients and the saved softmax results, then dispatch to CUDA.
torch::Tensor bwd(
    torch::Tensor const &output_grads,
    torch::Tensor const &softmax_results,
    float scale_factor)
{
  TORCH_CHECK(output_grads.dim() == 4, "expected 4D tensor");
  TORCH_CHECK(softmax_results.dim() == 4, "expected 4D tensor");

  TORCH_CHECK((output_grads.scalar_type() == at::ScalarType::Half) ||
                  (output_grads.scalar_type() == at::ScalarType::BFloat16),
              "Only fp16 and bf16 are supported");
  TORCH_CHECK((softmax_results.scalar_type() == at::ScalarType::Half) ||
                  (softmax_results.scalar_type() == at::ScalarType::BFloat16),
              "Only fp16 and bf16 are supported");

  return bwd_cuda(output_grads, softmax_results, scale_factor);
}

} // namespace generic_scaled_masked_softmax
} // namespace fused_softmax
} // namespace multihead_attn

PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {
  m.def("forward",
        &multihead_attn::fused_softmax::generic_scaled_masked_softmax::fwd,
        "Self Multihead Attention scaled, time masked softmax -- Forward.");

  m.def("backward",
        &multihead_attn::fused_softmax::generic_scaled_masked_softmax::bwd,
        "Self Multihead Attention scaled, time masked softmax -- Backward.");
}
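
// Example usage from Python (an illustrative sketch, not taken from this file):
// it assumes the extension has been built, e.g. with torch.utils.cpp_extension,
// under the hypothetical module name "generic_scaled_masked_softmax_cuda", and
// that the kernel expects 4D (batches, heads, queries, keys) fp16/bf16 scores
// plus a 4D mask, as the checks above suggest; shapes and the mask layout below
// are illustrative only.
//
//   import torch
//   import generic_scaled_masked_softmax_cuda as fused_softmax
//
//   scores = torch.randn(8, 16, 128, 128, dtype=torch.float16, device="cuda")
//   mask = torch.zeros(8, 1, 128, 128, dtype=torch.bool, device="cuda")
//   probs = fused_softmax.forward(scores, mask, 1.0 / 8.0)
//   grads = fused_softmax.backward(torch.ones_like(probs), probs, 1.0 / 8.0)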