modelzoo.transformers.utils.attention_utils.FixedSparseAttentionBuilder

class modelzoo.transformers.utils.attention_utils.FixedSparseAttentionBuilder

Bases: modelzoo.transformers.utils.attention_utils.SparseAttentionBuilder

Methods

build_mask

get_pytorch_mask

set_global_attention

set_local_attention

trim_to_autoregressive_format

__init__(num_heads, max_sequence_length, num_different_head_attn_configs=1, local_attn_ctx=4, global_attn_ctx=1, attention_type='unidirectional', horizontal_global_attention=False)