Source code for modelzoo.transformers.pytorch.layers_api_demo.cb_transformer

# Copyright 2022 Cerebras Systems.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import math

from torch import Tensor, nn

from modelzoo.common.pytorch.layers import (
    EmbeddingLayer,
    TransformerEncoder,
    TransformerEncoderLayer,
)


class TransformerModel(nn.Module):
    def __init__(
        self,
        ntoken: int,
        d_model: int,
        nhead: int,
        d_hid: int,
        nlayers: int,
        dropout: float = 0.5,
        activation: str = "gelu",
        max_len: int = 10,
    ):
        super().__init__()
        self.model_type = "Transformer"
        self.encoder = EmbeddingLayer(
            vocab_size=ntoken,
            embedding_size=d_model,
            position_embedding_type="fixed",
            max_position_embeddings=max_len,
        )
        encoder_layers = TransformerEncoderLayer(
            d_model,
            nhead,
            d_hid,
            dropout,
            batch_first=True,
            activation=activation,
        )
        self.transformer_encoder = TransformerEncoder(encoder_layers, nlayers)
        self.d_model = d_model
        self.decoder = nn.Linear(d_model, ntoken)
        self.init_weights()
    def init_weights(self) -> None:
        initrange = 0.1
        self.decoder.bias.data.zero_()
        self.decoder.weight.data.uniform_(-initrange, initrange)
    def forward(self, src: Tensor, src_mask: Tensor) -> Tensor:
        """
        Args:
            src: Tensor, shape [batch_size, seq_len]
            src_mask: Tensor, shape [seq_len, seq_len]

        Returns:
            output Tensor of shape [batch_size, seq_len, ntoken]
        """
        # Scale embeddings by sqrt(d_model), as in the original Transformer.
        src = self.encoder(src) * math.sqrt(self.d_model)
        src = self.transformer_encoder(src, src_mask)
        output = self.decoder(src)
        return output
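

# A minimal usage sketch (an addition, not part of the original module):
# it constructs the model with illustrative hyperparameters and runs one
# forward pass with an additive causal mask matching the [seq_len, seq_len]
# shape documented in forward(). The hyperparameter values are assumptions,
# and whether these Cerebras layers execute eagerly outside a Cerebras
# compile flow is not guaranteed by this file.
if __name__ == "__main__":
    import torch

    ntoken, d_model, nhead, d_hid, nlayers, max_len = 1000, 128, 4, 512, 2, 10
    model = TransformerModel(
        ntoken, d_model, nhead, d_hid, nlayers, max_len=max_len
    )

    batch_size, seq_len = 8, max_len
    src = torch.randint(0, ntoken, (batch_size, seq_len))
    # Additive causal mask: 0.0 on and below the diagonal, -inf above it,
    # so each position attends only to itself and earlier positions.
    src_mask = torch.triu(
        torch.full((seq_len, seq_len), float("-inf")), diagonal=1
    )
    logits = model(src, src_mask)  # [batch_size, seq_len, ntoken]
    print(logits.shape)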