cerebras.modelzoo.config_manager.config_classes.base.model_config

Config classes for model configuration.

Classes

InitializerConfig

InitializerConfig(name: str = typing.Literal['constant', 'ones', 'zeros', 'eye', 'uniform', 'normal', 'xavier_normal', 'glorot_normal', 'xavier_uniform', 'glorot_uniform', 'truncated_normal', 'variance_scaling', 'lecun_normal', 'lecun_uniform', 'kaiming_normal', 'kaiming_uniform'], mean: Optional[float] = None, std: Optional[float] = None, a: Optional[float] = None, b: Optional[float] = None, nonlinearity: Optional[Literal['linear', 'conv1d', 'conv2d', 'conv3d', 'conv_transpose1d', 'conv_transpose2d', 'conv_transpose3d', 'sigmoid', 'tanh', 'relu', 'leaky_relu']] = None, mode: Optional[str] = None, scale: Optional[float] = None, distribution: Optional[str] = None)
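
A minimal sketch of building a truncated-normal initializer config, assuming the class is instantiated directly with the keyword arguments shown in the signature above (the numeric values are illustrative, not defaults from the library):

    from cerebras.modelzoo.config_manager.config_classes.base.model_config import (
        InitializerConfig,
    )

    # Truncated-normal initializer, clipped to [-0.04, 0.04]; values are illustrative.
    init_config = InitializerConfig(
        name="truncated_normal",
        mean=0.0,
        std=0.02,
        a=-0.04,
        b=0.04,
    )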

LoraConfig

LoraConfig(r: int = 0, alpha: int = 1, dropout: float = 0.0, fan_in_fan_out: bool = False, merge_weights: bool = True, target_modules: Optional[list] = None)
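
A hedged example of a LoRA adapter config using only the fields from the signature above; the module names in target_modules are hypothetical placeholders, since the valid names depend on the specific model being adapted:

    from cerebras.modelzoo.config_manager.config_classes.base.model_config import (
        LoraConfig,
    )

    # Rank-8 LoRA adapter with scaling alpha=16 and light dropout.
    # target_modules entries are illustrative; real names depend on the model.
    lora_config = LoraConfig(
        r=8,
        alpha=16,
        dropout=0.05,
        merge_weights=True,
        target_modules=["proj_q_dense_layer", "proj_k_dense_layer"],
    )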

ModelConfig

ModelConfig(mixed_precision: bool = False, fp16_type: Optional[Literal['bfloat16', 'float16', 'cbfloat16']] = 'bfloat16', boundary_casting: Optional[bool] = False, lora_params: Union[cerebras.modelzoo.config_manager.config_classes.base.model_config.LoraConfig, List[cerebras.modelzoo.config_manager.config_classes.base.model_config.LoraConfig], NoneType] = None)
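
A sketch of a mixed-precision ModelConfig that carries a single LoraConfig, assuming direct keyword construction as listed in the signature above (chosen values are examples, not recommendations):

    from cerebras.modelzoo.config_manager.config_classes.base.model_config import (
        LoraConfig,
        ModelConfig,
    )

    # Mixed precision in cbfloat16 with one LoRA adapter configuration attached.
    model_config = ModelConfig(
        mixed_precision=True,
        fp16_type="cbfloat16",
        lora_params=LoraConfig(r=8, alpha=16),
    )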

NormKWArgsConfig

NormKWArgsConfig()