cerebras.modelzoo.config_manager.config_classes.base.model_config.ModelConfig#

class cerebras.modelzoo.config_manager.config_classes.base.model_config.ModelConfig[source]#

ModelConfig(mixed_precision: bool = False, fp16_type: Optional[Literal['bfloat16', 'float16', 'cbfloat16']] = 'bfloat16', boundary_casting: Optional[bool] = False, lora_params: Optional[Union[cerebras.modelzoo.config_manager.config_classes.base.model_config.LoraConfig, List[cerebras.modelzoo.config_manager.config_classes.base.model_config.LoraConfig]]] = None)

mixed_precision: bool = False#

Whether to run the model in mixed precision mode.

fp16_type: Optional[Literal['bfloat16', 'float16', 'cbfloat16']] = 'bfloat16'#

The type of 16-bit floating-point precision to use.

boundary_casting: Optional[bool] = False#

lora_params: Optional[Union[cerebras.modelzoo.config_manager.config_classes.base.model_config.LoraConfig, List[cerebras.modelzoo.config_manager.config_classes.base.model_config.LoraConfig]]] = None#

LoRA configuration for the model; either a single LoraConfig or a list of LoraConfig instances.
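The fields above correspond directly to keyword arguments of the constructor shown in the signature. Below is a minimal sketch of building a ModelConfig in Python; it assumes only that the module path shown in the signature is importable in a Cerebras Model Zoo environment and that the listed defaults apply.

    # Minimal sketch: construct a ModelConfig with mixed precision enabled.
    # Assumes the import path from the signature above resolves in a
    # Cerebras Model Zoo environment.
    from cerebras.modelzoo.config_manager.config_classes.base.model_config import (
        ModelConfig,
    )

    config = ModelConfig(
        mixed_precision=True,   # run the model in mixed precision mode
        fp16_type="cbfloat16",  # one of 'bfloat16', 'float16', 'cbfloat16'
    )

    print(config.mixed_precision)  # True
    print(config.fp16_type)        # cbfloat16

Fields not passed explicitly (for example boundary_casting and lora_params above) keep the defaults shown in the signature.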