fix(config): 修改Checkpoint类中tokenizer和config字段的默认值初始化方式

This commit is contained in:
ViperEkura 2025-10-29 13:24:20 +08:00
parent bad6243b53
commit 12850d403c
1 changed files with 2 additions and 2 deletions

View File

@@ -100,11 +100,11 @@ class Checkpoint(BaseModelIO):
         metadata={"help": "Transformer model."}
     )
     tokenizer: BpeTokenizer = field(
-        default=None,
+        default_factory=BpeTokenizer,
         metadata={"help": "Tokenizer for the model."}
     )
     config: TransformerConfig = field(
-        default=None,
+        default_factory=TransformerConfig,
         metadata={"help": "Transformer model configuration."}
     )
     optimizer_state: Dict[str, Any] = field(