refactor: modify ModelParameter
This commit is contained in:
parent 80c0b20877
commit 9f1561afe7
@@ -1,6 +1,7 @@
import torch.nn as nn
import safetensors.torch as st

from contextlib import contextmanager
from dataclasses import dataclass, field
from typing import Optional, Self, Union
from pathlib import Path

@@ -10,12 +11,38 @@ from astrai.config.model_config import ModelConfig
from astrai.model.transformer import Transformer


@contextmanager
def disable_random_init(enable: bool = True):
    init_functions = [
        "xavier_normal_",
        "xavier_uniform_",
        "kaiming_normal_",
        "kaiming_uniform_",
        "zeros_",
        "ones_",
        "constant_",
        "normal_",
        "uniform_",
    ]
    original_funcs = {}
    for name in init_functions:
        if enable and hasattr(nn.init, name):
            original_funcs[name] = getattr(nn.init, name)
            setattr(nn.init, name, lambda *args, **kwargs: None)
    try:
        yield
    finally:
        if enable:
            for name, orig_func in original_funcs.items():
                setattr(nn.init, name, orig_func)


@dataclass
class BaseModelIO:
    """Base class for model I/O operations."""

    model: Optional[nn.Module] = field(
        default=None, metadata={"help": "Transformer model."}
    model: nn.Module = field(
        default_factory=nn.Identity, metadata={"help": "Transformer model."}
    )
    tokenizer: BpeTokenizer = field(
        default_factory=BpeTokenizer, metadata={"help": "Tokenizer for the model."}

@@ -41,10 +68,13 @@ class BaseModelIO:

        if self.model is not None:
            st.save_file(self.model.state_dict(), str(paths["model"]))

        self.config.save(str(paths["config"]))
        self.tokenizer.save(str(paths["tokenizer"]))

    def load_components(self, load_dir: Union[str, Path]) -> Self:
    def load_components(
        self, load_dir: Union[str, Path], disable_init: bool = False
    ) -> Self:
        """Load core model components."""
        paths = self._get_file_paths(load_dir)

@@ -52,6 +82,7 @@ class BaseModelIO:
        self.tokenizer.load(str(paths["tokenizer"]))

        if self.model is None:
            with disable_random_init(enable=disable_init):
                self.model = Transformer(self.config)

        if paths["model"].exists():

@@ -76,6 +107,8 @@ class ModelParameter(BaseModelIO):
        instance.save_components(save_dir)

    @classmethod
    def load(cls, load_dir: Union[str, Path]) -> "ModelParameter":
    def load(
        cls, load_dir: Union[str, Path], disable_init: bool = False
    ) -> "ModelParameter":
        instance = cls()
        return instance.load_components(load_dir)
        return instance.load_components(load_dir, disable_init=disable_init)
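
Note (not part of the diff): a minimal sketch of the new loading flow under this change, assuming ModelParameter lives in astrai.config.param_config (as the scripts further down import it) and that a local params directory holds the saved components:

from pathlib import Path

from astrai.config.param_config import ModelParameter

PARAMETER_ROOT = Path("params")  # hypothetical directory with model/config/tokenizer files

# disable_init=True routes through disable_random_init(enable=True), so the freshly
# constructed Transformer skips nn.init work before the safetensors weights are
# loaded on top of it; omitting the flag keeps the old randomly-initialised path.
param = ModelParameter.load(PARAMETER_ROOT, disable_init=True)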
@@ -1,5 +1,4 @@
from astrai.inference.core import (
    disable_random_init,
    GeneratorCore,
    EmbeddingEncoderCore,
    KVCacheManager,

@@ -15,7 +14,6 @@ from astrai.inference.generator import (
)

__all__ = [
    "disable_random_init",
    "GeneratorCore",
    "EmbeddingEncoderCore",
    "KVCacheManager",
@@ -1,8 +1,6 @@
import torch
import torch.nn as nn

from torch import Tensor
from contextlib import contextmanager
from typing import Any, Callable, List, Tuple, Union, Optional, Self
from astrai.config import ModelParameter, ModelConfig

@@ -55,31 +53,6 @@ def apply_sampling_strategies(
    return logits


@contextmanager
def disable_random_init():
    init_functions = [
        "xavier_normal_",
        "xavier_uniform_",
        "kaiming_normal_",
        "kaiming_uniform_",
        "zeros_",
        "ones_",
        "constant_",
        "normal_",
        "uniform_",
    ]
    original_funcs = {}
    for name in init_functions:
        if hasattr(nn.init, name):
            original_funcs[name] = getattr(nn.init, name)
            setattr(nn.init, name, lambda *args, **kwargs: None)
    try:
        yield
    finally:
        for name, orig_func in original_funcs.items():
            setattr(nn.init, name, orig_func)


class GeneratorCore:
    def __init__(self, parameter: ModelParameter):
        self.model = parameter.model
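
The copy of disable_random_init removed above is superseded by the enable-gated version added alongside BaseModelIO, so callers stop wrapping load() and pass the flag instead. A small hedged sketch of what the context manager does (import path assumed to be astrai.config.param_config):

import torch
import torch.nn as nn

from astrai.config.param_config import disable_random_init

t = torch.empty(4)
with disable_random_init(enable=True):
    nn.init.normal_(t)  # patched to a no-op inside the context; t keeps its uninitialised values
nn.init.normal_(t)      # original function restored on exit; t is filled with N(0, 1) samples

# Caller migration shown in the scripts below:
#   before: with disable_random_init(): param = ModelParameter.load(model_dir)
#   after:  param = ModelParameter.load(model_dir, disable_init=True)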
@@ -6,7 +6,7 @@ PARAMETER_ROOT = Path(PROJECT_ROOT, "params")

if __name__ == "__main__":
    snapshot_download(
        repo_id="ViperEk/AstrAI",
        repo_id="ViperEk/KHAOSZ",
        local_dir=PARAMETER_ROOT,
        force_download=True,
    )
@@ -1,7 +1,6 @@
import torch
from pathlib import Path
from astrai.config.param_config import ModelParameter
from astrai.inference.core import disable_random_init
from astrai.inference.generator import GeneratorFactory, GenerationRequest

PROJECT_ROOT = Path(__file__).parent.parent

@@ -9,9 +8,7 @@ PARAMETER_ROOT = Path(PROJECT_ROOT, "params")


def generate_text():

    with disable_random_init():
        param = ModelParameter.load(PARAMETER_ROOT)
    param = ModelParameter.load(PARAMETER_ROOT, disable_init=True)
    param.to(device="cuda", dtype=torch.bfloat16)

    query = input(">> ")
@@ -1,7 +1,6 @@
import torch
from pathlib import Path
from astrai.config.param_config import ModelParameter
from astrai.inference.core import disable_random_init
from astrai.inference.generator import GeneratorFactory, GenerationRequest

PROJECT_ROOT = Path(__file__).parent.parent

@@ -9,9 +8,7 @@ PARAMETER_ROOT = Path(PROJECT_ROOT, "params")


def batch_generate():

    with disable_random_init():
        param = ModelParameter.load(PARAMETER_ROOT)
    param = ModelParameter.load(PARAMETER_ROOT, disable_init=True)
    param.to(device="cuda", dtype=torch.bfloat16)

    inputs = [
@@ -1,7 +1,6 @@
import torch
from pathlib import Path
from astrai.config.param_config import ModelParameter
from astrai.inference.core import disable_random_init
from astrai.inference.generator import GeneratorFactory, GenerationRequest

PROJECT_ROOT = Path(__file__).parent.parent

@@ -9,9 +8,7 @@ PARAMETER_ROOT = Path(PROJECT_ROOT, "params")


def chat():

    with disable_random_init():
        param = ModelParameter.load(PARAMETER_ROOT)
    param = ModelParameter.load(PARAMETER_ROOT, disable_init=True)
    param.to(device="cuda", dtype=torch.bfloat16)

    history = []
@@ -4,23 +4,19 @@ import argparse

from astrai.config.param_config import ModelParameter
from astrai.inference.generator import BatchGenerator, GenerationRequest
from astrai.inference.core import disable_random_init


def processor(
    model_dir: str,
    input_json_file: str,
    output_json_file: str,
    batch_size: int,
    temperature: float,
    top_k: int,
    top_p: float,
    question_key: str,
    response_key: str,
):
    with disable_random_init():
        param = ModelParameter.load(model_dir)

    param = ModelParameter.load(model_dir, disable_init=True)
    param.to(device="cuda", dtype=torch.bfloat16)
    generator = BatchGenerator(param)
@@ -7,7 +7,6 @@ import tqdm

from torch import Tensor
from astrai.config.param_config import ModelParameter
from astrai.inference.core import disable_random_init


def compute_perplexity(

@@ -42,9 +41,7 @@ def compute_perplexity(
def process_file(
    model_dir: str, input_file: str, output_file: str, batch_size: int, text_key: str
):
    with disable_random_init():
        param = ModelParameter.load(model_dir)

    param = ModelParameter.load(model_dir, disable_init=True)
    param.to(device="cuda", dtype=torch.bfloat16)
    model = param.model
    tokenizer = param.tokenizer