Initialize the LLM Model Config

Args:
    model_name (str): The name of the model
    model_family (str): The family of the model
    repo (str): The repository of the model
    filename (str): The filename of the model
    file_size (float): The size of the model file
    available (bool): Whether the model is available
    *args: Additional positional arguments (logged for debugging)
    **kwargs: Additional keyword arguments (logged for debugging)
Source code in Agent/modules/quantization_llm/models.py
def __init__(
    self,
    model_name: str,
    model_family: str,
    repo: str,
    filename: str,
    file_size: float,
    available: bool,
    *args,
    **kwargs,
):
    """
    Initialize the LLM Model Config

    Args:
        model_name (str): The name of the model
        model_family (str): The family of the model
        repo (str): The repository of the model
        filename (str): The filename of the model
        file_size (float): The size of the model file
        available (bool): Whether the model is available
        *args: Additional positional arguments (logged for debugging)
        **kwargs: Additional keyword arguments (logged for debugging)
    """
    self.model_name = model_name
    self.model_family = model_family
    self.repo = repo
    self.filename = filename
    self.file_size = file_size
    self.available = available
    # The LLM instance is not loaded here; it stays None until set elsewhere
    self.llm = None
    # Log any extra positional/keyword arguments for debugging instead of dropping them silently
    logger.debug(args)
    logger.debug(kwargs)
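Below is a minimal usage sketch of this constructor. The class name `LLMModelConfig`, the import, and the repo, filename, and size values are illustrative assumptions and are not taken from the source.

# Hypothetical usage sketch: the class name LLMModelConfig and all values
# below are assumptions for illustration, not taken from the project.
from Agent.modules.quantization_llm.models import LLMModelConfig  # assumed class name

config = LLMModelConfig(
    model_name="llama-2-7b",                 # illustrative model name
    model_family="llama",                    # illustrative model family
    repo="example-org/llama-2-7b-gguf",      # illustrative repository identifier
    filename="llama-2-7b.Q4_K_M.gguf",       # illustrative quantized model file
    file_size=3.8,                           # illustrative size (units as defined by the project)
    available=True,                          # assume the model file is available
)

# The underlying LLM is not loaded by the constructor
assert config.llm is None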