from typing import List

from transformers import PretrainedConfig


class MetaLoRAConfig(PretrainedConfig):
    """Configuration class for MetaLoRA models (model_type "metalora")."""

    model_type = "metalora"

    def __init__(
        self,
        mlora_layers: List[int],
        base_size: int,
        embd_model: str,
        llm_tokenizer: str,
        **kwargs,
    ):
        # Store the MetaLoRA-specific fields on the instance so that
        # PretrainedConfig serializes them into config.json.
        self.mlora_layers = mlora_layers
        self.base_size = base_size
        self.embd_model = embd_model
        self.llm_tokenizer = llm_tokenizer
        # Forward any remaining kwargs (standard PretrainedConfig fields)
        # to the base class after the custom attributes are set.
        super().__init__(**kwargs)
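
# --- Usage sketch (illustrative, not part of the original file) ---
# A minimal example of constructing the config and round-tripping it through
# the standard PretrainedConfig save/load API. All argument values below are
# hypothetical placeholders; the file defines no defaults for them.
if __name__ == "__main__":
    config = MetaLoRAConfig(
        mlora_layers=[0, 1, 2],  # hypothetical layer indices
        base_size=4096,  # hypothetical base hidden size
        embd_model="sentence-transformers/all-MiniLM-L6-v2",  # hypothetical embedding model
        llm_tokenizer="gpt2",  # hypothetical tokenizer name
    )
    config.save_pretrained("./metalora_config")
    reloaded = MetaLoRAConfig.from_pretrained("./metalora_config")
    assert reloaded.mlora_layers == config.mlora_layers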