update
This commit is contained in:
parent
2b96630895
commit
151cd71420
|
@ -116,9 +116,10 @@ class MyNewModelConfig(PretrainedConfig):
|
|||
rope_scaling=None,
|
||||
attention_bias=False,
|
||||
attention_dropout=0.0,
|
||||
mlp_bias=True, **kwargs,
|
||||
mlp_bias=True, new_param=0, **kwargs,
|
||||
):
|
||||
self.mlp_bias = mlp_bias
|
||||
self.new_param = new_param
|
||||
self.vocab_size = vocab_size
|
||||
self.max_position_embeddings = max_position_embeddings
|
||||
self.hidden_size = hidden_size
|
||||
|
|
|
@ -7,7 +7,8 @@ class MyNewModelConfig(LlamaConfig):
|
|||
r"""
|
||||
mlp_bias (`bool`, *optional*, defaults to `True`)
|
||||
"""
|
||||
|
||||
def __init__(self, mlp_bias=True, **super_kwargs):
|
||||
|
||||
def __init__(self, mlp_bias=True, new_param=0, **super_kwargs):
    """Initialize the config, extending the parent config with two extra fields.

    Args:
        mlp_bias (`bool`, *optional*, defaults to `True`):
            Whether the MLP layers use a bias term.
        new_param (`int`, *optional*, defaults to `0`):
            Newly added configuration parameter.
        **super_kwargs:
            Forwarded unchanged to the parent config's ``__init__``.
    """
    self.mlp_bias = mlp_bias
    self.new_param = new_param
    # Bug fix: the original called ``super().__init__(self, **super_kwargs)``,
    # passing ``self`` as the parent's first positional parameter (e.g.
    # ``vocab_size`` on a LlamaConfig-style parent). ``super().__init__`` binds
    # ``self`` implicitly — it must not be passed again.
    super().__init__(**super_kwargs)
|
||||
|
|
Loading…
Reference in New Issue