update config
Browse files- modeling_llama.py +3 -4
modeling_llama.py
CHANGED
|
@@ -299,10 +299,9 @@ class LlamaAttention(nn.Module):
|
|
| 299 |
self.rope_theta = config.rope_theta
|
| 300 |
self.is_causal = True
|
| 301 |
|
| 302 |
-
|
| 303 |
-
self._lambda_ts = 1
|
| 304 |
-
self._lambda_st = 1
|
| 305 |
-
self._lambda_ss = 1
|
| 306 |
|
| 307 |
if (self.head_dim * self.num_heads) != self.hidden_size:
|
| 308 |
raise ValueError(
|
|
|
|
| 299 |
self.rope_theta = config.rope_theta
|
| 300 |
self.is_causal = True
|
| 301 |
|
| 302 |
+
self._lambda_ts = config.lambda_ts
|
| 303 |
+
self._lambda_st = config.lambda_st
|
| 304 |
+
self._lambda_ss = config.lambda_ss
|
|
|
|
| 305 |
|
| 306 |
if (self.head_dim * self.num_heads) != self.hidden_size:
|
| 307 |
raise ValueError(
|