Skip to content

Commit cdf41c9

Browse files
vasquleaderofARS
authored and committed
[mRope] Fix warnings (huggingface#42660)
fix warning
1 parent b81b441 commit cdf41c9

File tree

8 files changed

+20
-10
lines changed

8 files changed

+20
-10
lines changed

src/transformers/models/glm4v/configuration_glm4v.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -234,7 +234,9 @@ def __init__(
234234
self.attention_dropout = attention_dropout
235235
self.rope_parameters = rope_parameters
236236

237-
super().__init__(tie_word_embeddings=tie_word_embeddings, ignore_keys_at_rope_validation={"mrope"}, **kwargs)
237+
super().__init__(
238+
tie_word_embeddings=tie_word_embeddings, ignore_keys_at_rope_validation={"mrope_section"}, **kwargs
239+
)
238240

239241

240242
class Glm4vConfig(PreTrainedConfig):

src/transformers/models/glm4v/modular_glm4v.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -271,7 +271,9 @@ def __init__(
271271
self.attention_dropout = attention_dropout
272272
self.rope_parameters = rope_parameters
273273

274-
super().__init__(tie_word_embeddings=tie_word_embeddings, ignore_keys_at_rope_validation={"mrope"}, **kwargs)
274+
super().__init__(
275+
tie_word_embeddings=tie_word_embeddings, ignore_keys_at_rope_validation={"mrope_section"}, **kwargs
276+
)
275277

276278

277279
class Glm4vConfig(PreTrainedConfig):

src/transformers/models/glm4v_moe/configuration_glm4v_moe.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -280,7 +280,9 @@ def __init__(
280280
self.first_k_dense_replace = first_k_dense_replace
281281
self.norm_topk_prob = norm_topk_prob
282282
self.router_aux_loss_coef = router_aux_loss_coef
283-
super().__init__(tie_word_embeddings=tie_word_embeddings, ignore_keys_at_rope_validation={"mrope"}, **kwargs)
283+
super().__init__(
284+
tie_word_embeddings=tie_word_embeddings, ignore_keys_at_rope_validation={"mrope_section"}, **kwargs
285+
)
284286

285287

286288
class Glm4vMoeConfig(PreTrainedConfig):

src/transformers/models/glm4v_moe/modular_glm4v_moe.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -227,7 +227,7 @@ def __init__(
227227
self.norm_topk_prob = norm_topk_prob
228228
self.router_aux_loss_coef = router_aux_loss_coef
229229
PreTrainedConfig.__init__(
230-
self, tie_word_embeddings=tie_word_embeddings, ignore_keys_at_rope_validation={"mrope"}, **kwargs
230+
self, tie_word_embeddings=tie_word_embeddings, ignore_keys_at_rope_validation={"mrope_section"}, **kwargs
231231
)
232232

233233

src/transformers/models/qwen2_5_omni/configuration_qwen2_5_omni.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -365,7 +365,7 @@ def __init__(
365365
self.rope_parameters = rope_parameters
366366
super().__init__(
367367
tie_word_embeddings=tie_word_embeddings,
368-
ignore_keys_at_rope_validation={"mrope"},
368+
ignore_keys_at_rope_validation={"mrope_section"},
369369
**kwargs,
370370
)
371371

@@ -713,7 +713,9 @@ def __init__(
713713
layer_type_validation(self.layer_types, self.num_hidden_layers)
714714

715715
self.rope_parameters = rope_parameters
716-
super().__init__(tie_word_embeddings=tie_word_embeddings, ignore_keys_at_rope_validation={"mrope"}, **kwargs)
716+
super().__init__(
717+
tie_word_embeddings=tie_word_embeddings, ignore_keys_at_rope_validation={"mrope_section"}, **kwargs
718+
)
717719

718720

719721
class Qwen2_5OmniDiTConfig(PreTrainedConfig):

src/transformers/models/qwen2_5_omni/modular_qwen2_5_omni.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -399,7 +399,7 @@ def __init__(
399399
self.rope_parameters = rope_parameters
400400
super().__init__(
401401
tie_word_embeddings=tie_word_embeddings,
402-
ignore_keys_at_rope_validation={"mrope"},
402+
ignore_keys_at_rope_validation={"mrope_section"},
403403
**kwargs,
404404
)
405405

@@ -747,7 +747,9 @@ def __init__(
747747
layer_type_validation(self.layer_types, self.num_hidden_layers)
748748

749749
self.rope_parameters = rope_parameters
750-
super().__init__(tie_word_embeddings=tie_word_embeddings, ignore_keys_at_rope_validation={"mrope"}, **kwargs)
750+
super().__init__(
751+
tie_word_embeddings=tie_word_embeddings, ignore_keys_at_rope_validation={"mrope_section"}, **kwargs
752+
)
751753

752754

753755
class Qwen2_5OmniDiTConfig(PreTrainedConfig):

src/transformers/models/qwen2_5_vl/configuration_qwen2_5_vl.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -230,7 +230,7 @@ def __init__(
230230
bos_token_id=bos_token_id,
231231
eos_token_id=eos_token_id,
232232
pad_token_id=pad_token_id,
233-
ignore_keys_at_rope_validation={"mrope"},
233+
ignore_keys_at_rope_validation={"mrope_section"},
234234
**kwargs,
235235
)
236236

src/transformers/models/qwen2_vl/configuration_qwen2_vl.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -218,7 +218,7 @@ def __init__(
218218
bos_token_id=bos_token_id,
219219
eos_token_id=eos_token_id,
220220
pad_token_id=pad_token_id,
221-
ignore_keys_at_rope_validation={"mrope"},
221+
ignore_keys_at_rope_validation={"mrope_section"},
222222
**kwargs,
223223
)
224224

0 commit comments

Comments (0)