1 parent 6e60b90 commit 98ea10c
test/transformers/test_monkey_patch.py
@@ -1663,7 +1663,7 @@ def test_apply_liger_kernel_to_instance_for_glm4v():
     from liger_kernel.transformers.model.glm4v import lce_forward as glm4v_lce_forward
 
     # Instantiate a dummy model
-    config = transformers.models.paligemma.configuration_glm4v.Glm4vConfig(
+    config = transformers.models.glm4v.configuration_glm4v.Glm4vConfig(
         torch_dtype=torch.bfloat16,
         text_config={
             "num_hidden_layers": 2,