has_offloaded_params
1 parent 74c1063 commit c7c9248
src/llmcompressor/transformers/sparsification/compressed_tensors_utils.py
@@ -10,7 +10,7 @@
     ModelCompressor,
     SparsityCompressionConfig,
     delete_offload_parameter,
-    is_module_offloaded,
+    has_offloaded_params,
     register_offload_parameter,
 )
 from loguru import logger
@@ -138,7 +138,7 @@ def untie_word_embeddings(model: PreTrainedModel):
             continue
 
         # this could be replaced by a `get_offloaded_parameter` util
-        if not is_module_offloaded(module):
+        if not has_offloaded_params(module):
            untied_data = module.weight.data.clone()
         else:
            untied_data = module._hf_hook.weights_map["weight"].clone()
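For context, here is a minimal standalone sketch of the pattern this hunk implements: clone a module's weight whether or not its parameters are offloaded through an accelerate hook. It assumes `has_offloaded_params` is importable from `compressed_tensors.utils` (the diff shows the name in the import list but not its source module), and the wrapper name `read_weight_data` is hypothetical, standing in for the `get_offloaded_parameter` util the code comment anticipates.

# Sketch only; not the repository's code. Assumes compressed_tensors.utils
# exposes has_offloaded_params, as the import hunk above suggests.
from compressed_tensors.utils import has_offloaded_params


def read_weight_data(module):
    """Clone a module's weight data, handling accelerate-offloaded params."""
    if not has_offloaded_params(module):
        # Weight lives on-device; its .data can be cloned directly.
        return module.weight.data.clone()
    # Offloaded modules keep the real tensors in the accelerate hook's
    # weights_map, so read from there instead of the placeholder weight.
    return module._hf_hook.weights_map["weight"].clone()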