Skip to content

Commit 437c8e7

Browse files
authored
Fix unused pkgs import (#1931)
Signed-off-by: Kaihui-intel <[email protected]>
1 parent ff37401 commit 437c8e7

File tree

3 files changed

+64
-3
lines changed

3 files changed

+64
-3
lines changed

neural_compressor/torch/algorithms/layer_wise/utils.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -90,7 +90,7 @@ def get_named_children(model, pre=[]):
9090
return module_list
9191

9292

93-
def dowload_hf_model(repo_id, cache_dir=None, repo_type=None, revision=None):
93+
def dowload_hf_model(repo_id, cache_dir=None, repo_type=None, revision=None): # pragma: no cover
9494
"""Download hugging face model from hf hub."""
9595
from huggingface_hub.constants import DEFAULT_REVISION, HUGGINGFACE_HUB_CACHE
9696
from huggingface_hub.file_download import REGEX_COMMIT_HASH, repo_folder_name
@@ -122,7 +122,7 @@ def dowload_hf_model(repo_id, cache_dir=None, repo_type=None, revision=None):
122122
return file_path
123123

124124

125-
def load_empty_model(pretrained_model_name_or_path, cls=AutoModelForCausalLM, **kwargs):
125+
def load_empty_model(pretrained_model_name_or_path, cls=AutoModelForCausalLM, **kwargs): # pragma: no cover
126126
"""Load a empty model."""
127127
is_local = os.path.isdir(pretrained_model_name_or_path)
128128
if is_local: # pragma: no cover

neural_compressor/torch/utils/__init__.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,4 +15,3 @@
1515
from .environ import *
1616
from .constants import *
1717
from .utility import *
18-
from neural_compressor.torch.algorithms.layer_wise import load_empty_model

neural_compressor/torch/utils/utility.py

Lines changed: 62 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -278,3 +278,65 @@ def get_processor_type_from_user_config(user_processor_type: Optional[Union[str,
278278
else:
279279
raise NotImplementedError(f"Unsupported processor type: {user_processor_type}")
280280
return processor_type
281+
282+
283+
def dowload_hf_model(repo_id, cache_dir=None, repo_type=None, revision=None):
    """Resolve a Hugging Face hub repo to a local snapshot path, downloading on cache miss.

    First tries to locate an already-cached snapshot under the hub cache layout
    (``<cache>/<repo_folder>/snapshots/<commit_hash>``); only if no usable cached
    snapshot exists does it fall back to ``snapshot_download``.

    Args:
        repo_id (str): hub repository id, e.g. ``"org/model"``.
        cache_dir (str, optional): cache root. Defaults to ``HUGGINGFACE_HUB_CACHE``.
        repo_type (str, optional): repository type. Defaults to ``"model"``.
        revision (str, optional): branch, tag, or commit hash. Defaults to ``DEFAULT_REVISION``.

    Returns:
        str: local filesystem path of the snapshot directory.
    """
    import os

    from huggingface_hub.constants import DEFAULT_REVISION, HUGGINGFACE_HUB_CACHE
    from huggingface_hub.file_download import REGEX_COMMIT_HASH, repo_folder_name

    if cache_dir is None:
        cache_dir = HUGGINGFACE_HUB_CACHE
    if revision is None:
        revision = DEFAULT_REVISION
    if repo_type is None:
        repo_type = "model"
    storage_folder = os.path.join(cache_dir, repo_folder_name(repo_id=repo_id, repo_type=repo_type))
    commit_hash = None
    if REGEX_COMMIT_HASH.match(revision):
        commit_hash = revision
    else:
        # A named revision (branch/tag) is resolved via the refs file the hub cache maintains.
        ref_path = os.path.join(storage_folder, "refs", revision)
        if os.path.exists(ref_path):
            with open(ref_path) as f:
                # strip(): a trailing newline in the ref file would corrupt the snapshot path.
                commit_hash = f.read().strip()
    if storage_folder and commit_hash:
        pointer_path = os.path.join(storage_folder, "snapshots", commit_hash)
        if os.path.isdir(pointer_path):
            # Cache hit: reuse the existing snapshot without touching the network.
            return pointer_path
    # Cache miss (no ref, unknown commit, or snapshot dir absent): download.
    # Previously this branch was only reached when the ref lookup failed entirely, so a
    # stale ref with a missing snapshot dir made the function return None; and the
    # caller's cache_dir/revision/repo_type were silently dropped. Forward them.
    from huggingface_hub import snapshot_download  # pragma: no cover

    file_path = snapshot_download(repo_id, repo_type=repo_type, revision=revision, cache_dir=cache_dir)
    return file_path
315+
316+
317+
def load_empty_model(pretrained_model_name_or_path, cls=None, **kwargs):
    """Build a weight-less (meta-device) model skeleton from a name or local directory.

    Args:
        pretrained_model_name_or_path (str): local model directory or HF hub repo id.
        cls: model class to instantiate; defaults to ``AutoModelForCausalLM``.
        **kwargs: forwarded to the config loader.

    Returns:
        The empty model in eval mode with tied embeddings; the original
        name/path is recorded on ``model.path``.
    """
    import os

    from accelerate import init_empty_weights
    from transformers import AutoConfig, AutoModelForCausalLM
    from transformers.models.auto.auto_factory import _BaseAutoModelClass

    if cls is None:
        cls = AutoModelForCausalLM

    # Resolve a usable local directory: keep local paths as-is, fetch anything else from the hub.
    if os.path.isdir(pretrained_model_name_or_path):  # pragma: no cover
        path = pretrained_model_name_or_path
    else:
        path = dowload_hf_model(pretrained_model_name_or_path)

    # Auto* classes are constructed from a generic AutoConfig via from_config; a concrete
    # model class carries its own config_class and is called directly.
    if cls.__base__ == _BaseAutoModelClass:
        model_config = AutoConfig.from_pretrained(path, **kwargs)
        with init_empty_weights():
            model = cls.from_config(model_config)
    else:  # pragma: no cover
        model_config = cls.config_class.from_pretrained(path, **kwargs)
        with init_empty_weights():
            model = cls(model_config)

    model.tie_weights()
    model.eval()
    model.path = pretrained_model_name_or_path
    return model

0 commit comments

Comments
 (0)