Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix unused pkgs import #1931

Merged
merged 3 commits into from
Jul 18, 2024
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion neural_compressor/torch/utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,4 +15,3 @@
from .environ import *
from .constants import *
from .utility import *
from neural_compressor.torch.algorithms.layer_wise import load_empty_model
62 changes: 62 additions & 0 deletions neural_compressor/torch/utils/utility.py
Original file line number Diff line number Diff line change
Expand Up @@ -278,3 +278,65 @@ def get_processor_type_from_user_config(user_processor_type: Optional[Union[str,
else:
raise NotImplementedError(f"Unsupported processor type: {user_processor_type}")
return processor_type


def dowload_hf_model(repo_id, cache_dir=None, repo_type=None, revision=None):
    """Download a Hugging Face model from the HF hub, reusing the local cache when possible.

    NOTE: the function name is misspelled ("dowload") but is kept unchanged for
    backward compatibility with existing callers.

    Args:
        repo_id (str): hub repository id, e.g. "facebook/opt-125m".
        cache_dir (str, optional): local cache directory. Defaults to the
            huggingface_hub default cache location.
        repo_type (str, optional): repository type on the hub. Defaults to "model".
        revision (str, optional): branch name, tag, or commit hash.
            Defaults to the hub default revision.

    Returns:
        str: local filesystem path of the (cached or freshly downloaded) snapshot.
    """
    import os

    from huggingface_hub.constants import DEFAULT_REVISION, HUGGINGFACE_HUB_CACHE
    from huggingface_hub.file_download import REGEX_COMMIT_HASH, repo_folder_name

    if cache_dir is None:
        cache_dir = HUGGINGFACE_HUB_CACHE
    if revision is None:
        revision = DEFAULT_REVISION
    if repo_type is None:
        repo_type = "model"
    storage_folder = os.path.join(cache_dir, repo_folder_name(repo_id=repo_id, repo_type=repo_type))
    # Resolve the revision to a commit hash: either it already is one, or we
    # look it up in the cached "refs" file written by a previous download.
    commit_hash = None
    if REGEX_COMMIT_HASH.match(revision):
        commit_hash = revision
    else:
        ref_path = os.path.join(storage_folder, "refs", revision)
        if os.path.exists(ref_path):
            with open(ref_path) as f:
                commit_hash = f.read()
    if commit_hash:
        pointer_path = os.path.join(storage_folder, "snapshots", commit_hash)
        if os.path.isdir(pointer_path):
            # Cache hit: return the snapshot directory without touching the network.
            return pointer_path
    # Cache miss (no ref for this revision, or snapshot dir missing): download.
    # Bug fixes vs. the previous version: (1) every miss now reaches this
    # download instead of sometimes falling through and returning None;
    # (2) the caller's cache_dir/repo_type/revision are forwarded instead of
    # being silently replaced by the library defaults.
    from huggingface_hub import snapshot_download  # pragma: no cover

    return snapshot_download(repo_id, repo_type=repo_type, revision=revision, cache_dir=cache_dir)


def load_empty_model(pretrained_model_name_or_path, cls=None, **kwargs):
    """Instantiate an empty (meta-weight) model skeleton without loading weights.

    Args:
        pretrained_model_name_or_path (str): local model directory or HF hub id.
        cls (type, optional): model class to instantiate. Defaults to
            ``AutoModelForCausalLM``.
        **kwargs: forwarded to the config loader (``from_pretrained``).

    Returns:
        The empty model in eval mode, with ``model.path`` set to the original
        ``pretrained_model_name_or_path`` argument.
    """
    import os

    from accelerate import init_empty_weights
    from transformers import AutoConfig, AutoModelForCausalLM
    from transformers.models.auto.auto_factory import _BaseAutoModelClass

    if cls is None:
        cls = AutoModelForCausalLM
    # Local directories are used as-is; hub ids are resolved to a cached path.
    if os.path.isdir(pretrained_model_name_or_path):  # pragma: no cover
        path = pretrained_model_name_or_path
    else:
        path = dowload_hf_model(pretrained_model_name_or_path)
    # Auto classes are built from a generic AutoConfig; concrete model classes
    # carry their own config_class and are constructed directly.
    is_auto_cls = cls.__base__ == _BaseAutoModelClass
    if is_auto_cls:
        config = AutoConfig.from_pretrained(path, **kwargs)
        with init_empty_weights():
            model = cls.from_config(config)
    else:  # pragma: no cover
        config = cls.config_class.from_pretrained(path, **kwargs)
        with init_empty_weights():
            model = cls(config)
    model.tie_weights()
    model.eval()
    model.path = pretrained_model_name_or_path
    return model
Loading