Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 11 additions & 1 deletion fms_mo/utils/import_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,18 @@
Utils for storing what optional dependencies are available
"""

# Standard
import pkgutil
import sys

# Third Party
from transformers.utils.import_utils import _is_package_available
import torch

# Names of every top-level module/package importable from the current sys.path.
# Used below as a fallback when transformers' _is_package_available misses one.
all_available_modules = [
    module_info.name for module_info in pkgutil.iter_modules(sys.path)
]

optional_packages = [
"gptqmodel",
"gptqmodel_exllama_kernels",
Expand All @@ -37,7 +45,9 @@

available_packages = {}
for package in optional_packages:
available_packages[package] = _is_package_available(package)
available_packages[package] = (
_is_package_available(package) or package in all_available_modules
)

# cutlass is detected through torch.ops.cutlass_gemm
available_packages["cutlass"] = hasattr(torch.ops, "cutlass_gemm") and hasattr(
Expand Down
2 changes: 1 addition & 1 deletion tests/build/test_launch_script.py
Original file line number Diff line number Diff line change
Expand Up @@ -250,7 +250,7 @@ def _validate_quantization_output(base_dir, quant_method):
assert os.path.exists(os.path.join(base_dir, "tokenizer.json")) is True
assert os.path.exists(os.path.join(base_dir, "special_tokens_map.json")) is True
assert os.path.exists(os.path.join(base_dir, "tokenizer_config.json")) is True
assert os.path.exists(os.path.join(base_dir, "tokenizer.model")) is True
# assert os.path.exists(os.path.join(base_dir, "tokenizer.model")) is True

# Check quantized model files exist
if quant_method == "gptq":
Expand Down
Loading