Skip to content

Commit 2d11f50

Browse files
committed
Document-wise improvements
- Add usage example in module docstring. - Modified what is shown on the document page. Signed-off-by: Muti Chung <mtchung037@gmail.com>
1 parent 23a9d73 commit 2d11f50

File tree

1 file changed

+38
-9
lines changed

1 file changed

+38
-9
lines changed

src/llmcompressor/modifiers/awq/convert_autoawq.py

Lines changed: 38 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,33 @@
1+
"""
2+
Convert AutoAWQ models to llmcompressor-compatible models.
3+
4+
This module offers the functionality to convert models quantized with AutoAWQ into
5+
compressed models in llmcompressor's format, which can then be served with vLLM.
6+
This module can be used as a CLI tool or as a Python API.
7+
8+
## CLI Usage
9+
10+
```sh
11+
python -m llmcompressor.modifiers.awq.convert_autoawq \
12+
--model-name-or-path /path/to/model \
13+
--output-dir /path/to/compressed/model \
14+
--quantization-format naive-quantized
15+
```
16+
17+
For more information, run `python -m llmcompressor.modifiers.awq.convert_autoawq --help`
18+
or refer to the `ConversionArgs` dataclass below.
19+
20+
## Python API Usage
21+
22+
```python
23+
from llmcompressor.modifiers.awq.convert_autoawq import load_and_convert_from_autoawq
24+
25+
awq_model_path = "/path/to/model" # can also be model_id on huggingface hub
26+
model = load_and_convert_from_autoawq(awq_model_path)
27+
model.generate(...) # the converted model is now ready to be used.
28+
```
29+
"""
30+
131
import glob
232
import os
333
from dataclasses import dataclass, field
@@ -24,7 +54,9 @@
2454

2555

2656
def is_autoawq_model(model_path: Path, trust_remote_code: bool = False) -> bool:
27-
config = transformers.AutoConfig.from_pretrained(model_path, trust_remote_code=trust_remote_code)
57+
config = transformers.AutoConfig.from_pretrained(
58+
model_path, trust_remote_code=trust_remote_code
59+
)
2860
if not hasattr(config, "quantization_config"):
2961
return False
3062

@@ -33,15 +65,12 @@ def is_autoawq_model(model_path: Path, trust_remote_code: bool = False) -> bool:
3365

3466

3567
def resolve_model_path(model_name_or_path: str) -> Path:
36-
"""Locate the model path.
37-
38-
If the input is a repository ID, download the model from the Hugging Face Hub and
39-
return the path to the local directory.
40-
"""
4168
if os.path.isdir(model_name_or_path):
4269
return Path(model_name_or_path)
43-
44-
return Path(snapshot_download(model_name_or_path))
70+
else:
71+
# If the input is a model ID, download the model from the Hugging Face Hub and
72+
# return the path to the local directory.
73+
return Path(snapshot_download(model_name_or_path))
4574

4675

4776
def load_state_dict_from_model_dir(model_path: Path) -> dict[str, torch.Tensor]:
@@ -273,7 +302,7 @@ class ConversionArgs:
273302
)
274303

275304

276-
__all__ = ["convert_and_save", "load_and_convert_from_autoawq"]
305+
__all__ = ["convert_and_save", "load_and_convert_from_autoawq", "ConversionArgs"]
277306

278307

279308
if __name__ == "__main__":

0 commit comments

Comments
 (0)