修改了awq的量化readme

This commit is contained in:
刘丹 2024-06-27 09:45:50 +08:00
parent 58ff788df6
commit c0ace35df1
2 changed files with 3 additions and 3 deletions

View File

@@ -312,7 +312,7 @@ print(model.response("<用户>山东省最高的山是哪座山, 它比黄山高
custom_data=[{'question':'过敏性鼻炎有什么症状?','answer':'过敏性鼻炎可能鼻塞,流鼻涕,头痛等症状反复发作,严重时建议及时就医。'},
{'question':'1+1等于多少','answer':'等于2'}]
```
4. 根据选择的数据集修改quantize/awq_quantize.py 为以下三行代码其中一行:
4. 根据选择的数据集修改quantize/awq_quantize.py 中第三十八行为以下三行代码其中一行:
```python
#使用wikitext进行量化
model.quantize(tokenizer, quant_config=quant_config, calib_data=load_wikitext(quant_data_path=quant_data_path))

View File

@@ -7,10 +7,10 @@ import os
model_path = '/root/ld/ld_model_pretrained/MiniCPM-1B-sft-bf16' # model_path or model_id
quant_path = '/root/ld/ld_project/pull_request/MiniCPM/quantize/awq_cpm_1b_4bit' # quant_save_path
quant_data_path='/root/ld/ld_project/pull_request/MiniCPM/quantize/quantize_data/wikitext'# 写入自带
quant_data_path='/root/ld/ld_project/pull_request/MiniCPM/quantize/quantize_data/wikitext'# 写入自带数据集地址
quant_config = { "zero_point": True, "q_group_size": 128, "w_bit": 4, "version": "GEMM" } # "w_bit":4 or 8
quant_samples=512 # how many samples to use for calibration
custom_data=[{'question':'你叫什么名字。','answer':'我是openbmb开源的小钢炮minicpm。'},
custom_data=[{'question':'你叫什么名字。','answer':'我是openbmb开源的小钢炮minicpm。'}, # 自定义数据集可用
{'question':'你有什么特色。','answer':'我很小,但是我很强。'}]
# Load model
model = AutoAWQForCausalLM.from_pretrained(model_path)