From c0ace35df137fd9fb6e51be5e30ff03ffa94fb64 Mon Sep 17 00:00:00 2001
From: 刘丹
Date: Thu, 27 Jun 2024 09:45:50 +0800
Subject: [PATCH] Modified the awq quantization readme
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 README.md                | 2 +-
 quantize/awq_quantize.py | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index 0c4a69d..9ae2a4c 100644
--- a/README.md
+++ b/README.md
@@ -312,7 +312,7 @@ print(model.response("<用户>山东省最高的山是哪座山, 它比黄山高
    custom_data=[{'question':'过敏性鼻炎有什么症状?','answer':'过敏性鼻炎可能鼻塞,流鼻涕,头痛等症状反复发作,严重时建议及时就医。'},
                 {'question':'1+1等于多少?','answer':'等于2'}]
    ```
-4. 根据选择的数据集,修改quantize/awq_quantize.py 为以下三行代码其中一行:
+4. 根据选择的数据集,修改quantize/awq_quantize.py 中第三十八行为以下三行代码其中一行:
    ```python
    #使用wikitext进行量化
    model.quantize(tokenizer, quant_config=quant_config, calib_data=load_wikitext(quant_data_path=quant_data_path))

diff --git a/quantize/awq_quantize.py b/quantize/awq_quantize.py
index 0a1543b..b40b14e 100644
--- a/quantize/awq_quantize.py
+++ b/quantize/awq_quantize.py
@@ -7,10 +7,10 @@ import os
 
 model_path = '/root/ld/ld_model_pretrained/MiniCPM-1B-sft-bf16' # model_path or model_id
 quant_path = '/root/ld/ld_project/pull_request/MiniCPM/quantize/awq_cpm_1b_4bit' # quant_save_path
-quant_data_path='/root/ld/ld_project/pull_request/MiniCPM/quantize/quantize_data/wikitext'# 写入自带
+quant_data_path='/root/ld/ld_project/pull_request/MiniCPM/quantize/quantize_data/wikitext'# 写入自带数据集地址
 quant_config = { "zero_point": True, "q_group_size": 128, "w_bit": 4, "version": "GEMM" } # "w_bit":4 or 8
 quant_samples=512 # how many samples to use for calibration
-custom_data=[{'question':'你叫什么名字。','answer':'我是openmbmb开源的小钢炮minicpm。'},
+custom_data=[{'question':'你叫什么名字。','answer':'我是openmbmb开源的小钢炮minicpm。'}, # 自定义数据集可用
 {'question':'你有什么特色。','answer':'我很小,但是我很强。'}]
 # Load model
 model = AutoAWQForCausalLM.from_pretrained(model_path)
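
For context, the sketch below strings together the full AWQ flow that the patched README and script describe: load MiniCPM-1B, build calibration text from the custom question/answer pairs, quantize to 4-bit, and save the result. It is a minimal sketch assuming the `autoawq` and `transformers` packages are installed; the paths and `quant_config` come from the diff above, while the way the Q/A pairs are turned into calibration strings is an illustrative assumption (the repository's own loaders, such as `load_wikitext`, are not shown in this patch).

```python
# Hypothetical end-to-end AWQ quantization sketch for MiniCPM-1B.
# Paths and quant_config are taken from the diff; the calibration-text
# formatting below is an assumption, not the repository's exact helper.
from awq import AutoAWQForCausalLM
from transformers import AutoTokenizer

model_path = '/root/ld/ld_model_pretrained/MiniCPM-1B-sft-bf16'  # model path or model id
quant_path = '/root/ld/ld_project/pull_request/MiniCPM/quantize/awq_cpm_1b_4bit'  # where the quantized model is saved

quant_config = {"zero_point": True, "q_group_size": 128, "w_bit": 4, "version": "GEMM"}  # "w_bit": 4 or 8

# Custom calibration data in the question/answer format the README describes.
# A real run would use far more samples (the script defaults to 512).
custom_data = [
    {'question': '你叫什么名字。', 'answer': '我是openmbmb开源的小钢炮minicpm。'},
    {'question': '你有什么特色。', 'answer': '我很小,但是我很强。'},
]

# AutoAWQ accepts a plain list of strings as calib_data, so each Q/A pair is
# joined into one short passage here (an illustrative choice).
calib_data = [f"{item['question']}\n{item['answer']}" for item in custom_data]

model = AutoAWQForCausalLM.from_pretrained(model_path)
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)

# Run AWQ calibration + quantization, then save the quantized weights and tokenizer.
model.quantize(tokenizer, quant_config=quant_config, calib_data=calib_data)
model.save_quantized(quant_path)
tokenizer.save_pretrained(quant_path)
```

Passing a list of plain strings as `calib_data` keeps the example self-contained; to calibrate on wikitext or another bundled dataset instead, swap in the corresponding loader line as step 4 of the patched README instructs.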