From 782a17e4e6203f4ef24a188fdfeea6a33cd3bc45 Mon Sep 17 00:00:00 2001
From: chenxl
Date: Fri, 9 Aug 2024 09:39:42 +0000
Subject: [PATCH] [feature] add bat for windows, update readme

---
 README.md                                      | 10 ++++++++--
 install.bat                                    | 16 ++++++++++++++++
 install.sh                                     |  2 +-
 ktransformers/ktransformers_ext/CMakeLists.txt |  2 +-
 4 files changed, 26 insertions(+), 4 deletions(-)
 create mode 100644 install.bat

diff --git a/README.md b/README.md
index 1ab12d0..a80fe67 100644
--- a/README.md
+++ b/README.md
@@ -82,11 +82,12 @@ Some preparation:

Installation

 1. Use a Docker image, see [documentation for Docker](./doc/en/docker.md)
-2. You can install using Pypi:
+2. You can install using PyPI (for Linux):
     ```
     pip install ktransformers --no-build-isolation
     ```
+    For Windows, a pre-compiled whl package is available at [ktransformers-0.1.1+cu125torch24avx2-cp311-cp311-win_amd64.whl](https://github.com/kvcache-ai/ktransformers/releases/download/v0.1.1/ktransformers-0.1.1+cu125torch24avx2-cp311-cp311-win_amd64.whl), which requires CUDA 12.5, Torch 2.4, and Python 3.11. More pre-compiled packages are being produced.
 3. Or you can download source code and compile:
     - init source code
@@ -97,11 +98,16 @@ Some preparation:
       git submodule update
       ```
     - [Optional] If you want to run with website, please [compile the website](./doc/en/api/server/website.md) before execute ```bash install.sh```
-    - Compile and install
+    - Compile and install (for Linux)
     ```
     bash install.sh
     ```
+    - Compile and install (for Windows)
+    ```
+    install.bat
+    ```
+

Local Chat

 We provide a simple command-line local chat Python script that you can run for testing.
diff --git a/install.bat b/install.bat
new file mode 100644
index 0000000..dc429e4
--- /dev/null
+++ b/install.bat
@@ -0,0 +1,16 @@
+@echo off
+
+REM clear build dirs
+rmdir /S /Q ktransformers\ktransformers_ext\build
+rmdir /S /Q ktransformers\ktransformers_ext\cuda\build
+rmdir /S /Q ktransformers\ktransformers_ext\cuda\dist
+rmdir /S /Q ktransformers\ktransformers_ext\out
+del /F /Q ktransformers\ktransformers_ext\cuda\*.egg-info
+
+echo Installing Python dependencies from requirements-local_chat.txt
+pip install -r requirements-local_chat.txt
+
+echo Installing ktransformers
+set KTRANSFORMERS_FORCE_BUILD=TRUE
+pip install . --no-build-isolation
+echo Installation completed successfully
\ No newline at end of file
diff --git a/install.sh b/install.sh
index fa5ba18..ffb7aca 100644
--- a/install.sh
+++ b/install.sh
@@ -11,5 +11,5 @@ echo "Installing python dependencies from requirements.txt"
 pip install -r requirements-local_chat.txt
 echo "Installing ktransformers"
-pip install . --no-build-isolation
+KTRANSFORMERS_FORCE_BUILD=TRUE pip install . --no-build-isolation
 echo "Installation completed successfully"
\ No newline at end of file
diff --git a/ktransformers/ktransformers_ext/CMakeLists.txt b/ktransformers/ktransformers_ext/CMakeLists.txt
index dd8c604..89647a8 100644
--- a/ktransformers/ktransformers_ext/CMakeLists.txt
+++ b/ktransformers/ktransformers_ext/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.17)
+cmake_minimum_required(VERSION 3.16)
 project(cpuinfer_ext VERSION 0.1.0)
 set(CMAKE_CXX_STANDARD 17)
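
Note (not part of the patch): the Windows wheel added to the README is built against a specific toolchain (CUDA 12.5, Torch 2.4, Python 3.11, as read off the wheel filename). A minimal sketch of a pre-install sanity check is below; it is an illustrative assumption, not code shipped by ktransformers, and only uses standard `sys` and `torch` version attributes.

```python
# Illustrative pre-install check for the pre-compiled Windows wheel.
# Not part of this patch; the expected versions are assumptions read
# off the wheel filename (cu125, torch24, cp311).
import sys

import torch


def check_wheel_compatibility() -> bool:
    ok = True
    if sys.version_info[:2] != (3, 11):
        print(f"Python {sys.version_info.major}.{sys.version_info.minor} found, wheel targets 3.11")
        ok = False
    if not torch.__version__.startswith("2.4"):
        print(f"torch {torch.__version__} found, wheel targets 2.4")
        ok = False
    if torch.version.cuda != "12.5":
        print(f"CUDA {torch.version.cuda} found, wheel targets 12.5")
        ok = False
    return ok


if __name__ == "__main__":
    sys.exit(0 if check_wheel_compatibility() else 1)
```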
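
Note (not part of the patch): both install scripts now export `KTRANSFORMERS_FORCE_BUILD=TRUE` before running `pip install . --no-build-isolation`. A typical way for a Python build script to consume such a flag is sketched below; this is an assumption about how the variable might be read, not the project's actual build code, and the helper name `should_force_build` is hypothetical.

```python
# Hypothetical sketch of how a build script might honor the
# KTRANSFORMERS_FORCE_BUILD variable set by install.sh / install.bat.
# Illustrative only; not taken from the ktransformers source tree.
import os


def should_force_build() -> bool:
    # Treat any "truthy" spelling as a request to rebuild the native
    # extension instead of reusing a cached or pre-built artifact.
    value = os.environ.get("KTRANSFORMERS_FORCE_BUILD", "")
    return value.strip().upper() in {"1", "TRUE", "YES", "ON"}


if __name__ == "__main__":
    print("force build requested:", should_force_build())
```

On scoping: `install.bat` sets the variable with `set`, so it applies to the current cmd session and the `pip` child process it launches, while `install.sh` scopes it to the single inline `pip install` command; in both cases the build running under pip sees the flag.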