daihui.zhang committed · Commit 23f119b · 1 Parent(s): f38a13b
update whispercpp version
README.md CHANGED

@@ -20,7 +20,7 @@ license: mit
 ### WhisperCPP Installation
 > 1. Clone the WhisperCPP repository and initialize its submodules:
 ```bash
-git clone --recurse-submodules https://github.com/absadiki/pywhispercpp.git
+git clone --recurse-submodules https://github.com/absadiki/pywhispercpp.git && cd whisper.cpp && git checkout 170b2faf75c2f6173ef947e6ef346961f3368e1b && cd ../..
 ```
 > 2. Check out the specific commit:
 ```bash
@@ -38,7 +38,7 @@ license: mit
 ```
 > 2. Check out the specific commit:
 ```bash
-cd llama-cpp-python && git checkout
+cd llama-cpp-python && git checkout 99f2ebfde18912adeb7f714b49c1ddb624df3087 && cd vendor/llama.cpp && git checkout 80f19b41869728eeb6a26569957b92a773a2b2c6 && cd ../..
 ```
 > 3. Install Llama-cpp-python with the following command, making sure Metal support is enabled:
 ```bash
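The updated steps pin both bindings to specific upstream commits; the actual Metal-enabled install command falls outside the visible part of the hunk. As a rough post-install sanity check, a short import-and-load test confirms both native extensions built correctly. This is a sketch, not part of the repo: the whisper model name, the GGUF path, and the `n_gpu_layers` choice below are placeholders, while the constructors themselves are the public APIs of pywhispercpp and llama-cpp-python.

```python
# Post-install smoke test (sketch; model name and path are placeholders, not this repo's config).
from pywhispercpp.model import Model   # Python binding over whisper.cpp
from llama_cpp import Llama            # Python binding over llama.cpp

# Loading a small whisper.cpp model confirms the pywhispercpp extension works.
whisper = Model("base")

# Loading any local GGUF confirms llama-cpp-python works; n_gpu_layers=-1 offloads
# all layers, which on macOS exercises the Metal backend if it was compiled in.
llm = Llama(model_path="models/qwen2.5-1.5b-instruct-q5_0.gguf", n_gpu_layers=-1)
print(llm("Say hi.", max_tokens=8)["choices"][0]["text"])
```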
config.py CHANGED

@@ -2,7 +2,7 @@ import pathlib
 import re
 import logging
 
-DEBUG =
+DEBUG = True
 logging.basicConfig(
     level=logging.DEBUG if DEBUG else logging.INFO,
     format="%(asctime)s - %(levelname)s - %(message)s",
@@ -40,12 +40,13 @@ MAX_LENTH_ZH = 4
 WHISPER_PROMPT_EN = ""# "The following is an English sentence."
 MAX_LENGTH_EN= 1
 
-
-WHISPER_MODEL = 'large-v3-turbo-q5_0'
+WHISPER_MODEL = 'medium-q5_0'
+# WHISPER_MODEL = 'large-v3-turbo-q5_0'
 
 # LLM
 LLM_MODEL_PATH = (MODEL_DIR / "qwen2.5-1.5b-instruct-q5_0.gguf").as_posix()
-LLM_LARGE_MODEL_PATH = (MODEL_DIR / "qwen2.5-7b-instruct-q5_0-00001-of-00002.gguf").as_posix()
+LLM_LARGE_MODEL_PATH = (MODEL_DIR / "qwen2.5-1.5b-instruct-q5_0.gguf").as_posix()
+# LLM_LARGE_MODEL_PATH = (MODEL_DIR / "qwen2.5-7b-instruct-q5_0-00001-of-00002.gguf").as_posix()
 
 # VAD
 VAD_MODEL_PATH = (MODEL_DIR / "silero-vad" / "silero_vad.onnx").as_posix()
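The values touched here (DEBUG, WHISPER_MODEL, LLM_MODEL_PATH, LLM_LARGE_MODEL_PATH) are plain module-level settings; how the rest of the codebase consumes them is not shown in this diff. The sketch below is an assumption about the typical wiring, not this repo's loading code: it only illustrates that the commit swaps the whisper model to `medium-q5_0` and points both LLM paths at the 1.5B Qwen2.5 GGUF.

```python
# Sketch of how the edited settings might be consumed; the actual loader in this
# repo is not part of the diff, so treat every call below as illustrative.
import logging
import config  # the module edited above; importing it configures logging

from pywhispercpp.model import Model
from llama_cpp import Llama

logging.debug("DEBUG=%s -> log level %s", config.DEBUG,
              "DEBUG" if config.DEBUG else "INFO")

# WHISPER_MODEL is a whisper.cpp ggml model name ('medium-q5_0' after this commit);
# pywhispercpp can resolve such names to ggml model files.
whisper = Model(config.WHISPER_MODEL)

# Both LLM paths now point at the 1.5B Qwen2.5 GGUF; the 7B split-GGUF line is kept
# commented out for switching back.
llm = Llama(model_path=config.LLM_MODEL_PATH, n_gpu_layers=-1)
llm_large = Llama(model_path=config.LLM_LARGE_MODEL_PATH, n_gpu_layers=-1)
```

Since only the model identifiers change, call sites that read these names from `config` should not need any modification.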