Fedir Zadniprovskyi committed
Commit 20b7748
1 Parent(s): 4581383

chore: update default whisper model
Files changed:
- Dockerfile.cpu +1 -2
- Dockerfile.cuda +1 -1
- src/faster_whisper_server/config.py +2 -2
Dockerfile.cpu
CHANGED
@@ -17,9 +17,8 @@ RUN --mount=type=cache,target=/root/.cache/uv \
 COPY ./src ./pyproject.toml ./uv.lock ./
 RUN --mount=type=cache,target=/root/.cache/uv \
     uv sync --frozen
-ENV WHISPER__MODEL=Systran/faster-whisper-
+ENV WHISPER__MODEL=Systran/faster-whisper-small
 ENV WHISPER__INFERENCE_DEVICE=cpu
-ENV WHISPER__COMPUTE_TYPE=int8
 ENV UVICORN_HOST=0.0.0.0
 ENV UVICORN_PORT=8000
 CMD ["uv", "run", "uvicorn", "faster_whisper_server.main:app"]
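The CPU image now defaults to Systran/faster-whisper-small and no longer pins WHISPER__COMPUTE_TYPE. Since the server is configured through environment variables, the previous behaviour can still be restored at container start. A minimal sketch, assuming the project's published latest-cpu image tag and port mapping (neither is part of this commit):

# illustrative only; the image tag and port are assumptions, the env vars are the ones the Dockerfile sets
docker run --rm -p 8000:8000 \
  -e WHISPER__MODEL=Systran/faster-whisper-medium.en \
  -e WHISPER__COMPUTE_TYPE=int8 \
  fedirz/faster-whisper-server:latest-cpu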
Dockerfile.cuda
CHANGED
@@ -17,7 +17,7 @@ RUN --mount=type=cache,target=/root/.cache/uv \
 COPY ./src ./pyproject.toml ./uv.lock ./
 RUN --mount=type=cache,target=/root/.cache/uv \
     uv sync --frozen
-ENV WHISPER__MODEL=Systran/faster-
+ENV WHISPER__MODEL=Systran/faster-whisper-large-v3
 ENV WHISPER__INFERENCE_DEVICE=auto
 ENV UVICORN_HOST=0.0.0.0
 ENV UVICORN_PORT=8000
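The CUDA image keeps WHISPER__INFERENCE_DEVICE=auto and now defaults to the full large-v3 model. A rough usage sketch for that image, assuming the published latest-cuda tag (an assumption, not part of this commit) and that the container needs the host GPUs passed through:

# illustrative; the image tag is an assumption, --gpus=all exposes the host GPUs to the container
docker run --rm --gpus=all -p 8000:8000 fedirz/faster-whisper-server:latest-cuda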
src/faster_whisper_server/config.py
CHANGED
@@ -150,7 +150,7 @@ class Task(enum.StrEnum):
 class WhisperConfig(BaseModel):
     """See https://github.com/SYSTRAN/faster-whisper/blob/master/faster_whisper/transcribe.py#L599."""

-    model: str = Field(default="Systran/faster-whisper-
+    model: str = Field(default="Systran/faster-whisper-small")
     """
     Default Huggingface model to use for transcription. Note, the model must support being ran using CTranslate2.
     This model will be used if no model is specified in the request.
@@ -205,7 +205,7 @@ class Config(BaseSettings):
     preload_models: list[str] = Field(
         default_factory=list,
         examples=[
-            ["Systran/faster-whisper-
+            ["Systran/faster-whisper-small"],
             ["Systran/faster-whisper-medium.en", "Systran/faster-whisper-small.en"],
         ],
     )
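The Field default changed here is only a fallback: Config is a pydantic BaseSettings class, so its fields can be overridden through environment variables, which is presumably why the Dockerfiles set WHISPER__MODEL rather than patching this file (the double underscore acting as the nesting delimiter for the whisper sub-config). A minimal sketch of a local run that overrides the new default, assuming the same uvicorn entrypoint the Dockerfiles use:

# WHISPER__MODEL is assumed to map to the nested whisper.model setting, as in the Dockerfiles above
WHISPER__MODEL=Systran/faster-whisper-medium.en \
uv run uvicorn faster_whisper_server.main:app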