Merge pull request #233 from harry0703/dev
add openai error logs and compatibility with a local whisper model
@@ -5,6 +5,8 @@ from typing import List
 from loguru import logger
 from openai import OpenAI
 from openai import AzureOpenAI
+from openai.types.chat import ChatCompletion
+
 from app.config import config
 
 
@@ -133,7 +135,15 @@ def _generate_response(prompt: str) -> str:
             messages=[{"role": "user", "content": prompt}]
         )
         if response:
-            content = response.choices[0].message.content
+            if isinstance(response, ChatCompletion):
+                content = response.choices[0].message.content
+            else:
+                raise Exception(
+                    f"[{llm_provider}] returned an invalid response: \"{response}\", please check your network "
+                    f"connection and try again.")
+        else:
+            raise Exception(
+                f"[{llm_provider}] returned an empty response, please check your network connection and try again.")
 
     return content.replace("\n", "")
 
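For reference, the effect of the new check in _generate_response: a string is only returned when the provider actually sends back a ChatCompletion object; an error payload or an empty response now raises with a descriptive message instead of failing later on response.choices. A minimal standalone sketch of the same pattern (the client setup, API key, and model name here are placeholder assumptions, not the repo's config):

    from openai import OpenAI
    from openai.types.chat import ChatCompletion

    def generate(prompt: str) -> str:
        client = OpenAI(api_key="sk-...")  # placeholder; the repo builds the client from app.config
        response = client.chat.completions.create(
            model="gpt-3.5-turbo",  # assumed model name, for illustration only
            messages=[{"role": "user", "content": prompt}],
        )
        if not response:
            raise Exception("empty response, please check your network connection and try again")
        if not isinstance(response, ChatCompletion):
            # Some OpenAI-compatible endpoints return an error dict instead of a ChatCompletion.
            raise Exception(f'invalid response: "{response}"')
        return response.choices[0].message.content.replace("\n", "")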
@@ -1,4 +1,5 @@
 import json
+import os.path
 import re
 
 from faster_whisper import WhisperModel
@@ -17,8 +18,13 @@ model = None
 def create(audio_file, subtitle_file: str = ""):
     global model
     if not model:
-        logger.info(f"loading model: {model_size}, device: {device}, compute_type: {compute_type}")
-        model = WhisperModel(model_size_or_path=model_size,
+        model_path = f"{utils.root_dir()}/models/whisper-{model_size}"
+        model_bin_file = f"{model_path}/model.bin"
+        if not os.path.isdir(model_path) or not os.path.isfile(model_bin_file):
+            model_path = model_size
+
+        logger.info(f"loading model: {model_path}, device: {device}, compute_type: {compute_type}")
+        model = WhisperModel(model_size_or_path=model_path,
                              device=device,
                              compute_type=compute_type)
 
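The Whisper change keeps the previous behaviour as a fallback: if a converted model directory exists at models/whisper-<size> under the project root (detected via model.bin), it is loaded from disk; otherwise the size string is passed through and faster-whisper resolves or downloads it as before. A standalone sketch of that resolution logic, with root_dir and the device settings as assumed parameters standing in for app.utils and config:

    import os.path

    from faster_whisper import WhisperModel

    def load_whisper_model(root_dir: str, model_size: str = "large-v3",
                           device: str = "cpu", compute_type: str = "int8") -> WhisperModel:
        # Prefer a locally downloaded CTranslate2 model, e.g. <root>/models/whisper-large-v3.
        model_path = f"{root_dir}/models/whisper-{model_size}"
        if not os.path.isfile(f"{model_path}/model.bin"):
            # Fall back to the size name; faster-whisper will fetch it from the hub.
            model_path = model_size
        return WhisperModel(model_size_or_path=model_path, device=device, compute_type=compute_type)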