diff --git a/src/blackbox/blackbox_factory.py b/src/blackbox/blackbox_factory.py index 8944eaf..8df0d38 100644 --- a/src/blackbox/blackbox_factory.py +++ b/src/blackbox/blackbox_factory.py @@ -1,10 +1,8 @@ -from . import melotts from .audio_chat import AudioChat from .sentiment import Sentiment from .tts import TTS from .asr import ASR from .audio_to_text import AudioToText -#from .emotion import Emotion from .blackbox import Blackbox # from .text_to_audio import TextToAudio # from .tesou import Tesou @@ -25,12 +23,10 @@ class BlackboxFactory: @inject def __init__(self, audio_to_text: AudioToText, - text_to_audio: TextToAudio, asr: ASR, tts: TTS, sentiment_engine: Sentiment, #emotion: Emotion, - tesou: Tesou, fastchat: Fastchat, audio_chat: AudioChat, g2e: G2E, @@ -39,23 +35,24 @@ class BlackboxFactory: #chroma_upsert: ChromaUpsert, #chroma_chat: ChromaChat, melotts: MeloTTS, - vlms: VLMS) -> None: + vlms: VLMS, + chroma_query: ChromaQuery, + chroma_upsert: ChromaUpsert, + chroma_chat: ChromaChat) -> None: self.models["audio_to_text"] = audio_to_text - self.models["text_to_audio"] = text_to_audio self.models["asr"] = asr self.models["tts"] = tts self.models["sentiment_engine"] = sentiment_engine - self.models["tesou"] = tesou #self.models["emotion"] = emotion self.models["fastchat"] = fastchat self.models["audio_chat"] = audio_chat self.models["g2e"] = g2e self.models["text_and_image"] = text_and_image - #self.models["chroma_query"] = chroma_query - #self.models["chroma_upsert"] = chroma_upsert - #self.models["chroma_chat"] = chroma_chat - self.models["melotts"] = melotts - self.models["vlms"] = vlms + self.models["chroma_query"] = chroma_query + self.models["chroma_upsert"] = chroma_upsert + self.models["chroma_chat"] = chroma_chat + self.models["melotts"] = melotts + self.models["vlms"] = vlms def __call__(self, *args, **kwargs): return self.processing(*args, **kwargs) diff --git a/src/blackbox/chat.py b/src/blackbox/chat.py index 316fcc4..0d5448f 100644 --- a/src/blackbox/chat.py +++ b/src/blackbox/chat.py @@ -21,7 +21,7 @@ 
class Chat(Blackbox): return isinstance(data, list) # model_name有 Qwen1.5-14B-Chat , internlm2-chat-20b - def processing(self, model_name, prompt, template, context: list, temperature, top_p, n, max_tokens) -> str: + def processing(self, model_name, prompt, template, context: list, temperature, top_p, n, max_tokens,stop,frequency_penalty,presence_penalty) -> str: if context == None: context = [] @@ -49,7 +49,9 @@ class Chat(Blackbox): "top_p": top_p, "n": n, "max_tokens": max_tokens, - "stream": False, + "frequency_penalty": frequency_penalty, + "presence_penalty": presence_penalty, + "stop": stop } header = { @@ -75,7 +77,9 @@ class Chat(Blackbox): user_top_p = data.get("top_p") user_n = data.get("n") user_max_tokens = data.get("max_tokens") - + user_stop = data.get("stop") + user_frequency_penalty = data.get("frequency_penalty") + user_presence_penalty = data.get("presence_penalty") if user_question is None: return JSONResponse(content={"error": "question is required"}, status_code=status.HTTP_400_BAD_REQUEST) @@ -87,10 +91,10 @@ class Chat(Blackbox): user_template = "" if user_temperature is None or user_temperature == "": - user_temperature = 0.7 + user_temperature = 0.8 if user_top_p is None or user_top_p == "": - user_top_p = 1 + user_top_p = 0.8 if user_n is None or user_n == "": user_n = 1 @@ -98,6 +102,15 @@ class Chat(Blackbox): if user_max_tokens is None or user_max_tokens == "": user_max_tokens = 1024 + if user_stop is None or user_stop == "": + user_stop = None + + if user_frequency_penalty is None or user_frequency_penalty == "": + user_frequency_penalty = 0.5 + + if user_presence_penalty is None or user_presence_penalty == "": + user_presence_penalty = 0.8 + return JSONResponse(content={"response": self.processing(user_model_name, user_question, user_template, user_context, - user_temperature, user_top_p, user_n, user_max_tokens)}, status_code=status.HTTP_200_OK) \ No newline at end of file + user_temperature, user_top_p, user_n, 
user_max_tokens,user_stop,user_frequency_penalty,user_presence_penalty)}, status_code=status.HTTP_200_OK) \ No newline at end of file