From cc44574ffbc04855febd368acf49a931422f2872 Mon Sep 17 00:00:00 2001
From: gdw6463
Date: Fri, 17 May 2024 17:26:46 +0800
Subject: [PATCH] Log configuration fixed

---
 README.md                        |  5 +++++
 main.py                          |  2 +-
 src/blackbox/blackbox_factory.py |  3 +++
 src/blackbox/g2e.py              | 25 +++++++++++++++----------
 4 files changed, 24 insertions(+), 11 deletions(-)

diff --git a/README.md b/README.md
index c70b482..07fc7c3 100644
--- a/README.md
+++ b/README.md
@@ -34,6 +34,11 @@ python main.py
 ## Configuration
 Create ".env.yaml" at the root of jarvis-models, and copy the following yaml configuration
 ```yaml
+log:
+  level: debug
+  time_format: "%Y-%m-%d %H:%M:%S"
+  filename: "D:/Workspace/Logging/jarvis/jarvis-models.log"
+
 melotts:
   url: http://{IP running docker melotts-api}:18080/convert/tts
 
diff --git a/main.py b/main.py
index 4cafad9..4f70de2 100644
--- a/main.py
+++ b/main.py
@@ -14,7 +14,7 @@ class Main():
     def run(self):
         logger = logging.getLogger(__name__)
         logger.info("jarvis-models start", extra={"version": "0.0.1"})
-        uvicorn.run("server:app", host="0.0.0.0", port=8001, log_level="info")
+        uvicorn.run("server:app", host="0.0.0.0", port=8000, log_level="info")
 
 if __name__ == "__main__":
     injector = Injector()
diff --git a/src/blackbox/blackbox_factory.py b/src/blackbox/blackbox_factory.py
index b633b52..edb0579 100644
--- a/src/blackbox/blackbox_factory.py
+++ b/src/blackbox/blackbox_factory.py
@@ -1,3 +1,4 @@
+from .melotts import MeloTTS
 from .audio_chat import AudioChat
 from .sentiment import Sentiment
 from .tts import TTS
@@ -37,6 +38,7 @@ class BlackboxFactory:
                  #chroma_query: ChromaQuery,
                  #chroma_upsert: ChromaUpsert,
                  #chroma_chat: ChromaChat,
+                 melotts: MeloTTS,
                  vlms: VLMS) -> None:
         self.models["audio_to_text"] = audio_to_text
         self.models["text_to_audio"] = text_to_audio
@@ -52,6 +54,7 @@ class BlackboxFactory:
         #self.models["chroma_query"] = chroma_query
         #self.models["chroma_upsert"] = chroma_upsert
         #self.models["chroma_chat"] = chroma_chat
+        self.models["melotts"] = melotts
         self.models["vlms"] = vlms
 
     def __call__(self, *args, **kwargs):
diff --git a/src/blackbox/g2e.py b/src/blackbox/g2e.py
index 4a6640a..13ebd34 100755
--- a/src/blackbox/g2e.py
+++ b/src/blackbox/g2e.py
@@ -42,17 +42,17 @@ class G2E(Blackbox):
     KOMBUKIKI康普茶价格
     内地常规版:25 RMB
     澳门常规版:28-29 MOP'''
 
-        prompt1 = ''''你是琪琪,活泼的康普茶看板娘,同时你对澳门十分熟悉,是一个澳门旅游专家,请回答任何关于澳门旅游的问题,回答尽量简练明了。
-        '''
-        inject_prompt = '(用活泼的语气说话回答,回答严格限制50字以内)'
+        prompt1 = '''你是琪琪,活泼的康普茶看板娘,同时你对澳门十分熟悉,是一个澳门旅游专家,请回答任何关于澳门旅游的问题,回答尽量简练明了。'''
+        #inject_prompt = '(用活泼的语气说话回答,回答严格限制50字以内)'
+        inject_prompt = '(回答简练,不要输出重复内容,只讲中文)'
 
-        prompt_template = [
-            {"role": "system", "content": background_prompt + prompt1},
-        ]
         #prompt_template = [
-        #    {"role": "system", "content": ''},
+        #    {"role": "system", "content": background_prompt + prompt1},
         #]
+        prompt_template = [
+            {"role": "system", "content": prompt1},
+        ]
 
 
         messages = prompt_template + context + [
@@ -61,6 +61,8 @@ class G2E(Blackbox):
                 "content": prompt + inject_prompt
             }
         ]
+        print("**** History with current prompt input : ****")
+        print(messages)
         client = OpenAI(
             api_key='YOUR_API_KEY',
             base_url=url
@@ -67,16 +69,19 @@ class G2E(Blackbox):
         )
         #model_name = client.models.list().data[0].id
         model_name = client.models.list().data[1].id
         print(model_name)
+
         response = client.chat.completions.create(
             model=model_name,
             messages=messages,
             temperature=0.8,
-            top_p=0.8,
-            # max_tokens = 50
+            top_p=0.8
+            #frequency_penalty=0.5,
+            #presence_penalty=0.8,
+            #stop=100
         )
         fastchat_content = response.choices[0].message.content
-
+        print("*** Model response: " + fastchat_content + " ***")
         return fastchat_content
 
     async def fast_api_handler(self, request: Request) -> Response: