Merge pull request #10 from BoardWare-Genius/main

merge
This commit is contained in:
ACBBZ
2024-05-27 16:12:10 +08:00
committed by GitHub
5 changed files with 140 additions and 74 deletions

View File

@@ -7,7 +7,7 @@ pip install torch==2.0.1 torchvision==0.15.2 torchaudio==2.0.2 --index-url https
```
## More Dependencies
| System | package | web | install command |
| --- | ---- | --- | --- |
| --- |-----------------------| --- | --- |
| python | filetype | https://pypi.org/project/filetype/ | pip install filetype |
| python | fastAPI | https://fastapi.tiangolo.com/ | pip install fastapi |
| python | python-multipart | https://pypi.org/project/python-multipart/ | pip install python-multipart |
@@ -19,6 +19,7 @@ pip install torch==2.0.1 torchvision==0.15.2 torchaudio==2.0.2 --index-url https
| python | langchain | https://github.com/langchain-ai/langchain | pip install langchain |
| python | chromadb | https://docs.trychroma.com/getting-started | pip install chromadb |
| python | lagent | https://github.com/InternLM/lagent/blob/main/README.md | pip install lagent |
| python | sentence_transformers | https://github.com/UKPLab/sentence-transformers | pip install sentence_transformers |
## Start
@@ -86,4 +87,6 @@ Model:
cudnn_conv_algo_search: EXHAUSTIVE
do_copy_in_default_stream: true
batch_size: 3
blackbox:
lazyloading: true
```
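
The config snippet above gains a `blackbox` section alongside the existing `Model` block. A sketch of how the assembled YAML might look (indentation is an assumption; placing `blackbox` at the top level matches the `config.get("blackbox.lazyloading", ...)` lookup added in the configuration file below):

```yaml
Model:
  cudnn_conv_algo_search: EXHAUSTIVE
  do_copy_in_default_stream: true
  batch_size: 3
blackbox:
  lazyloading: true    # false (the default) keeps the old eager model loading
```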

View File

@@ -10,3 +10,4 @@ pip install injector
pip install langchain
pip install chromadb
pip install lagent
pip install sentence_transformers

View File

@@ -1,57 +1,112 @@
from blackbox.emotion import Emotion
from .chat import Chat
from .audio_chat import AudioChat
from .sentiment import Sentiment
from .tts import TTS
from .asr import ASR
from .audio_to_text import AudioToText
from .blackbox import Blackbox
from .fastchat import Fastchat
from .g2e import G2E
from injector import inject, singleton, Injector
from ..configuration import BlackboxConf
blackboxConf = Injector().get(BlackboxConf)
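# model_loader is a decorator factory: with lazy=False the wrapped init_fun runs
# immediately; with lazy=True construction waits until the first call of the
# returned loader, and the built instance is cached in the closure either way.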
def model_loader(lazy=blackboxConf.lazyloading):
def load(init_fun):
model = None
if not lazy:
model = init_fun()
def inner():
nonlocal model
if model is None:
model = init_fun()
return model
return inner
return load
@model_loader(lazy=blackboxConf.lazyloading)
def text_and_image_loader():
from .text_and_image import TextAndImage
from .melotts import MeloTTS
return Injector().get(TextAndImage)
@model_loader(lazy=blackboxConf.lazyloading)
def audio_chat_loader():
from .audio_chat import AudioChat
return Injector().get(AudioChat)
@model_loader(lazy=blackboxConf.lazyloading)
def sentiment_loader():
from .sentiment import Sentiment
return Injector().get(Sentiment)
@model_loader(lazy=blackboxConf.lazyloading)
def g2e_loader():
from .g2e import G2E
return Injector().get(G2E)
@model_loader(lazy=blackboxConf.lazyloading)
def asr_loader():
from .asr import ASR
return Injector().get(ASR)
@model_loader(lazy=blackboxConf.lazyloading)
def vlms_loader():
from .vlms import VLMS
return Injector().get(VLMS)
@model_loader(lazy=blackboxConf.lazyloading)
def melotts_loader():
from .melotts import MeloTTS
return Injector().get(MeloTTS)
@model_loader(lazy=blackboxConf.lazyloading)
def tts_loader():
from .tts import TTS
return Injector().get(TTS)
@model_loader(lazy=blackboxConf.lazyloading)
def emotion_loader():
from .emotion import Emotion
return Injector().get(Emotion)
@model_loader(lazy=blackboxConf.lazyloading)
def fastchat_loader():
from .fastchat import Fastchat
return Injector().get(Fastchat)
@model_loader(lazy=blackboxConf.lazyloading)
def chat_loader():
from .chat import Chat
return Injector().get(Chat)
@model_loader(lazy=blackboxConf.lazyloading)
def chroma_query_loader():
from .chroma_query import ChromaQuery
return Injector().get(ChromaQuery)
@model_loader(lazy=blackboxConf.lazyloading)
def chroma_upsert_loader():
from .chroma_upsert import ChromaUpsert
return Injector().get(ChromaUpsert)
@model_loader(lazy=blackboxConf.lazyloading)
def chroma_chat_load():
from .chroma_chat import ChromaChat
from injector import inject, singleton
return Injector().get(ChromaChat)
@singleton
class BlackboxFactory:
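# maps blackbox names to loader callables (previously: eagerly injected model instances)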
models = {}
@inject
def __init__(self,
audio_to_text: AudioToText,
asr: ASR,
tts: TTS,
sentiment_engine: Sentiment,
emotion: Emotion,
fastchat: Fastchat,
audio_chat: AudioChat,
g2e: G2E,
text_and_image: TextAndImage,
melotts: MeloTTS,
vlms: VLMS,
chroma_query: ChromaQuery,
chroma_upsert: ChromaUpsert,
chroma_chat: ChromaChat,
chat: Chat) -> None:
self.models["audio_to_text"] = audio_to_text
self.models["asr"] = asr
self.models["tts"] = tts
self.models["sentiment_engine"] = sentiment_engine
self.models["emotion"] = emotion
self.models["fastchat"] = fastchat
self.models["audio_chat"] = audio_chat
self.models["g2e"] = g2e
self.models["text_and_image"] = text_and_image
self.models["chroma_query"] = chroma_query
self.models["chroma_upsert"] = chroma_upsert
self.models["chroma_chat"] = chroma_chat
self.models["melotts"] = melotts
self.models["vlms"] = vlms
self.models["chat"] = chat
def __init__(self) -> None:
self.models["asr"] = asr_loader
self.models["tts"] = tts_loader
self.models["sentiment_engine"] = sentiment_loader
self.models["emotion"] = emotion_loader
self.models["fastchat"] = fastchat_loader
self.models["audio_chat"] = audio_chat_loader
self.models["g2e"] = g2e_loader
self.models["text_and_image"] = text_and_image_loader
self.models["chroma_query"] = chroma_query_loader
self.models["chroma_upsert"] = chroma_upsert_loader
self.models["chroma_chat"] = chroma_chat_load
self.models["melotts"] = melotts_loader
self.models["vlms"] = vlms_loader
self.models["chat"] = chat_loader
def __call__(self, *args, **kwargs):
return self.processing(*args, **kwargs)
@@ -60,4 +115,4 @@ class BlackboxFactory:
model = self.models.get(blackbox_name)
if model is None:
raise ValueError("Invalid Blackbox Type...")
return model
return model()
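
The net effect in this file: `BlackboxFactory` now stores loader callables instead of eagerly injected instances, which is why the lookup ends with `return model()` rather than `return model`. A minimal, self-contained sketch of the closure-based lazy loader (the `fake_loader` name and body are illustrative, not from the repo):

```python
def model_loader(lazy=True):
    def load(init_fun):
        model = None
        if not lazy:
            model = init_fun()          # eager: build at decoration time
        def inner():
            nonlocal model
            if model is None:           # lazy: build on first call, then cache
                model = init_fun()
            return model
        return inner
    return load

@model_loader(lazy=True)
def fake_loader():
    print("building model...")
    return object()                     # stands in for Injector().get(SomeModel)

first = fake_loader()    # prints "building model..." and caches the instance
second = fake_loader()   # returns the cached instance, no rebuild
assert first is second
```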

View File

@@ -1,4 +1,3 @@
import logging
from typing import Any, Coroutine
from fastapi import Request, Response, status
@@ -14,8 +13,6 @@ import re
from injector import singleton
logger = logging.getLogger(__name__)
@singleton
class Chat(Blackbox):
@@ -27,15 +24,14 @@ class Chat(Blackbox):
return isinstance(data, list)
# model_name options include Qwen1.5-14B-Chat and internlm2-chat-20b
@logging_time(logger=logger)
def processing(self, *args, **kwargs) -> str:
@logging_time()
def processing(self, prompt: str, context: list, settings: dict) -> str:
settings: dict = args[0]
if settings is None:
settings = {}
user_model_name = settings.get("model_name")
user_context = settings.get("context")
user_question = settings.get("question")
user_context = context
user_question = prompt
user_template = settings.get("template")
user_temperature = settings.get("temperature")
user_top_p = settings.get("top_p")
@@ -44,7 +40,6 @@ class Chat(Blackbox):
user_stop = settings.get("stop")
user_frequency_penalty = settings.get("frequency_penalty")
user_presence_penalty = settings.get("presence_penalty")
if user_context is None:
user_context = []
@@ -124,5 +119,7 @@ class Chat(Blackbox):
return JSONResponse(content={"error": "json parse error"}, status_code=status.HTTP_400_BAD_REQUEST)
setting: dict = data.get("settings")
context = data.get("context")
prompt = data.get("prompt")
return JSONResponse(content={"response": self.processing(setting)}, status_code=status.HTTP_200_OK)
return JSONResponse(content={"response": self.processing(prompt, context, setting)}, status_code=status.HTTP_200_OK)

View File

@@ -1,4 +1,5 @@
from dataclasses import dataclass
from injector import inject,singleton
import yaml
import sys
@@ -102,3 +103,12 @@ class EnvConf():
self.version = "0.0.1"
self.host = config.get("env.host", default="0.0.0.0")
self.port = config.get("env.port", default="8080")
@singleton
@dataclass
class BlackboxConf():
lazyloading: bool
@inject
def __init__(self, config: Configuration) -> None:
self.lazyloading = bool(config.get("blackbox.lazyloading", default=False))
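
For completeness, the factory module above resolves this config with `Injector().get(BlackboxConf)`. A small usage sketch, assuming a `Configuration` binding is available to the injector as elsewhere in the project (the import path is an assumption; adjust to wherever `BlackboxConf` actually lives):

```python
from injector import Injector
from configuration import BlackboxConf  # assumption: use the real module path in the repo

blackbox_conf = Injector().get(BlackboxConf)
if blackbox_conf.lazyloading:
    print("blackbox models will be constructed on first use")
```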