jarvis-models/src/blackbox/emotion.py
from fastapi import Request, Response, status
from fastapi.responses import JSONResponse
from injector import singleton
from lagent.llms.lmdepoly_wrapper import LMDeployClient
from lagent.llms.meta_template import INTERNLM2_META as META

from .blackbox import Blackbox


@singleton
class Emotion(Blackbox):
    """Blackbox that infers the emotion of a text via an LMDeploy-served LLM."""

    def __init__(self,
                 model_name: str = "Mistral-7B-Instruct-v0.2",
                 model_url: str = "http://120.196.116.194:48892") -> None:
        # Client for a remote LMDeploy server; temperature=0 makes the
        # returned emotion label effectively deterministic.
        self.model = LMDeployClient(
            model_name=model_name,
            url=model_url,
            meta_template=META,
            top_p=0.8,
            top_k=100,
            temperature=0,
            repetition_penalty=1.0,
            stop_words=['<|im_end|>'])
    def __call__(self, *args, **kwargs):
        return self.processing(*args, **kwargs)

    def valid(self, *args, **kwargs) -> bool:
        data = args[0]
        return isinstance(data, str)

    def processing(self, text) -> str:
        # `text` is passed straight to stream_chat, so it may be a plain string
        # or an OpenAI-style message list (see fast_api_handler below).
        # stream_chat yields incremental tuples whose second element is the text
        # generated so far; keep overwriting `response` so the last chunk wins.
        response = ""
        for chunk in self.model.stream_chat(text):
            response = chunk[1]
        return response
    async def fast_api_handler(self, request: Request) -> Response:
        try:
            data = await request.json()
        except Exception:
            return JSONResponse(content={"error": "json parse error"},
                                status_code=status.HTTP_400_BAD_REQUEST)
        text = data.get("text")
        if text is None:
            return JSONResponse(content={"error": "text is required"},
                                status_code=status.HTTP_400_BAD_REQUEST)
        prompt = ("Please use one word to infer the emotion of the following passage:\n"
                  + text +
                  "\nJust print out that single word, please.")
        messages = [{'role': 'user', 'content': prompt}]
        sentiment = self.processing(messages)
        return JSONResponse(content={"sentiment": sentiment},
                            status_code=status.HTTP_200_OK)