jarvis-models/src/blackbox/modelscope.py

from typing import Any, Coroutine
from fastapi import Request, Response, status
from fastapi.responses import JSONResponse
from .blackbox import Blackbox
import requests
import json
from modelscope_agent.agents import RolePlay
from modelscope_agent.tools.base import BaseTool
from modelscope_agent.tools import register_tool


class Modelscope(Blackbox):
    def __call__(self, *args, **kwargs):
        return self.processing(*args, **kwargs)

    def valid(self, *args, **kwargs) -> bool:
        data = args[0]
        return isinstance(data, list)

    # Supported model_name values: Qwen1.5-14B-Chat, internlm2-chat-20b
    def processing(self, model_name, prompt, template, context: list) -> str:
        if context is None:
            context = []
        # Tool: look up a user's data in the chroma database via the internal query service
        @register_tool('ChromaQuery')
        class ChromaQueryTool(BaseTool):
            description = '查询chroma数据库中的数据'
            name = 'ChromaQuery'
            parameters: list = [{
                'name': 'id',
                'description': '用户的chroma id',
                'required': True,
                'type': 'string'
            }, {
                'name': 'query',
                'description': '用户需要在chroma中查询的问题',
                'required': True,
                'type': 'string'
            }]

            def call(self, params: str, **kwargs):
                params = self._verify_args(params)
                id = params['id']
                query = params['query']
                query_data = {
                    "chroma_query_data": {
                        "id": id,
                        "question": query
                    }
                }
                url = "http://10.6.80.75:7003"
                response = requests.post(f"{url}/api/chroma_query", json=query_data)
                result = response.json()['response']
                return str({'result': f'Chroma ID为{id}的用户,查询结果为{result}'})
        # Tool: search the web through the Serper API and return the answer-box snippet
        @register_tool('WebSearch')
        class WebSearchTool(BaseTool):
            description = '查询网络中的内容'
            name = 'WebSearch'
            parameters: list = [{
                'name': 'search_term',
                'description': '用户需要在Web中查询的问题',
                'required': True,
                'type': 'string'
            }]

            def call(self, params: str, **kwargs):
                params = self._verify_args(params)
                search_term = params['search_term']
                api_key = '9e51be0aaecb5a56fe2faead6e2c702fde92e62a'
                headers = {
                    'X-API-KEY': api_key,
                    'Content-Type': 'application/json',
                }
                query_params = {
                    'q': search_term
                }
                try:
                    response = requests.post(
                        'https://google.serper.dev/search',
                        headers=headers,
                        params=query_params,
                        timeout=5)
                except Exception as e:
                    return -1, str(e)
                result = response.json()['answerBox']['snippet']
                return str({'result': f'WebSearch查询结果为{search_term}{result}'})
        # define LLM
        api_base_url = "http://120.196.116.194:48892/v1"
        api_key = "EMPTY"
        LLM_MODEL = model_name
        llm_config = {
            'model': LLM_MODEL,
            'model_server': 'openai',
            'api_base': api_base_url,
            'api_key': api_key
        }
        function_list = ['WebSearch', 'ChromaQuery']
        bot = RolePlay(function_list=function_list, llm=llm_config, instruction=template)
        response = bot.run(prompt, history=context, lang='zh')

        # bot.run streams chunks; concatenate them into the final answer
        text = ''
        for chunk in response:
            text += chunk
        return text

    async def fast_api_handler(self, request: Request) -> Response:
        try:
            data = await request.json()
        except Exception:
            return JSONResponse(content={"error": "json parse error"}, status_code=status.HTTP_400_BAD_REQUEST)
        user_model_name = data.get("model_name")
        user_context = data.get("context")
        user_prompt = data.get("prompt")
        user_template = data.get("template")
        if user_prompt is None:
            return JSONResponse(content={"error": "prompt is required"}, status_code=status.HTTP_400_BAD_REQUEST)
        if user_model_name is None or not user_model_name.strip():
            user_model_name = "Qwen1.5-14B-Chat"
        if user_template is None or not user_template.strip():
            # user_template sets the LLM's speaking tone, e.g. template = "使用小丑的语气说话。".
            # It may be an empty string, a user-defined tone, or one of the tones we provide.
            user_template = ""
        else:
            user_template = f"使用{user_template}的语气说话。"
        return JSONResponse(
            content={"response": self.processing(user_model_name, user_prompt, user_template, user_context)},
            status_code=status.HTTP_200_OK)
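
A minimal client-side sketch of calling this handler. The route path and host are assumptions; where fast_api_handler is actually mounted is decided by the Blackbox base class, not this file. The field names and defaults below match the handler above.

# Hypothetical usage sketch (not part of the module).
# "/api/modelscope" and the host/port are assumed, not defined in this file.
import requests

payload = {
    "model_name": "Qwen1.5-14B-Chat",  # optional; blank falls back to Qwen1.5-14B-Chat
    "prompt": "帮我查一下今天的天气",       # required; missing prompt returns HTTP 400
    "template": "小丑",                  # optional tone; becomes "使用小丑的语气说话。"
    "context": []                       # optional chat history passed to RolePlay
}

resp = requests.post("http://localhost:8000/api/modelscope", json=payload)  # assumed URL
print(resp.json()["response"])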