vlms update

This commit is contained in:
gdw6463
2024-05-14 11:40:50 +08:00
parent 68d5088552
commit 10cd635eb9

View File

@@ -33,7 +33,7 @@ class VLMS(Blackbox):
else:
model_name = "infer-qwen-vl"
url = 'http://120.196.116.194:48894/' + model_name + '/'
url = 'http://10.6.80.59:8001/' + model_name + '/'
if is_base64(images):
images_data = images
@@ -48,17 +48,14 @@
return data.text
async def fast_api_handler(self, request: Request) -> Response:
# try:
# data = await request.json()
# except:
# return JSONResponse(content={"error": "json parse error"}, status_code=status.HTTP_400_BAD_REQUEST)
try:
data = await request.json()
except:
return JSONResponse(content={"error": "json parse error"}, status_code=status.HTTP_400_BAD_REQUEST)
# model_name = data.get("model_name")
# prompt = data.get("prompt")
# img_data = data.get("img_data")
model_name = request.get("model_name")
prompt = request.get("prompt")
img_data = request.get("img_data")
model_name = data.get("model_name")
prompt = data.get("prompt")
img_data = data.get("img_data")
if prompt is None:
return JSONResponse(content={'error': "Question is required"}, status_code=status.HTTP_400_BAD_REQUEST)