diff --git a/src/blackbox/chroma_query.py b/src/blackbox/chroma_query.py
index c936bf3..449b317 100755
--- a/src/blackbox/chroma_query.py
+++ b/src/blackbox/chroma_query.py
@@ -22,11 +22,12 @@ class ChromaQuery(Blackbox):
     def __init__(self, *args, **kwargs) -> None:
         # config = read_yaml(args[0])
         # load chromadb and embedding model
-        self.embedding_model_1 = embedding_functions.SentenceTransformerEmbeddingFunction(model_name="/home/gpu/Workspace/Models/BAAI/bge-large-zh-v1.5", device = "cuda:0")
-        self.embedding_model_2 = embedding_functions.SentenceTransformerEmbeddingFunction(model_name="/home/gpu/Workspace/Models/BAAI/bge-small-en-v1.5", device = "cuda:0")
-        self.client_1 = chromadb.HttpClient(host='10.6.81.119', port=7000)
+        self.embedding_model_1 = embedding_functions.SentenceTransformerEmbeddingFunction(model_name="/home/administrator/Workspace/Models/BAAI/bge-large-zh-v1.5", device = "cuda:0")
+        self.embedding_model_2 = embedding_functions.SentenceTransformerEmbeddingFunction(model_name="/home/administrator/Workspace/Models/BAAI/bge-small-en-v1.5", device = "cuda:0")
+        self.embedding_model_3 = embedding_functions.SentenceTransformerEmbeddingFunction(model_name="/home/administrator/Workspace/Models/BAAI/bge-m3", device = "cuda:0")
+        self.client_1 = chromadb.HttpClient(host='172.16.4.7', port=7000)
         # self.client_2 = chromadb.HttpClient(host='10.6.82.192', port=8000)
-        self.reranker_model_1 = CrossEncoder("/home/gpu/Workspace/Models/BAAI/bge-reranker-v2-m3", max_length=512, device = "cuda")
+        self.reranker_model_1 = CrossEncoder("/home/administrator/Workspace/Models/BAAI/bge-reranker-v2-m3", max_length=512, device = "cuda")
 
     def __call__(self, *args, **kwargs):
         return self.processing(*args, **kwargs)
@@ -56,10 +57,10 @@ class ChromaQuery(Blackbox):
             return JSONResponse(content={"error": "question is required"}, status_code=status.HTTP_400_BAD_REQUEST)
 
         if chroma_embedding_model is None or chroma_embedding_model.isspace() or chroma_embedding_model == "":
-            chroma_embedding_model = "/home/gpu/Workspace/Models/BAAI/bge-large-zh-v1.5"
+            chroma_embedding_model = "/home/administrator/Workspace/Models/BAAI/bge-large-zh-v1.5"
 
         if chroma_host is None or chroma_host.isspace() or chroma_host == "":
-            chroma_host = "10.6.81.119"
+            chroma_host = "172.16.4.7"
 
         if chroma_port is None or chroma_port.isspace() or chroma_port == "":
             chroma_port = "7000"
@@ -71,7 +72,7 @@ class ChromaQuery(Blackbox):
            chroma_n_results = 10
 
        # load client and embedding model from init
-        if re.search(r"10.6.81.119", chroma_host) and re.search(r"7000", chroma_port):
+        if re.search(r"172.16.4.7", chroma_host) and re.search(r"7000", chroma_port):
            client = self.client_1
        else:
            try:
@@ -79,10 +80,12 @@ class ChromaQuery(Blackbox):
            except:
                return JSONResponse(content={"error": "chroma client not found"}, status_code=status.HTTP_400_BAD_REQUEST)
 
-        if re.search(r"/home/gpu/Workspace/Models/BAAI/bge-large-zh-v1.5", chroma_embedding_model):
+        if re.search(r"/home/administrator/Workspace/Models/BAAI/bge-large-zh-v1.5", chroma_embedding_model):
            embedding_model = self.embedding_model_1
-        elif re.search(r"/home/gpu/Workspace/Models/BAAI/bge-small-en-v1.5", chroma_embedding_model):
+        elif re.search(r"/home/administrator/Workspace/Models/BAAI/bge-small-en-v1.5", chroma_embedding_model):
            embedding_model = self.embedding_model_2
+        elif re.search(r"/home/administrator/Workspace/Models/BAAI/bge-m3", chroma_embedding_model):
+            embedding_model = self.embedding_model_3
        else:
            try:
                embedding_model = embedding_functions.SentenceTransformerEmbeddingFunction(model_name=chroma_embedding_model, device = "cuda:0")
@@ -120,8 +123,8 @@ class ChromaQuery(Blackbox):
        final_result = str(results["documents"])
 
        if chroma_reranker_model:
-            if re.search(r"/home/gpu/Workspace/Models/BAAI/bge-reranker-v2-m3", chroma_embedding_model):
-                reranker_model = self.chroma_reranker_model_1
+            if re.search(r"/home/administrator/Workspace/Models/BAAI/bge-reranker-v2-m3", chroma_reranker_model):
+                reranker_model = self.reranker_model_1
            else:
                try:
                    reranker_model = CrossEncoder(chroma_reranker_model, max_length=512, device = "cuda")
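
Reviewer note: below is a minimal sketch of how the patched query path can be exercised end to end, using only public chromadb and sentence-transformers APIs. The host, port, and model paths are the new defaults from this diff; the collection name "docs" and the sample question are assumptions for illustration only, not part of the change.

import chromadb
from chromadb.utils import embedding_functions
from sentence_transformers import CrossEncoder

# Connect to the Chroma server and load an embedding function
# (host/port and model path taken from the new defaults in this diff).
client = chromadb.HttpClient(host="172.16.4.7", port=7000)
embedding_model = embedding_functions.SentenceTransformerEmbeddingFunction(
    model_name="/home/administrator/Workspace/Models/BAAI/bge-m3", device="cuda:0"
)

# "docs" is a hypothetical collection name used only for this example.
collection = client.get_collection(name="docs", embedding_function=embedding_model)
question = "What is the warranty period?"
results = collection.query(query_texts=[question], n_results=10)

# Optional rerank step, mirroring the corrected branch above:
# score each (question, document) pair and sort by score, highest first.
reranker = CrossEncoder("/home/administrator/Workspace/Models/BAAI/bge-reranker-v2-m3",
                        max_length=512, device="cuda")
docs = results["documents"][0]
scores = reranker.predict([(question, d) for d in docs])
reranked = [d for _, d in sorted(zip(scores, docs), key=lambda p: p[0], reverse=True)]
print(reranked[:3])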