style: convert /home/gpu/ to /

This commit is contained in:
0Xiao0
2024-10-28 17:38:40 +08:00
parent f4b971d2fd
commit 8fe010bbbe
12 changed files with 49 additions and 49 deletions

View File

@ -9,16 +9,16 @@ from langchain_community.embeddings.sentence_transformer import SentenceTransfor
# Ingestion side of a small RAG demo: load a text corpus, split it into
# chunks, embed with BGE-M3, and (re)build a remote Chroma collection.
# NOTE(review): these lines come from a rendered commit diff, so several
# statements appear twice (pre-change and post-change versions); in a
# straight-line execution only the later assignment of each pair takes effect.
import time
# chroma run --path chroma_db/ --port 8000 --host 0.0.0.0
# loader = TextLoader("/home/administrator/Workspace/chroma_data/粤语语料.txt",encoding="utf-8")
# Pre-change absolute path; immediately overwritten by the root-relative path below.
loader = TextLoader("/home/administrator/Workspace/jarvis-models/sample/RAG_boss.txt")
# loader = TextLoader("/Workspace/chroma_data/粤语语料.txt",encoding="utf-8")
loader = TextLoader("/Workspace/jarvis-models/sample/RAG_boss.txt")
documents = loader.load()
# chunk_size=10 is very small — presumably intentional for line-level CJK
# text split on newlines; verify against the corpus format.
text_splitter = RecursiveCharacterTextSplitter(chunk_size=10, chunk_overlap=0, length_function=len, is_separator_regex=True,separators=['\n', '\n\n'])
docs = text_splitter.split_documents(documents)
print("len(docs)", len(docs))
# Stable per-chunk ids; the prefix is a runtime string and must stay as-is.
ids = ["粤语语料"+str(i) for i in range(len(docs))]
# Pre-change model path / host; both are reassigned two lines down.
embedding_model = SentenceTransformerEmbeddings(model_name='/home/administrator/Workspace/Models/BAAI/bge-m3', model_kwargs={"device": "cuda:1"})
client = chromadb.HttpClient(host='172.16.4.7', port=7000)
embedding_model = SentenceTransformerEmbeddings(model_name='/Workspace/Models/BAAI/bge-m3', model_kwargs={"device": "cuda:1"})
client = chromadb.HttpClient(host='192.168.0.200', port=7000)
# NOTE(review): `id` shadows the builtin; also delete_collection raises if the
# collection does not yet exist — confirm first-run behavior.
id = "boss"
client.delete_collection(id)
@ -28,13 +28,13 @@ db = Chroma.from_documents(documents=docs, embedding=embedding_model, ids=ids, c
embedding_model = embedding_functions.SentenceTransformerEmbeddingFunction(model_name="/home/administrator/Workspace/Models/BAAI/bge-m3", device = "cuda:1")
embedding_model = embedding_functions.SentenceTransformerEmbeddingFunction(model_name="/Workspace/Models/BAAI/bge-m3", device = "cuda:1")
client = chromadb.HttpClient(host='172.16.4.7', port=7000)
client = chromadb.HttpClient(host='192.168.0.200', port=7000)
collection = client.get_collection(id, embedding_function=embedding_model)
reranker_model = CrossEncoder("/home/administrator/Workspace/Models/BAAI/bge-reranker-v2-m3", max_length=512, device = "cuda:1")
reranker_model = CrossEncoder("/Workspace/Models/BAAI/bge-reranker-v2-m3", max_length=512, device = "cuda:1")
while True:
usr_question = input("\n 请输入问题: ")