Refactor code structure for improved readability and maintainability

This commit is contained in:
2026-05-08 17:40:11 +08:00
parent 602c2bd71b
commit 0acee1ec6c
20 changed files with 5410 additions and 79 deletions

View File

@@ -0,0 +1,102 @@
import asyncio
import os
from uuid import uuid4
import pytest
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
import memory_gateway.api_v2 as api_v2
from memory_gateway.evermemos_client import EverMemOSClient
from memory_gateway.openviking_client import OpenVikingClient
from memory_gateway.repositories import InMemoryRepository
from memory_gateway.schemas_v2 import BackendRefStatus, BackendType, IngestRequest, IngestResponse, OperationStatus
from memory_gateway.server_auth import verify_api_key_compat
from memory_gateway.services_v2 import MemoryGatewayV2Service
# Opt-in gate: every test in this module talks to real external backends,
# so the whole file is skipped unless the operator explicitly enables it.
pytestmark = pytest.mark.skipif(
    os.environ.get("RUN_REAL_BACKEND_TESTS") != "1",
    reason="real backend ingest test is opt-in; set RUN_REAL_BACKEND_TESTS=1",
)
def _env(name: str) -> str:
value = os.environ.get(name)
if not value:
pytest.skip(f"{name} is required for real backend ingest test")
return value
def test_real_openviking_and_evermemos_ingest_writes_memory_refs():
    """Opt-in smoke test: ingest one message through the v2 service against
    real OpenViking and EverMemOS backends and verify that a memory ref for
    each backend is recorded in the repository.

    OpenViking must succeed outright; EverMemOS is allowed to fail, in which
    case the overall operation must be reported as PARTIAL_SUCCESS.
    """
    # Required endpoints — _env() skips the test when either is missing.
    openviking_base_url = _env("OPENVIKING_BASE_URL")
    evermemos_base_url = _env("EVERMEMOS_BASE_URL")
    # Optional credentials / ingest paths; empty or None are passed through
    # to the clients as-is.
    openviking_api_key = os.environ.get("OPENVIKING_API_KEY", "")
    evermemos_api_key = os.environ.get("EVERMEMOS_API_KEY", "")
    openviking_ingest_path = os.environ.get("OPENVIKING_INGEST_PATH")
    evermemos_ingest_path = os.environ.get("EVERMEMOS_INGEST_PATH")

    async def openviking_factory():
        # Factory form: the service constructs a fresh real-mode client when
        # it needs one, rather than sharing a single instance.
        return OpenVikingClient(
            mode="real",
            base_url=openviking_base_url,
            api_key=openviking_api_key,
            ingest_path=openviking_ingest_path,
        )

    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=openviking_factory,
        evermemos_client=EverMemOSClient(
            mode="real",
            base_url=evermemos_base_url,
            api_key=evermemos_api_key,
            ingest_path=evermemos_ingest_path,
        ),
    )
    # Unique suffix keeps session/turn/request ids from colliding across runs.
    run_id = uuid4().hex[:12]
    response = asyncio.run(post_ingest(service, run_id))
    refs = repo.list_memory_refs(session_id=f"real_ingest_sess_{run_id}", limit=10)
    # One ref per backend must have been written for this session.
    assert {ref.backend_type for ref in refs} == {BackendType.OPENVIKING, BackendType.EVERMEMOS}
    assert all(ref.content_hash for ref in refs)
    openviking_ref = next(ref for ref in refs if ref.backend_type == BackendType.OPENVIKING)
    evermemos_ref = next(ref for ref in refs if ref.backend_type == BackendType.EVERMEMOS)
    # OpenViking has no tolerated-failure path: it must succeed.
    assert openviking_ref.status == BackendRefStatus.SUCCESS
    if evermemos_ref.status == BackendRefStatus.SUCCESS:
        # Both backends succeeded: full success with native identifiers set.
        assert response.status == OperationStatus.SUCCESS
        assert evermemos_ref.native_id
        assert evermemos_ref.native_uri
    else:
        # EverMemOS failure is tolerated, but must be surfaced as a partial
        # success with an error message recorded on the failed ref.
        assert evermemos_ref.status == BackendRefStatus.FAILED
        assert response.status == OperationStatus.PARTIAL_SUCCESS
        assert evermemos_ref.error_message
async def post_ingest(service: MemoryGatewayV2Service, run_id: str):
    """Mount the v2 router on a throwaway FastAPI app, POST one synthetic
    ingest request (all identifiers salted with *run_id*), and return the
    parsed ``IngestResponse``.

    Auth is bypassed via a dependency override. Raises on a non-2xx HTTP
    status (``response.raise_for_status()``).
    """
    # Point the router's module-level service global at the caller's instance.
    api_v2.v2_service = service
    app = FastAPI()
    # Disable API-key verification for the in-process test client.
    app.dependency_overrides[verify_api_key_compat] = lambda: None
    app.include_router(api_v2.router)
    request = IngestRequest(
        workspace_id=os.environ.get("REAL_BACKEND_WORKSPACE_ID", "ws_real_ingest"),
        user_id=os.environ.get("REAL_BACKEND_USER_ID", "user_real_ingest"),
        agent_id=os.environ.get("REAL_BACKEND_AGENT_ID", "agent_real_ingest"),
        # run_id-salted identifiers so repeated runs do not collide.
        session_id=f"real_ingest_sess_{run_id}",
        turn_id=f"real_ingest_turn_{run_id}",
        request_id=f"real_ingest_req_{run_id}",
        idempotency_key=f"real_ingest_idem_{run_id}",
        namespace=os.environ.get("REAL_BACKEND_NAMESPACE", "workspace/ws_real_ingest/user/user_real_ingest"),
        source_type="integration_test",
        source_event_id=f"real_ingest_evt_{run_id}",
        role="user",
        content=f"Memory Gateway real ingest smoke test {run_id}",
        metadata={"source_channel": "integration_test"},
    )
    # Drive the app in-process over ASGI; no real network server is started.
    async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client:
        response = await client.post("/v2/conversations/ingest", json=request.model_dump(mode="json"))
        response.raise_for_status()
        return IngestResponse.model_validate(response.json())

View File

@ -1,8 +1,10 @@
import asyncio
import sys
import types
import pytest
from fastapi import HTTPException
from fastapi.responses import StreamingResponse
from fastapi.testclient import TestClient
def install_test_stubs() -> None:
@ -59,8 +61,8 @@ def install_test_stubs() -> None:
install_test_stubs()
from memory_gateway.server import app
from memory_gateway.types import Config, ObsidianConfig, SearchResult, ServerConfig
import memory_gateway.server as server
from memory_gateway.types import CommitSummaryRequest, Config, ObsidianConfig, SearchRequest, SearchResult, ServerConfig
class FakeOVClient:
@ -117,9 +119,13 @@ async def fake_summarize_with_llm(content, **kwargs):
}
def build_headers(api_key: str | None):
return {"x-api-key": api_key} if api_key is not None else {}
class FakeUploadFile:
    """Minimal stand-in for an upload file object used by the server handlers:
    exposes a ``filename`` attribute and an async ``read()`` returning the
    preloaded bytes."""

    def __init__(self, filename: str, content: bytes) -> None:
        self.filename = filename
        self._payload = content

    async def read(self) -> bytes:
        """Return the full buffered content in one call."""
        return self._payload
def test_health_requires_api_key(monkeypatch):
monkeypatch.setattr(
@ -131,14 +137,15 @@ def test_health_requires_api_key(monkeypatch):
fake_get_openviking_client,
)
monkeypatch.setattr("memory_gateway.server.summarize_with_llm", fake_summarize_with_llm)
monkeypatch.setattr("memory_gateway.server.v1_service.evermemos_health", lambda: {"status": "disabled"})
with TestClient(app) as client:
response = client.get("/health")
assert response.status_code == 401
with pytest.raises(HTTPException) as exc_info:
server.verify_api_key()
assert exc_info.value.status_code == 401
response = client.get("/health", headers=build_headers("secret"))
assert response.status_code == 200
assert response.json()["openviking"]["status"] == "ok"
server.verify_api_key("secret")
payload = asyncio.run(server.health_check())
assert payload["openviking"]["status"] == "ok"
def test_mcp_rpc_lists_tools_with_api_key(monkeypatch):
@ -151,18 +158,11 @@ def test_mcp_rpc_lists_tools_with_api_key(monkeypatch):
fake_get_openviking_client,
)
with TestClient(app) as client:
response = client.post(
"/mcp/rpc",
json={"jsonrpc": "2.0", "id": 1, "method": "tools/list", "params": {}},
headers=build_headers("secret"),
)
assert response.status_code == 200
payload = response.json()
assert payload["jsonrpc"] == "2.0"
assert len(payload["result"]["tools"]) >= 7
assert any(tool["name"] == "commit_summary" for tool in payload["result"]["tools"])
assert any(tool["name"] == "memory_search" for tool in payload["result"]["tools"])
server.verify_api_key("secret")
tools = asyncio.run(server.list_tools())
assert len(tools) >= 7
assert any(tool.name == "commit_summary" for tool in tools)
assert any(tool.name == "memory_search" for tool in tools)
def test_search_passes_through_gateway(monkeypatch):
@ -175,12 +175,9 @@ def test_search_passes_through_gateway(monkeypatch):
fake_get_openviking_client,
)
with TestClient(app) as client:
response = client.post("/api/search", json={"query": "phishing"})
assert response.status_code == 200
payload = response.json()
assert payload["total"] == 1
assert payload["results"][0]["abstract"] == "phishing"
payload = asyncio.run(server.api_search(SearchRequest(query="phishing")))
assert payload["total"] == 1
assert payload["results"][0]["abstract"] == "phishing"
def test_summary_endpoint_builds_generic_artifact(monkeypatch):
@ -194,28 +191,26 @@ def test_summary_endpoint_builds_generic_artifact(monkeypatch):
)
monkeypatch.setattr("memory_gateway.server.summarize_with_llm", fake_summarize_with_llm)
with TestClient(app) as client:
response = client.post(
"/api/summary",
json={
"title": "Demo investigation summary",
"content": "结论:这是一次高价值沉淀。\n- 证据:命中历史 case。\n- 建议:后续复用该处置路径。",
"namespace": "demo",
"memory_type": "knowledge",
"tags": ["demo", "summary"],
"persist_as": "none",
},
payload = asyncio.run(
server.api_commit_summary(
CommitSummaryRequest(
title="Demo investigation summary",
content="结论:这是一次高价值沉淀。\n- 证据:命中历史 case。\n- 建议:后续复用该处置路径。",
namespace="demo",
memory_type="knowledge",
tags=["demo", "summary"],
persist_as="none",
)
)
assert response.status_code == 200
payload = response.json()
assert payload["status"] == "ok"
assert payload["artifact"]["title"] == "Demo investigation summary"
assert payload["artifact"]["namespace"] == "demo"
assert payload["artifact"]["memory_type"] == "knowledge"
assert payload["artifact"]["summary"].startswith("LLM summary:")
assert payload["artifact"]["llm"]["provider"] == "fake"
assert payload["memory_result"] is None
assert payload["resource_result"] is None
)
assert payload["status"] == "ok"
assert payload["artifact"]["title"] == "Demo investigation summary"
assert payload["artifact"]["namespace"] == "demo"
assert payload["artifact"]["memory_type"] == "knowledge"
assert payload["artifact"]["summary"].startswith("LLM summary:")
assert payload["artifact"]["llm"]["provider"] == "fake"
assert payload["memory_result"] is None
assert payload["resource_result"] is None
def test_knowledge_upload_converts_saves_and_commits(monkeypatch, tmp_path):
@ -230,21 +225,27 @@ def test_knowledge_upload_converts_saves_and_commits(monkeypatch, tmp_path):
monkeypatch.setattr("memory_gateway.server.summarize_with_llm", fake_summarize_with_llm)
monkeypatch.setattr("memory_gateway.server.convert_file_to_markdown", lambda path: "# Uploaded Doc\n\nImportant uploaded knowledge.")
with TestClient(app) as client:
response = client.post(
"/api/knowledge/upload",
data={
"title": "Uploaded Knowledge",
"namespace": "demo",
"knowledge_type": "playbook",
"tags": "demo,upload",
"persist_as": "resource",
},
files={"file": ("sample.txt", b"hello", "text/plain")},
)
async def fake_to_thread(func, *args, **kwargs):
return func(*args, **kwargs)
monkeypatch.setattr("memory_gateway.server.asyncio.to_thread", fake_to_thread)
upload = FakeUploadFile(filename="sample.txt", content=b"hello")
payload = asyncio.run(
server.api_upload_knowledge(
file=upload,
title="Uploaded Knowledge",
namespace="demo",
knowledge_type="playbook",
tags="demo,upload",
source=None,
obsidian_dir=None,
resource_uri=None,
persist_as="resource",
max_summary_chars=1000,
)
)
assert response.status_code == 200
payload = response.json()
assert payload["status"] == "ok"
assert payload["artifact"]["schema_version"] == "memory-gateway.knowledge_upload.v1"
assert payload["artifact"]["knowledge_type"] == "playbook"

1925
tests/test_v2_api.py Normal file

File diff suppressed because it is too large Load Diff